_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
f8fbe4cde44f829b16ed92a2847312809cc43baf3c73a042095c9663dc4340be | cunger/pythia | spotlight.clj | (ns core.external.ner.spotlight
(:require [settings]
[core.external.http :as http]
[clojure.data.json :as json]))
(declare type-list)
(defn http-request
  "Builds the Spotlight annotation request fragment for the currently
   configured domain and language (selected via settings/domain and
   settings/language). `input` is expected to be URL-encoded already —
   callers pass it through http/urlize first (see get-entities)."
  [input]
  (let [per-language {:de (str "=" input "&spotter=Default")
                      :en (str "=" input "&spotter=Default&confidence=0.5&support=20")
                      :es (str ":2231/rest/annotate?text=" input "&spotter=Default")}]
    (settings/language (settings/domain {:dbpedia per-language}))))
;; Main
;; must implement: get-entities, filter-entities
(defn get-entities
  "Annotates `input` with DBpedia Spotlight and returns a (lazy) seq of
   entity maps with keys :uri, :form, :offset and :types.
   Returns [] when the request fails (non-200 status) or when the
   response body contains no \"Resources\" key.
   Fix: the original returned nil (missing else branch) for a 200
   response without \"Resources\"; it now returns [] consistently."
  [input]
  (let [request  (http-request (http/urlize input))
        response (http/get-response :get request {:headers {"accept" "application/json"}} identity)
        status   (:status response)]
    (if (= status 200)
      (let [body (json/read-str (:body response))]
        (if (contains? body "Resources")
          (for [resource (get body "Resources")]
            {:uri    (get resource "@URI")
             :form   (get resource "@surfaceForm")
             :offset (get resource "@offset")
             :types  (type-list (get resource "@types"))})
          []))
      [])))
;; Aux
(defn type-list
  "Parses Spotlight's comma-separated @types string and returns the
   DBpedia types as ontology-relative names, e.g.
   \"DBpedia:Person,Schema:Person\" -> (\"/Person\").
   Non-DBpedia entries are dropped. Returns an empty seq for an input
   without DBpedia types (including the empty string).
   Idiom fix: uses clojure.string/starts-with? instead of reflective
   Java interop (.startsWith), and a plain #\",\" pattern (the comma
   needs no escaping)."
  [string]
  (->> (clojure.string/split string #",")
       (filter #(clojure.string/starts-with? % "DBpedia:"))
       (map #(clojure.string/replace % "DBpedia:" "/"))))
(defn filter-entities
  "Keeps only the entities that carry a usable type list: :types must be
   non-empty and must not contain a blank type. Entities with a nil,
   empty, or blank-containing :types are dropped."
  [entities]
  (letfn [(typed? [entity]
            (let [types (:types entity)]
              (and (seq types)
                   (not-any? #{""} types))))]
    (filter typed? entities)))
(defn most-general-type
  "Returns the most general type of an entity as a bare name: the last
   entry of :types with its slash(es) stripped, e.g. \"/Person\" -> \"Person\"."
  [entity]
  (-> entity :types last (clojure.string/replace "/" "")))
must implement: get-entities, filter-entities
Aux | (ns core.external.ner.spotlight
(:require [settings]
[core.external.http :as http]
[clojure.data.json :as json]))
(declare type-list)
(defn http-request [input]
(settings/language (settings/domain
{ :dbpedia { :de (str "=" input "&spotter=Default")
:en (str "=" input "&spotter=Default&confidence=0.5&support=20")
:es (str ":2231/rest/annotate?text=" input "&spotter=Default") }})))
(defn get-entities [input]
(let [request (http-request (http/urlize input))
response (http/get-response :get request {:headers {"accept" "application/json"}} identity)
status (:status response)]
(if (= status 200)
(let [body (json/read-str (:body response))]
(if (contains? body "Resources")
(for [resource (get body "Resources")]
{ :uri (get resource "@URI" )
:form (get resource "@surfaceForm")
:offset (get resource "@offset")
:types (type-list (get resource "@types"))})))
[])))
(defn type-list [string]
(map #(clojure.string/replace % "DBpedia:" "/")
(filter #(.startsWith % "DBpedia:")
(clojure.string/split string #"\,"))))
(defn filter-entities [entities]
(remove #(or (empty? (:types %))
(some #{""} (:types %)))
entities))
(defn most-general-type [entity] (clojure.string/replace (last (:types entity)) "/" "")) |
8d48afaf562f9e5a378743add857efba78a94da33189c9cf5414ef2f1c7aa891 | ds-wizard/engine-backend | DocumentTemplateFormatSM.hs | module Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatSM where
import Data.Swagger
import Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatDTO
import Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatJM ()
import Shared.Database.Migration.Development.DocumentTemplate.Data.DocumentTemplateFormats
import Shared.Service.DocumentTemplate.DocumentTemplateMapper
import Shared.Util.Swagger
instance ToSchema DocumentTemplateFormatDTO where
declareNamedSchema = toSwagger (toFormatDTO formatJson)
| null | https://raw.githubusercontent.com/ds-wizard/engine-backend/d392b751192a646064305d3534c57becaa229f28/engine-shared/src/Shared/Api/Resource/DocumentTemplate/DocumentTemplateFormatSM.hs | haskell | module Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatSM where
import Data.Swagger
import Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatDTO
import Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatJM ()
import Shared.Database.Migration.Development.DocumentTemplate.Data.DocumentTemplateFormats
import Shared.Service.DocumentTemplate.DocumentTemplateMapper
import Shared.Util.Swagger
instance ToSchema DocumentTemplateFormatDTO where
declareNamedSchema = toSwagger (toFormatDTO formatJson)
|
|
240f2e36657f1341c292d0b81fd24559402c623c1ad95501d7c8fcac07e629fd | kepler16/gx.cljc | impl.cljc | (ns k16.gx.beta.impl
(:refer-clojure :exclude [ref])
#?(:cljs (:require-macros [k16.gx.beta.error-context :refer [with-err-ctx]]))
(:require [clojure.walk :as walk]
[k16.gx.beta.errors :as gx.err]
[k16.gx.beta.schema :as gx.schema]
#?(:cljs [clojure.string :as string])
#?(:cljs [k16.gx.beta.registry :as gx.reg])
#?(:clj [k16.gx.beta.error-context :refer [with-err-ctx]])))
(defn sccs
"Returns a topologically sorted list of strongly connected components.
Tarjan's algorithm."
([g] (sccs g []))
([g sccs-init]
(let [strong-connect
(fn strong-connect [acc v]
(let [acc (-> acc
(assoc-in [:idxs v] (:idx acc))
(assoc-in [:low-links v] (:idx acc))
(update :idx inc)
(update :S conj v)
(assoc-in [:on-stack v] true))
acc (reduce
(fn [acc w]
(cond
(not (get-in acc [:idxs w]))
(let [acc (strong-connect acc w)]
(update-in acc
[:low-links v]
min
(get-in acc [:low-links w])))
(get-in acc [:on-stack w])
(update-in acc
[:low-links v]
min
(get-in acc [:idxs w]))
:else acc))
acc
(get g v))]
(if (= (get-in acc [:idxs v]) (get-in acc [:low-links v]))
(let [[S on-stack scc] (loop [S (:S acc)
on-stack (:on-stack acc)
scc #{}]
(let [w (peek S)
S (pop S)
on-stack (dissoc on-stack w)
scc (conj scc w)]
(if (= v w)
[S on-stack scc]
(recur S on-stack scc))))]
(-> acc
(assoc :S S
:on-stack on-stack)
(update :sccs conj scc)))
acc)))]
(:sccs
(reduce (fn [acc v]
(if-not (contains? (:idxs acc) v) (strong-connect acc v) acc))
{:S ()
:idx 0
:sccs sccs-init}
(keys g))))))
(defn cycles
[sccs g]
(filter #(or (>= (count %) 2)
(get-in g [(first %) (first %)]))
sccs))
(defn dependency-errors
[g sccs]
(concat
(mapcat
(fn [[k v]]
(seq
(map (fn [does-not-exist]
{:type :missing
:from k
:to does-not-exist})
(remove #(contains? g %) v))))
g)
(map (fn [cycle] {:type :cycle :between cycle})
(cycles sccs g))))
(defn human-render-dependency-error
[dependency-error]
(case (:type dependency-error)
:missing
(str (:from dependency-error) " depends on " (:to dependency-error)
", but " (:to dependency-error) " doesn't exist")
:cycle
(str "circular "
(apply str (interpose
" -> "
(concat
(reverse (:between dependency-error))
[(first (reverse (:between dependency-error)))]))))
(pr-str dependency-error)))
#?(:cljs (defn resolve-exported-symbol
[sym-str]
(let [path (-> sym-str
(string/replace #"-" "_")
(string/replace #"/" ".")
(string/split #"\."))]
(loop [p path
obj goog.global]
(if (and (seq p) obj)
(recur (rest p) (aget obj (first p)))
obj)))))
#?(:cljs (defn sym->js-resolve [sym]
(let [ssym (str sym)]
(or (get @gx.reg/registry* ssym)
(resolve-exported-symbol ssym)))))
(defn namespace-symbol
"Returns symbol unchanged if it has a namespace, or with clojure.core as it's
namespace otherwise."
[sym]
(cond
(namespace sym)
#?(:clj sym :cljs (sym->js-resolve sym))
:else
#?(:clj (symbol "clojure.core" (name sym))
:cljs ((ns-publics 'cljs.core) sym))))
(def mergable? (every-pred map? (complement record?)))
(defn merger
[left right]
(if (mergable? left right)
(merge-with merger left right)
(or right left)))
(defn deep-merge
"Recursively merges maps."
[& maps]
(reduce merger maps))
(def locals #{'gx/ref 'gx/ref-keys})
(defn local-form?
[form]
(and (seq? form)
(locals (first form))))
(defn parse-local
[env form]
(condp = (first form)
'gx/ref (get env (second form))
'gx/ref-keys (select-keys env (second form))))
(defn postwalk-evaluate
"A postwalk runtime signal processor evaluator, works most of the time.
Doesn't support special symbols and macros, basically just function application.
For cljs, consider compiled components or sci-evaluator, would require allowing
for swappable evaluation stategies. Point to docs, to inform how to swap evaluator,
or alternative ways to specify functions (that get compiled) that can be used."
[props form initial-form]
(walk/postwalk
(fn [x]
(cond
(local-form? x)
(parse-local props x)
(and (seq? x) (ifn? (first x)))
(try
(apply (first x) (rest x))
(catch #?(:clj Throwable :cljs :default) e
(gx.err/throw-gx-err
(str "Form evaluate error:\n\t>> " initial-form) {:props props} e)))
:else x))
form))
(defn resolve-symbol
[sym]
(if (symbol? sym)
(if-let [nss #?(:cljs (namespace-symbol sym)
:clj (try
(some->> sym
(namespace-symbol)
(requiring-resolve)
(var-get))
(catch Throwable e
(gx.err/add-err-cause
{:title :symbol-cannot-be-resolved
:data sym
:exception e}))))]
nss
(gx.err/add-err-cause {:title :symbol-cannot-be-resolved
:data sym}))
sym))
(defn form->runnable [form-def]
(let [props* (atom #{})
resolved-form
(->> form-def
(walk/postwalk
(fn [sub-form]
(cond
(locals sub-form) sub-form
(local-form? sub-form)
(do (swap! props* concat (-> sub-form rest flatten))
sub-form)
(special-symbol? sub-form)
(gx.err/throw-gx-err "Special forms are not supported"
{:form-def form-def
:token sub-form})
(resolve-symbol sub-form) (resolve-symbol sub-form)
(symbol? sub-form)
(gx.err/throw-gx-err "Unable to resolve symbol"
{:form-def form-def
:token sub-form})
:else sub-form))))]
{:env @props*
:initial-form form-def
:form resolved-form}))
(defn push-down-props
[{{:keys [props-signals]} :normalize} {:gx/keys [props] :as node-def}]
(if (and props (seq props-signals))
(reduce-kv (fn [m k v]
(if (and (contains? props-signals k)
(not (:gx/props v)))
(assoc-in m [k :gx/props] props)
m))
node-def
node-def)
node-def))
(defn remap-signals
[from-signals to-signals]
(cond
(and (seq from-signals) (seq to-signals))
(if from-signals
(->> to-signals
(map (fn [[k v]]
[k (v from-signals)]))
(into {}))
to-signals)
(seq from-signals) from-signals
:else to-signals))
(defn flatten-component
"Flattens nested components by creating one root component using
signal mappings from context (if any)"
[context root-component]
(let [root-component (assoc root-component
:gx/signal-mapping
(or
(:gx/signal-mapping root-component)
(:signal-mapping context)))]
(loop [{:gx/keys [component signal-mapping] :as current} root-component]
(if-let [nested component]
(recur (update nested :gx/signal-mapping
#(remap-signals % signal-mapping)))
(if-let [mapping (seq (:gx/signal-mapping current))]
(->> mapping
(map (fn [[k v]]
[k (get current v)]))
(into root-component))
(dissoc current :gx/signal-mapping))))))
(defn resolve-component
"Resolve component by it's symbol and validate against malli schema"
[context component]
(when component
(with-err-ctx {:error-type :normalize-node-component}
(let [resolved (some->> component
(resolve-symbol)
(flatten-component context))
[issues schema] (when resolved
(gx.schema/validate-component context resolved))]
(cond
(not resolved)
(gx.err/throw-gx-err "Component could not be resolved"
{:component component})
issues
(gx.err/throw-gx-err "Component schema error"
{:component resolved
:component-schema schema
:schema-error (set issues)})
:else resolved)))))
| null | https://raw.githubusercontent.com/kepler16/gx.cljc/fdd8103ce5a1fcf7fc974b82493fab0b9b53002f/src/k16/gx/beta/impl.cljc | clojure | (ns k16.gx.beta.impl
(:refer-clojure :exclude [ref])
#?(:cljs (:require-macros [k16.gx.beta.error-context :refer [with-err-ctx]]))
(:require [clojure.walk :as walk]
[k16.gx.beta.errors :as gx.err]
[k16.gx.beta.schema :as gx.schema]
#?(:cljs [clojure.string :as string])
#?(:cljs [k16.gx.beta.registry :as gx.reg])
#?(:clj [k16.gx.beta.error-context :refer [with-err-ctx]])))
(defn sccs
"Returns a topologically sorted list of strongly connected components.
Tarjan's algorithm."
([g] (sccs g []))
([g sccs-init]
(let [strong-connect
(fn strong-connect [acc v]
(let [acc (-> acc
(assoc-in [:idxs v] (:idx acc))
(assoc-in [:low-links v] (:idx acc))
(update :idx inc)
(update :S conj v)
(assoc-in [:on-stack v] true))
acc (reduce
(fn [acc w]
(cond
(not (get-in acc [:idxs w]))
(let [acc (strong-connect acc w)]
(update-in acc
[:low-links v]
min
(get-in acc [:low-links w])))
(get-in acc [:on-stack w])
(update-in acc
[:low-links v]
min
(get-in acc [:idxs w]))
:else acc))
acc
(get g v))]
(if (= (get-in acc [:idxs v]) (get-in acc [:low-links v]))
(let [[S on-stack scc] (loop [S (:S acc)
on-stack (:on-stack acc)
scc #{}]
(let [w (peek S)
S (pop S)
on-stack (dissoc on-stack w)
scc (conj scc w)]
(if (= v w)
[S on-stack scc]
(recur S on-stack scc))))]
(-> acc
(assoc :S S
:on-stack on-stack)
(update :sccs conj scc)))
acc)))]
(:sccs
(reduce (fn [acc v]
(if-not (contains? (:idxs acc) v) (strong-connect acc v) acc))
{:S ()
:idx 0
:sccs sccs-init}
(keys g))))))
(defn cycles
[sccs g]
(filter #(or (>= (count %) 2)
(get-in g [(first %) (first %)]))
sccs))
(defn dependency-errors
[g sccs]
(concat
(mapcat
(fn [[k v]]
(seq
(map (fn [does-not-exist]
{:type :missing
:from k
:to does-not-exist})
(remove #(contains? g %) v))))
g)
(map (fn [cycle] {:type :cycle :between cycle})
(cycles sccs g))))
(defn human-render-dependency-error
[dependency-error]
(case (:type dependency-error)
:missing
(str (:from dependency-error) " depends on " (:to dependency-error)
", but " (:to dependency-error) " doesn't exist")
:cycle
(str "circular "
(apply str (interpose
" -> "
(concat
(reverse (:between dependency-error))
[(first (reverse (:between dependency-error)))]))))
(pr-str dependency-error)))
#?(:cljs (defn resolve-exported-symbol
[sym-str]
(let [path (-> sym-str
(string/replace #"-" "_")
(string/replace #"/" ".")
(string/split #"\."))]
(loop [p path
obj goog.global]
(if (and (seq p) obj)
(recur (rest p) (aget obj (first p)))
obj)))))
#?(:cljs (defn sym->js-resolve [sym]
(let [ssym (str sym)]
(or (get @gx.reg/registry* ssym)
(resolve-exported-symbol ssym)))))
(defn namespace-symbol
"Returns symbol unchanged if it has a namespace, or with clojure.core as it's
namespace otherwise."
[sym]
(cond
(namespace sym)
#?(:clj sym :cljs (sym->js-resolve sym))
:else
#?(:clj (symbol "clojure.core" (name sym))
:cljs ((ns-publics 'cljs.core) sym))))
(def mergable? (every-pred map? (complement record?)))
(defn merger
[left right]
(if (mergable? left right)
(merge-with merger left right)
(or right left)))
(defn deep-merge
"Recursively merges maps."
[& maps]
(reduce merger maps))
(def locals #{'gx/ref 'gx/ref-keys})
(defn local-form?
[form]
(and (seq? form)
(locals (first form))))
(defn parse-local
[env form]
(condp = (first form)
'gx/ref (get env (second form))
'gx/ref-keys (select-keys env (second form))))
(defn postwalk-evaluate
"A postwalk runtime signal processor evaluator, works most of the time.
Doesn't support special symbols and macros, basically just function application.
For cljs, consider compiled components or sci-evaluator, would require allowing
for swappable evaluation stategies. Point to docs, to inform how to swap evaluator,
or alternative ways to specify functions (that get compiled) that can be used."
[props form initial-form]
(walk/postwalk
(fn [x]
(cond
(local-form? x)
(parse-local props x)
(and (seq? x) (ifn? (first x)))
(try
(apply (first x) (rest x))
(catch #?(:clj Throwable :cljs :default) e
(gx.err/throw-gx-err
(str "Form evaluate error:\n\t>> " initial-form) {:props props} e)))
:else x))
form))
(defn resolve-symbol
[sym]
(if (symbol? sym)
(if-let [nss #?(:cljs (namespace-symbol sym)
:clj (try
(some->> sym
(namespace-symbol)
(requiring-resolve)
(var-get))
(catch Throwable e
(gx.err/add-err-cause
{:title :symbol-cannot-be-resolved
:data sym
:exception e}))))]
nss
(gx.err/add-err-cause {:title :symbol-cannot-be-resolved
:data sym}))
sym))
(defn form->runnable [form-def]
(let [props* (atom #{})
resolved-form
(->> form-def
(walk/postwalk
(fn [sub-form]
(cond
(locals sub-form) sub-form
(local-form? sub-form)
(do (swap! props* concat (-> sub-form rest flatten))
sub-form)
(special-symbol? sub-form)
(gx.err/throw-gx-err "Special forms are not supported"
{:form-def form-def
:token sub-form})
(resolve-symbol sub-form) (resolve-symbol sub-form)
(symbol? sub-form)
(gx.err/throw-gx-err "Unable to resolve symbol"
{:form-def form-def
:token sub-form})
:else sub-form))))]
{:env @props*
:initial-form form-def
:form resolved-form}))
(defn push-down-props
[{{:keys [props-signals]} :normalize} {:gx/keys [props] :as node-def}]
(if (and props (seq props-signals))
(reduce-kv (fn [m k v]
(if (and (contains? props-signals k)
(not (:gx/props v)))
(assoc-in m [k :gx/props] props)
m))
node-def
node-def)
node-def))
(defn remap-signals
[from-signals to-signals]
(cond
(and (seq from-signals) (seq to-signals))
(if from-signals
(->> to-signals
(map (fn [[k v]]
[k (v from-signals)]))
(into {}))
to-signals)
(seq from-signals) from-signals
:else to-signals))
(defn flatten-component
"Flattens nested components by creating one root component using
signal mappings from context (if any)"
[context root-component]
(let [root-component (assoc root-component
:gx/signal-mapping
(or
(:gx/signal-mapping root-component)
(:signal-mapping context)))]
(loop [{:gx/keys [component signal-mapping] :as current} root-component]
(if-let [nested component]
(recur (update nested :gx/signal-mapping
#(remap-signals % signal-mapping)))
(if-let [mapping (seq (:gx/signal-mapping current))]
(->> mapping
(map (fn [[k v]]
[k (get current v)]))
(into root-component))
(dissoc current :gx/signal-mapping))))))
(defn resolve-component
"Resolve component by it's symbol and validate against malli schema"
[context component]
(when component
(with-err-ctx {:error-type :normalize-node-component}
(let [resolved (some->> component
(resolve-symbol)
(flatten-component context))
[issues schema] (when resolved
(gx.schema/validate-component context resolved))]
(cond
(not resolved)
(gx.err/throw-gx-err "Component could not be resolved"
{:component component})
issues
(gx.err/throw-gx-err "Component schema error"
{:component resolved
:component-schema schema
:schema-error (set issues)})
:else resolved)))))
|
|
7236d404ea9376a8e5feceab066a4058598b6bbc6bca6d7bb370333ec46b2af1 | riemann/riemann | logging.clj | (ns riemann.logging
(:import (org.slf4j
LoggerFactory)
(ch.qos.logback.classic
Level
Logger)
(ch.qos.logback.core
ConsoleAppender
FileAppender)
(ch.qos.logback.core.util
FileSize)
(ch.qos.logback.core.encoder
LayoutWrappingEncoder)
(ch.qos.logback.core.rolling
RollingFileAppender
TimeBasedRollingPolicy
FixedWindowRollingPolicy
SizeBasedTriggeringPolicy)
(ch.qos.logback.classic.encoder
PatternLayoutEncoder)
(net.logstash.logback
JSONEventLayoutV0
JSONEventLayoutV1)
(net.logstash.logback.encoder
LogstashEncoder)
(java.net URL)
(ch.qos.logback.classic.joran JoranConfigurator))
(:require wall.hack))
(defn get-logger
([]
(LoggerFactory/getLogger Logger/ROOT_LOGGER_NAME))
([logger]
(LoggerFactory/getLogger logger)))
(defn- get-context
[]
(LoggerFactory/getILoggerFactory))
(defmulti encoder identity)
(defmethod encoder :json
[type]
(LogstashEncoder.))
(defmethod encoder :json-event
[type]
(encoder :json-event-v0))
(defmethod encoder :json-event-v0
[type]
(doto (LayoutWrappingEncoder.)
(.setLayout (JSONEventLayoutV0.))))
(defmethod encoder :json-event-v1
[type]
(doto (LayoutWrappingEncoder.)
(.setLayout (JSONEventLayoutV1.))))
(defmethod encoder :riemann
[type]
(doto (PatternLayoutEncoder.)
(.setPattern "%p [%d] %t - %c - %m%n%throwable")))
(defmethod encoder :default
[type]
(binding [*out* *err*]
(println "invalid logging layout specified: " type))
(encoder :riemann))
(defn set-level
"Set the level for the given logger, by string name.
Example:
(set-level Level/INFO)
or
(set-level \"riemann.client\", Level/DEBUG)"
([level]
(. (get-logger)
(setLevel level)))
([logger level]
(. (get-logger logger)
(setLevel level))))
(defmacro suppress
"Turns off logging for the evaluation of body."
[loggers & body]
(let [[logger & more] (flatten [loggers])]
(if logger
`(let [old-level# (.getLevel (get-logger ~logger))]
(try
(set-level ~logger Level/ERROR)
(suppress ~more ~@body)
(finally
(set-level ~logger old-level#))))
`(do ~@body))))
(defn configure-from-file
"Configure logging from a configuration file"
[context config-file]
(doto (JoranConfigurator.)
(.setContext context)
(.doConfigure (URL. config-file))))
(defn configure-from-opts
"Configure logging from opts"
[logger context opts]
(let [{:keys [console?
console-layout
file
file-layout
files
rotate-count
logsize-rotate]
:or {console? true
console-layout :riemann
file-layout :riemann}} opts]
(do
(when console?
(let [encoder (doto (encoder console-layout)
(.setContext context)
(.start))
console-appender (doto (ConsoleAppender.)
(.setContext context)
(.setEncoder encoder)
(.start))]
(.addAppender logger console-appender)))
(doseq [{:keys [file file-layout]}
(conj files {:file file :file-layout file-layout})
:when file]
(if logsize-rotate
(let [encoder (doto (encoder file-layout)
(.setContext context)
(.start))
log-appender (doto (RollingFileAppender.)
(.setFile file)
(.setContext context)
(.setEncoder encoder))
rolling-policy (doto (FixedWindowRollingPolicy.)
(.setMinIndex 1)
(.setMaxIndex (or rotate-count 10))
(.setFileNamePattern
(str file ".%i"))
(.setParent log-appender)
(.setContext context)
(.start))
triggering-policy (doto (SizeBasedTriggeringPolicy.)
(.setMaxFileSize (FileSize. logsize-rotate))
(.setContext context)
(.start))
log-appender (doto log-appender
(.setRollingPolicy rolling-policy)
(.setTriggeringPolicy triggering-policy)
(.start))]
(.addAppender logger log-appender))
(let [encoder (doto (encoder file-layout)
(.setContext context)
(.start))
log-appender (doto (RollingFileAppender.)
(.setFile file)
(.setContext context)
(.setEncoder encoder))
rolling-policy (doto (TimeBasedRollingPolicy.)
(.setMaxHistory (or rotate-count 10))
(.setFileNamePattern
(str file ".%d{yyyy-MM-dd}"))
(.setParent log-appender)
(.setContext context)
(.start))
log-appender (doto log-appender
(.setRollingPolicy rolling-policy)
(.start))]
(.addAppender logger log-appender))))
(set-level Level/INFO)
(set-level "riemann.client" Level/DEBUG)
(set-level "riemann.server" Level/DEBUG)
(set-level "riemann.streams" Level/DEBUG)
(set-level "riemann.graphite" Level/DEBUG))))
(defn init
"Initialize logging. You will probably call this from the config file. You can
call init more than once; its changes are destructive. Options:
- :console? Determine if logging should happen on the console.
- :console-layout Specifying console layout.
- :file The file to log to. If omitted, log to console only.
- :file-layout Specifying file layout.
- :files A list of files to log to. If provided, a seq or vector is
expected containing maps with a :file and an :file-layout
- :logsize-rotate If size (in bytes) is specified use size based rotation
otherwise use default time based rotation.
- :rotate-count Specifying the number of rotated files to keep. If omitted,
keep last 10 rotated files.
Layout can be :riemann or :json. If layout is omitted, the default layout
:riemann will be used.
For example:
```clojure
; Basic console logging
(init)
; Also log to a file
(init {:file \"/var/log/riemann.log\"})
; With rotation
(init {:console? false :file \"/var/log/riemann.log\" :rotate-count 10})
; Rotate at a certain size
(init {:console? false
:file \"/var/log/riemann.log\"
:logsize-rotate 1000000000})
; Multiple files in different formats
(init {:console? false
:files [{:file \"/var/log/riemann.log\"},
{:file \"/var/log/riemann.json.log\" :file-layout :json}]
:logsize-rotate 100
:rotate-count 5})
```"
([] (init {}))
([opts]
(let [logger (get-logger)
context (get-context)]
(.detachAndStopAllAppenders logger)
(if-let [config-file (System/getProperty "logback.configurationFile")]
(configure-from-file context config-file)
(configure-from-opts logger context opts)))))
(defn nice-syntax-error
"Rewrites clojure.lang.LispReader$ReaderException to have error messages that
might actually help someone."
([e] (nice-syntax-error e "(no file)"))
([e file]
; Lord help me.
(let [line (wall.hack/field (class e) :line e)
msg (.getMessage (or (.getCause e) e))]
(RuntimeException. (str "Syntax error (" file ":" line ") " msg)))))
| null | https://raw.githubusercontent.com/riemann/riemann/1649687c0bd913c378701ee0b964a9863bde7c7c/src/riemann/logging.clj | clojure | its changes are destructive. Options:
Basic console logging
Also log to a file
With rotation
Rotate at a certain size
Multiple files in different formats
Lord help me. | (ns riemann.logging
(:import (org.slf4j
LoggerFactory)
(ch.qos.logback.classic
Level
Logger)
(ch.qos.logback.core
ConsoleAppender
FileAppender)
(ch.qos.logback.core.util
FileSize)
(ch.qos.logback.core.encoder
LayoutWrappingEncoder)
(ch.qos.logback.core.rolling
RollingFileAppender
TimeBasedRollingPolicy
FixedWindowRollingPolicy
SizeBasedTriggeringPolicy)
(ch.qos.logback.classic.encoder
PatternLayoutEncoder)
(net.logstash.logback
JSONEventLayoutV0
JSONEventLayoutV1)
(net.logstash.logback.encoder
LogstashEncoder)
(java.net URL)
(ch.qos.logback.classic.joran JoranConfigurator))
(:require wall.hack))
(defn get-logger
([]
(LoggerFactory/getLogger Logger/ROOT_LOGGER_NAME))
([logger]
(LoggerFactory/getLogger logger)))
(defn- get-context
[]
(LoggerFactory/getILoggerFactory))
(defmulti encoder identity)
(defmethod encoder :json
[type]
(LogstashEncoder.))
(defmethod encoder :json-event
[type]
(encoder :json-event-v0))
(defmethod encoder :json-event-v0
[type]
(doto (LayoutWrappingEncoder.)
(.setLayout (JSONEventLayoutV0.))))
(defmethod encoder :json-event-v1
[type]
(doto (LayoutWrappingEncoder.)
(.setLayout (JSONEventLayoutV1.))))
(defmethod encoder :riemann
[type]
(doto (PatternLayoutEncoder.)
(.setPattern "%p [%d] %t - %c - %m%n%throwable")))
(defmethod encoder :default
[type]
(binding [*out* *err*]
(println "invalid logging layout specified: " type))
(encoder :riemann))
(defn set-level
"Set the level for the given logger, by string name.
Example:
(set-level Level/INFO)
or
(set-level \"riemann.client\", Level/DEBUG)"
([level]
(. (get-logger)
(setLevel level)))
([logger level]
(. (get-logger logger)
(setLevel level))))
(defmacro suppress
"Turns off logging for the evaluation of body."
[loggers & body]
(let [[logger & more] (flatten [loggers])]
(if logger
`(let [old-level# (.getLevel (get-logger ~logger))]
(try
(set-level ~logger Level/ERROR)
(suppress ~more ~@body)
(finally
(set-level ~logger old-level#))))
`(do ~@body))))
(defn configure-from-file
"Configure logging from a configuration file"
[context config-file]
(doto (JoranConfigurator.)
(.setContext context)
(.doConfigure (URL. config-file))))
(defn configure-from-opts
"Configure logging from opts"
[logger context opts]
(let [{:keys [console?
console-layout
file
file-layout
files
rotate-count
logsize-rotate]
:or {console? true
console-layout :riemann
file-layout :riemann}} opts]
(do
(when console?
(let [encoder (doto (encoder console-layout)
(.setContext context)
(.start))
console-appender (doto (ConsoleAppender.)
(.setContext context)
(.setEncoder encoder)
(.start))]
(.addAppender logger console-appender)))
(doseq [{:keys [file file-layout]}
(conj files {:file file :file-layout file-layout})
:when file]
(if logsize-rotate
(let [encoder (doto (encoder file-layout)
(.setContext context)
(.start))
log-appender (doto (RollingFileAppender.)
(.setFile file)
(.setContext context)
(.setEncoder encoder))
rolling-policy (doto (FixedWindowRollingPolicy.)
(.setMinIndex 1)
(.setMaxIndex (or rotate-count 10))
(.setFileNamePattern
(str file ".%i"))
(.setParent log-appender)
(.setContext context)
(.start))
triggering-policy (doto (SizeBasedTriggeringPolicy.)
(.setMaxFileSize (FileSize. logsize-rotate))
(.setContext context)
(.start))
log-appender (doto log-appender
(.setRollingPolicy rolling-policy)
(.setTriggeringPolicy triggering-policy)
(.start))]
(.addAppender logger log-appender))
(let [encoder (doto (encoder file-layout)
(.setContext context)
(.start))
log-appender (doto (RollingFileAppender.)
(.setFile file)
(.setContext context)
(.setEncoder encoder))
rolling-policy (doto (TimeBasedRollingPolicy.)
(.setMaxHistory (or rotate-count 10))
(.setFileNamePattern
(str file ".%d{yyyy-MM-dd}"))
(.setParent log-appender)
(.setContext context)
(.start))
log-appender (doto log-appender
(.setRollingPolicy rolling-policy)
(.start))]
(.addAppender logger log-appender))))
(set-level Level/INFO)
(set-level "riemann.client" Level/DEBUG)
(set-level "riemann.server" Level/DEBUG)
(set-level "riemann.streams" Level/DEBUG)
(set-level "riemann.graphite" Level/DEBUG))))
(defn init
"Initialize logging. You will probably call this from the config file. You can
- :console? Determine if logging should happen on the console.
- :console-layout Specifying console layout.
- :file The file to log to. If omitted, log to console only.
- :file-layout Specifying file layout.
- :files A list of files to log to. If provided, a seq or vector is
expected containing maps with a :file and an :file-layout
- :logsize-rotate If size (in bytes) is specified use size based rotation
otherwise use default time based rotation.
- :rotate-count Specifying the number of rotated files to keep. If omitted,
keep last 10 rotated files.
Layout can be :riemann or :json. If layout is omitted, the default layout
:riemann will be used.
For example:
```clojure
(init)
(init {:file \"/var/log/riemann.log\"})
(init {:console? false :file \"/var/log/riemann.log\" :rotate-count 10})
(init {:console? false
:file \"/var/log/riemann.log\"
:logsize-rotate 1000000000})
(init {:console? false
:files [{:file \"/var/log/riemann.log\"},
{:file \"/var/log/riemann.json.log\" :file-layout :json}]
:logsize-rotate 100
:rotate-count 5})
```"
([] (init {}))
([opts]
(let [logger (get-logger)
context (get-context)]
(.detachAndStopAllAppenders logger)
(if-let [config-file (System/getProperty "logback.configurationFile")]
(configure-from-file context config-file)
(configure-from-opts logger context opts)))))
(defn nice-syntax-error
"Rewrites clojure.lang.LispReader$ReaderException to have error messages that
might actually help someone."
([e] (nice-syntax-error e "(no file)"))
([e file]
(let [line (wall.hack/field (class e) :line e)
msg (.getMessage (or (.getCause e) e))]
(RuntimeException. (str "Syntax error (" file ":" line ") " msg)))))
|
dbd95e2a65d61f146144cf6660ca696c0e65b4bebea2df903bcfeca44af4e5fb | static-analysis-engineering/codehawk | bCHExtractInvariants.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 ( c ) 2021 - 2023 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020 Henny Sipma
Copyright (c) 2021-2023 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHAtlas
(* bchlib *)
open BCHLibTypes
val extract_ranges:
function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit
val extract_linear_equalities:
function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit
val extract_valuesets:
function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/15765b4be65024f1687ccc3cc7b645347ce72063/CodeHawk/CHB/bchanalyze/bCHExtractInvariants.mli | ocaml | bchlib | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 ( c ) 2021 - 2023 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020 Henny Sipma
Copyright (c) 2021-2023 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHAtlas
open BCHLibTypes
val extract_ranges:
function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit
val extract_linear_equalities:
function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit
val extract_valuesets:
function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit
|
450975b99cc112f44795aedafa6829ce837a9d9a708bc5f483632d8668fb19d0 | tolysz/ghcjs-stack | GlobalFlags.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE ExistentialQuantification #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE RecordWildCards #
module Distribution.Client.GlobalFlags (
GlobalFlags(..)
, defaultGlobalFlags
, RepoContext(..)
, withRepoContext
, withRepoContext'
) where
import Distribution.Client.Types
( Repo(..), RemoteRepo(..) )
import Distribution.Compat.Semigroup
import Distribution.Simple.Setup
( Flag(..), fromFlag, flagToMaybe )
import Distribution.Utils.NubList
( NubList, fromNubList )
import Distribution.Client.HttpUtils
( HttpTransport, configureTransport )
import Distribution.Verbosity
( Verbosity )
import Distribution.Simple.Utils
( info )
import Data.Maybe
( fromMaybe )
import Control.Concurrent
( MVar, newMVar, modifyMVar )
import Control.Exception
( throwIO )
import Control.Monad
( when )
import System.FilePath
( (</>) )
import Network.URI
( uriScheme, uriPath )
import Data.Map
( Map )
import qualified Data.Map as Map
import GHC.Generics ( Generic )
import qualified Hackage.Security.Client as Sec
import qualified Hackage.Security.Util.Path as Sec
import qualified Hackage.Security.Util.Pretty as Sec
import qualified Hackage.Security.Client.Repository.Cache as Sec
import qualified Hackage.Security.Client.Repository.Local as Sec.Local
import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote
import qualified Distribution.Client.Security.HTTP as Sec.HTTP
-- ------------------------------------------------------------
-- * Global flags
-- ------------------------------------------------------------
-- | Flags that apply at the top level, not to any sub-command.
data GlobalFlags = GlobalFlags {
globalVersion :: Flag Bool,
globalNumericVersion :: Flag Bool,
globalConfigFile :: Flag FilePath,
globalSandboxConfigFile :: Flag FilePath,
globalConstraintsFile :: Flag FilePath,
globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers.
globalCacheDir :: Flag FilePath,
globalLocalRepos :: NubList FilePath,
globalLogsDir :: Flag FilePath,
globalWorldFile :: Flag FilePath,
globalRequireSandbox :: Flag Bool,
globalIgnoreSandbox :: Flag Bool,
globalIgnoreExpiry :: Flag Bool, -- ^ Ignore security expiry dates
globalHttpTransport :: Flag String
} deriving Generic
defaultGlobalFlags :: GlobalFlags
defaultGlobalFlags = GlobalFlags {
globalVersion = Flag False,
globalNumericVersion = Flag False,
globalConfigFile = mempty,
globalSandboxConfigFile = mempty,
globalConstraintsFile = mempty,
globalRemoteRepos = mempty,
globalCacheDir = mempty,
globalLocalRepos = mempty,
globalLogsDir = mempty,
globalWorldFile = mempty,
globalRequireSandbox = Flag False,
globalIgnoreSandbox = Flag False,
globalIgnoreExpiry = Flag False,
globalHttpTransport = mempty
}
instance Monoid GlobalFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup GlobalFlags where
(<>) = gmappend
-- ------------------------------------------------------------
-- * Repo context
-- ------------------------------------------------------------
-- | Access to repositories
data RepoContext = RepoContext {
-- | All user-specified repositories
repoContextRepos :: [Repo]
-- | Get the HTTP transport
--
The transport will be initialized on the first call to this function .
--
-- NOTE: It is important that we don't eagerly initialize the transport.
-- Initializing the transport is not free, and especially in contexts where
-- we don't know a-priori whether or not we need the transport (for instance
-- when using cabal in "nix mode") incurring the overhead of transport
initialization on _ every _ invocation ( eg ) is undesirable .
, repoContextGetTransport :: IO HttpTransport
-- | Get the (initialized) secure repo
--
-- (the 'Repo' type itself is stateless and must remain so, because it
-- must be serializable)
, repoContextWithSecureRepo :: forall a.
Repo
-> (forall down. Sec.Repository down -> IO a)
-> IO a
-- | Should we ignore expiry times (when checking security)?
, repoContextIgnoreExpiry :: Bool
}
-- | Wrapper around 'Repository', hiding the type argument
data SecureRepo = forall down. SecureRepo (Sec.Repository down)
withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a
withRepoContext verbosity globalFlags =
withRepoContext'
verbosity
(fromNubList (globalRemoteRepos globalFlags))
(fromNubList (globalLocalRepos globalFlags))
(fromFlag (globalCacheDir globalFlags))
(flagToMaybe (globalHttpTransport globalFlags))
(flagToMaybe (globalIgnoreExpiry globalFlags))
withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath]
-> FilePath -> Maybe String -> Maybe Bool
-> (RepoContext -> IO a)
-> IO a
withRepoContext' verbosity remoteRepos localRepos
sharedCacheDir httpTransport ignoreExpiry = \callback -> do
transportRef <- newMVar Nothing
let httpLib = Sec.HTTP.transportAdapter
verbosity
(getTransport transportRef)
initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' ->
callback RepoContext {
repoContextRepos = allRemoteRepos
++ map RepoLocal localRepos
, repoContextGetTransport = getTransport transportRef
, repoContextWithSecureRepo = withSecureRepo secureRepos'
, repoContextIgnoreExpiry = fromMaybe False ignoreExpiry
}
where
secureRemoteRepos =
[ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ]
allRemoteRepos =
[ (if isSecure then RepoSecure else RepoRemote) remote cacheDir
| remote <- remoteRepos
, let cacheDir = sharedCacheDir </> remoteRepoName remote
isSecure = remoteRepoSecure remote == Just True
]
getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport
getTransport transportRef =
modifyMVar transportRef $ \mTransport -> do
transport <- case mTransport of
Just tr -> return tr
Nothing -> configureTransport verbosity httpTransport
return (Just transport, transport)
withSecureRepo :: Map Repo SecureRepo
-> Repo
-> (forall down. Sec.Repository down -> IO a)
-> IO a
withSecureRepo secureRepos repo callback =
case Map.lookup repo secureRepos of
Just (SecureRepo secureRepo) -> callback secureRepo
Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo"
-- | Initialize the provided secure repositories
--
-- Assumed invariant: `remoteRepoSecure` should be set for all these repos.
initSecureRepos :: forall a. Verbosity
-> Sec.HTTP.HttpLib
-> [(RemoteRepo, FilePath)]
-> (Map Repo SecureRepo -> IO a)
-> IO a
initSecureRepos verbosity httpLib repos callback = go Map.empty repos
where
go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a
go !acc [] = callback acc
go !acc ((r,cacheDir):rs) = do
cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir
initSecureRepo verbosity httpLib r cachePath $ \r' ->
go (Map.insert (RepoSecure r cacheDir) r' acc) rs
-- | Initialize the given secure repo
--
-- The security library has its own concept of a "local" repository, distinct
from - install@ 's ; these are secure repositories , but live in the local
-- file system. We use the convention that these repositories are identified by
-- URLs of the form @file:/path/to/local/repo@.
initSecureRepo :: Verbosity
-> Sec.HTTP.HttpLib
-> RemoteRepo -- ^ Secure repo ('remoteRepoSecure' assumed)
-> Sec.Path Sec.Absolute -- ^ Cache dir
-> (SecureRepo -> IO a) -- ^ Callback
-> IO a
initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do
withRepo $ \r -> do
requiresBootstrap <- Sec.requiresBootstrap r
when requiresBootstrap $ Sec.uncheckClientErrors $
Sec.bootstrap r
(map Sec.KeyId remoteRepoRootKeys)
(Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold))
callback $ SecureRepo r
where
Initialize local or remote repo depending on the URI
withRepo :: (forall down. Sec.Repository down -> IO a) -> IO a
withRepo callback | uriScheme remoteRepoURI == "file:" = do
dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI)
Sec.Local.withRepository dir
cache
Sec.hackageRepoLayout
Sec.hackageIndexLayout
logTUF
callback
withRepo callback =
Sec.Remote.withRepository httpLib
[remoteRepoURI]
Sec.Remote.defaultRepoOpts
cache
Sec.hackageRepoLayout
Sec.hackageIndexLayout
logTUF
callback
cache :: Sec.Cache
cache = Sec.Cache {
cacheRoot = cachePath
, cacheLayout = Sec.cabalCacheLayout
}
We display any TUF progress only in verbose mode , including any transient
-- verification errors. If verification fails, then the final exception that
-- is thrown will of course be shown.
logTUF :: Sec.LogMessage -> IO ()
logTUF = info verbosity . Sec.pretty
| null | https://raw.githubusercontent.com/tolysz/ghcjs-stack/83d5be83e87286d984e89635d5926702c55b9f29/special/cabal-next/cabal-install/Distribution/Client/GlobalFlags.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE RankNTypes #
------------------------------------------------------------
* Global flags
------------------------------------------------------------
| Flags that apply at the top level, not to any sub-command.
^ Available Hackage servers.
^ Ignore security expiry dates
------------------------------------------------------------
* Repo context
------------------------------------------------------------
| Access to repositories
| All user-specified repositories
| Get the HTTP transport
NOTE: It is important that we don't eagerly initialize the transport.
Initializing the transport is not free, and especially in contexts where
we don't know a-priori whether or not we need the transport (for instance
when using cabal in "nix mode") incurring the overhead of transport
| Get the (initialized) secure repo
(the 'Repo' type itself is stateless and must remain so, because it
must be serializable)
| Should we ignore expiry times (when checking security)?
| Wrapper around 'Repository', hiding the type argument
| Initialize the provided secure repositories
Assumed invariant: `remoteRepoSecure` should be set for all these repos.
| Initialize the given secure repo
The security library has its own concept of a "local" repository, distinct
file system. We use the convention that these repositories are identified by
URLs of the form @file:/path/to/local/repo@.
^ Secure repo ('remoteRepoSecure' assumed)
^ Cache dir
^ Callback
verification errors. If verification fails, then the final exception that
is thrown will of course be shown. | # LANGUAGE DeriveGeneric #
# LANGUAGE ExistentialQuantification #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE RecordWildCards #
module Distribution.Client.GlobalFlags (
GlobalFlags(..)
, defaultGlobalFlags
, RepoContext(..)
, withRepoContext
, withRepoContext'
) where
import Distribution.Client.Types
( Repo(..), RemoteRepo(..) )
import Distribution.Compat.Semigroup
import Distribution.Simple.Setup
( Flag(..), fromFlag, flagToMaybe )
import Distribution.Utils.NubList
( NubList, fromNubList )
import Distribution.Client.HttpUtils
( HttpTransport, configureTransport )
import Distribution.Verbosity
( Verbosity )
import Distribution.Simple.Utils
( info )
import Data.Maybe
( fromMaybe )
import Control.Concurrent
( MVar, newMVar, modifyMVar )
import Control.Exception
( throwIO )
import Control.Monad
( when )
import System.FilePath
( (</>) )
import Network.URI
( uriScheme, uriPath )
import Data.Map
( Map )
import qualified Data.Map as Map
import GHC.Generics ( Generic )
import qualified Hackage.Security.Client as Sec
import qualified Hackage.Security.Util.Path as Sec
import qualified Hackage.Security.Util.Pretty as Sec
import qualified Hackage.Security.Client.Repository.Cache as Sec
import qualified Hackage.Security.Client.Repository.Local as Sec.Local
import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote
import qualified Distribution.Client.Security.HTTP as Sec.HTTP
data GlobalFlags = GlobalFlags {
globalVersion :: Flag Bool,
globalNumericVersion :: Flag Bool,
globalConfigFile :: Flag FilePath,
globalSandboxConfigFile :: Flag FilePath,
globalConstraintsFile :: Flag FilePath,
globalCacheDir :: Flag FilePath,
globalLocalRepos :: NubList FilePath,
globalLogsDir :: Flag FilePath,
globalWorldFile :: Flag FilePath,
globalRequireSandbox :: Flag Bool,
globalIgnoreSandbox :: Flag Bool,
globalHttpTransport :: Flag String
} deriving Generic
defaultGlobalFlags :: GlobalFlags
defaultGlobalFlags = GlobalFlags {
globalVersion = Flag False,
globalNumericVersion = Flag False,
globalConfigFile = mempty,
globalSandboxConfigFile = mempty,
globalConstraintsFile = mempty,
globalRemoteRepos = mempty,
globalCacheDir = mempty,
globalLocalRepos = mempty,
globalLogsDir = mempty,
globalWorldFile = mempty,
globalRequireSandbox = Flag False,
globalIgnoreSandbox = Flag False,
globalIgnoreExpiry = Flag False,
globalHttpTransport = mempty
}
instance Monoid GlobalFlags where
mempty = gmempty
mappend = (<>)
instance Semigroup GlobalFlags where
(<>) = gmappend
data RepoContext = RepoContext {
repoContextRepos :: [Repo]
The transport will be initialized on the first call to this function .
initialization on _ every _ invocation ( eg ) is undesirable .
, repoContextGetTransport :: IO HttpTransport
, repoContextWithSecureRepo :: forall a.
Repo
-> (forall down. Sec.Repository down -> IO a)
-> IO a
, repoContextIgnoreExpiry :: Bool
}
data SecureRepo = forall down. SecureRepo (Sec.Repository down)
withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a
withRepoContext verbosity globalFlags =
withRepoContext'
verbosity
(fromNubList (globalRemoteRepos globalFlags))
(fromNubList (globalLocalRepos globalFlags))
(fromFlag (globalCacheDir globalFlags))
(flagToMaybe (globalHttpTransport globalFlags))
(flagToMaybe (globalIgnoreExpiry globalFlags))
withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath]
-> FilePath -> Maybe String -> Maybe Bool
-> (RepoContext -> IO a)
-> IO a
withRepoContext' verbosity remoteRepos localRepos
sharedCacheDir httpTransport ignoreExpiry = \callback -> do
transportRef <- newMVar Nothing
let httpLib = Sec.HTTP.transportAdapter
verbosity
(getTransport transportRef)
initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' ->
callback RepoContext {
repoContextRepos = allRemoteRepos
++ map RepoLocal localRepos
, repoContextGetTransport = getTransport transportRef
, repoContextWithSecureRepo = withSecureRepo secureRepos'
, repoContextIgnoreExpiry = fromMaybe False ignoreExpiry
}
where
secureRemoteRepos =
[ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ]
allRemoteRepos =
[ (if isSecure then RepoSecure else RepoRemote) remote cacheDir
| remote <- remoteRepos
, let cacheDir = sharedCacheDir </> remoteRepoName remote
isSecure = remoteRepoSecure remote == Just True
]
getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport
getTransport transportRef =
modifyMVar transportRef $ \mTransport -> do
transport <- case mTransport of
Just tr -> return tr
Nothing -> configureTransport verbosity httpTransport
return (Just transport, transport)
withSecureRepo :: Map Repo SecureRepo
-> Repo
-> (forall down. Sec.Repository down -> IO a)
-> IO a
withSecureRepo secureRepos repo callback =
case Map.lookup repo secureRepos of
Just (SecureRepo secureRepo) -> callback secureRepo
Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo"
initSecureRepos :: forall a. Verbosity
-> Sec.HTTP.HttpLib
-> [(RemoteRepo, FilePath)]
-> (Map Repo SecureRepo -> IO a)
-> IO a
initSecureRepos verbosity httpLib repos callback = go Map.empty repos
where
go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a
go !acc [] = callback acc
go !acc ((r,cacheDir):rs) = do
cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir
initSecureRepo verbosity httpLib r cachePath $ \r' ->
go (Map.insert (RepoSecure r cacheDir) r' acc) rs
from - install@ 's ; these are secure repositories , but live in the local
initSecureRepo :: Verbosity
-> Sec.HTTP.HttpLib
-> IO a
initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do
withRepo $ \r -> do
requiresBootstrap <- Sec.requiresBootstrap r
when requiresBootstrap $ Sec.uncheckClientErrors $
Sec.bootstrap r
(map Sec.KeyId remoteRepoRootKeys)
(Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold))
callback $ SecureRepo r
where
Initialize local or remote repo depending on the URI
withRepo :: (forall down. Sec.Repository down -> IO a) -> IO a
withRepo callback | uriScheme remoteRepoURI == "file:" = do
dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI)
Sec.Local.withRepository dir
cache
Sec.hackageRepoLayout
Sec.hackageIndexLayout
logTUF
callback
withRepo callback =
Sec.Remote.withRepository httpLib
[remoteRepoURI]
Sec.Remote.defaultRepoOpts
cache
Sec.hackageRepoLayout
Sec.hackageIndexLayout
logTUF
callback
cache :: Sec.Cache
cache = Sec.Cache {
cacheRoot = cachePath
, cacheLayout = Sec.cabalCacheLayout
}
We display any TUF progress only in verbose mode , including any transient
logTUF :: Sec.LogMessage -> IO ()
logTUF = info verbosity . Sec.pretty
|
82284587cb68b515f27436afd216fb6e1c552c4de4a79ac432832a13af417752 | stassats/lisp-bots | mop.lisp | ((:name "mop"
:description "Metaobject Protocol"
:url-prefix "-MOP/"
:abbreviate t)
("accessor-method-slot-definition" "accessor-method-slot-definition.html")
("add-dependent" "add-dependent.html")
("add-direct-method" "add-direct-method.html")
("add-direct-subclass" "add-direct-subclass.html")
("add-method" "add-method.html")
("allocate-instance" "allocate-instance.html")
("built-in-class" "class-built-in-class.html")
("class" "class-class.html")
("class-default-initargs" "class-default-initargs.html")
("class-direct-default-initargs" "class-direct-default-initargs.html")
("class-direct-slots" "class-direct-slots.html")
("class-direct-subclasses" "class-direct-subclasses.html")
("class-direct-superclasses" "class-direct-superclasses.html")
("class-finalized-p" "class-finalized-p.html")
("class-name" "class-name.html")
("class-precedence-list" "class-precedence-list.html")
("class-prototype" "class-prototype.html")
("class-slots" "class-slots.html")
("compute-applicable-methods" "compute-applicable-methods.html")
("compute-applicable-methods-using-classes" "compute-applicable-methods-using-classes.html")
("compute-class-precedence-list" "compute-class-precedence-list.html")
("compute-default-initargs" "compute-default-initargs.html")
("compute-discriminating-function" "compute-discriminating-function.html")
("compute-effective-method" "compute-effective-method.html")
("compute-effective-slot-definition" "compute-effective-slot-definition.html")
("compute-slots" "compute-slots.html")
("direct-slot-definition" "class-direct-slot-definition.html")
("direct-slot-definition-class" "direct-slot-definition-class.html")
("effective-slot-definition" "class-effective-slot-definition.html")
("effective-slot-definition-class" "effective-slot-definition-class.html")
("ensure-class" "ensure-class.html")
("ensure-class-using-class" "ensure-class-using-class.html")
("ensure-generic-function" "ensure-generic-function.html")
("ensure-generic-function-using-class" "ensure-generic-function-using-class.html")
("eql-specializer" "class-eql-specializer.html")
("eql-specializer-object" "eql-specializer-object.html")
("extract-lambda-list" "extract-lambda-list.html")
("extract-lambda-list" "extract-lambda-list.html")
("extract-specializer-names" "extract-specializer-names.html")
("finalize-inheritance" "finalize-inheritance.html")
("find-method-combination" "find-method-combination.html")
("forward-referenced-class" "class-forward-referenced-class.html")
("funcallable-standard-class" "class-funcallable-standard-class.html")
("funcallable-standard-instance-access" "funcallable-standard-instance-access.html")
("funcallable-standard-object" "class-funcallable-standard-object.html")
("function" "class-function.html")
("generic-function" "class-generic-function.html")
("generic-function-argument-precedence-order" "generic-function-argument-precedence-order.html")
("generic-function-declarations" "generic-function-declarations.html")
("generic-function-lambda-list" "generic-function-lambda-list.html")
("generic-function-method-class" "generic-function-method-class.html")
("generic-function-method-combination" "generic-function-method-combination.html")
("generic-function-methods" "generic-function-methods.html")
("generic-function-name" "generic-function-name.html")
("intern-eql-specializer" "intern-eql-specializer.html")
("make-instance" "make-instance.html")
("make-method-lambda" "make-method-lambda.html")
("map-dependents" "map-dependents.html")
("metaobject" "class-metaobject.html")
("method" "class-method.html")
("method-combination" "class-method-combination.html")
("method-function" "method-function.html")
("method-generic-function" "method-generic-function.html")
("method-lambda-list" "method-lambda-list.html")
("method-qualifiers" "method-qualifiers.html")
("method-specializers" "method-specializers.html")
("reader-method-class" "reader-method-class.html")
("remove-dependent" "remove-dependent.html")
("remove-direct-method" "remove-direct-method.html")
("remove-direct-subclass" "remove-direct-subclass.html")
("remove-method" "remove-method.html")
("set-funcallable-instance-function" "set-funcallable-instance-function.html")
("setf-class-name" "setf-class-name.html")
("setf-generic-function-name" "setf-generic-function-name.html")
("setf-slot-value-using-class" "setf-slot-value-using-class.html")
("slot-boundp-using-class" "slot-boundp-using-class.html")
("slot-definition" "class-slot-definition.html")
("slot-definition-allocation" "slot-definition-allocation.html")
("slot-definition-initargs" "slot-definition-initargs.html")
("slot-definition-initform" "slot-definition-initform.html")
("slot-definition-initfunction" "slot-definition-initfunction.html")
("slot-definition-location" "slot-definition-location.html")
("slot-definition-name" "slot-definition-name.html")
("slot-definition-readers" "slot-definition-readers.html")
("slot-definition-type" "slot-definition-type.html")
("slot-definition-writers" "slot-definition-writers.html")
("slot-makunbound-using-class" "slot-makunbound-using-class.html")
("slot-value-using-class" "slot-value-using-class.html")
("specializer" "class-specializer.html")
("specializer" "class-specializer.html")
("specializer-direct-generic-functions" "specializer-direct-generic-functions.html")
("specializer-direct-methods" "specializer-direct-methods.html")
("standard-accessor-method" "class-standard-accessor-method.html")
("standard-class" "class-standard-class.html")
("standard-direct-slot-definition" "class-standard-direct-slot-definition.html")
("standard-effective-slot-definition" "class-standard-effective-slot-definition.html")
("standard-generic-function" "class-standard-generic-function.html")
("standard-instance-access" "standard-instance-access.html")
("standard-method" "class-standard-method.html")
("standard-object" "class-standard-object.html")
("standard-reader-method" "class-standard-reader-method.html")
("standard-slot-definition" "class-standard-slot-definition.html")
("standard-writer-method" "class-standard-writer-method.html")
("t" "class-t.html")
("update-dependent" "update-dependent.html")
("validate-superclass" "validate-superclass.html")
("writer-method-class" "writer-method-class.html"))
| null | https://raw.githubusercontent.com/stassats/lisp-bots/09bfce724afd20c91a08acde8816be6faf5f54b2/specs/mop.lisp | lisp | ((:name "mop"
:description "Metaobject Protocol"
:url-prefix "-MOP/"
:abbreviate t)
("accessor-method-slot-definition" "accessor-method-slot-definition.html")
("add-dependent" "add-dependent.html")
("add-direct-method" "add-direct-method.html")
("add-direct-subclass" "add-direct-subclass.html")
("add-method" "add-method.html")
("allocate-instance" "allocate-instance.html")
("built-in-class" "class-built-in-class.html")
("class" "class-class.html")
("class-default-initargs" "class-default-initargs.html")
("class-direct-default-initargs" "class-direct-default-initargs.html")
("class-direct-slots" "class-direct-slots.html")
("class-direct-subclasses" "class-direct-subclasses.html")
("class-direct-superclasses" "class-direct-superclasses.html")
("class-finalized-p" "class-finalized-p.html")
("class-name" "class-name.html")
("class-precedence-list" "class-precedence-list.html")
("class-prototype" "class-prototype.html")
("class-slots" "class-slots.html")
("compute-applicable-methods" "compute-applicable-methods.html")
("compute-applicable-methods-using-classes" "compute-applicable-methods-using-classes.html")
("compute-class-precedence-list" "compute-class-precedence-list.html")
("compute-default-initargs" "compute-default-initargs.html")
("compute-discriminating-function" "compute-discriminating-function.html")
("compute-effective-method" "compute-effective-method.html")
("compute-effective-slot-definition" "compute-effective-slot-definition.html")
("compute-slots" "compute-slots.html")
("direct-slot-definition" "class-direct-slot-definition.html")
("direct-slot-definition-class" "direct-slot-definition-class.html")
("effective-slot-definition" "class-effective-slot-definition.html")
("effective-slot-definition-class" "effective-slot-definition-class.html")
("ensure-class" "ensure-class.html")
("ensure-class-using-class" "ensure-class-using-class.html")
("ensure-generic-function" "ensure-generic-function.html")
("ensure-generic-function-using-class" "ensure-generic-function-using-class.html")
("eql-specializer" "class-eql-specializer.html")
("eql-specializer-object" "eql-specializer-object.html")
("extract-lambda-list" "extract-lambda-list.html")
("extract-lambda-list" "extract-lambda-list.html")
("extract-specializer-names" "extract-specializer-names.html")
("finalize-inheritance" "finalize-inheritance.html")
("find-method-combination" "find-method-combination.html")
("forward-referenced-class" "class-forward-referenced-class.html")
("funcallable-standard-class" "class-funcallable-standard-class.html")
("funcallable-standard-instance-access" "funcallable-standard-instance-access.html")
("funcallable-standard-object" "class-funcallable-standard-object.html")
("function" "class-function.html")
("generic-function" "class-generic-function.html")
("generic-function-argument-precedence-order" "generic-function-argument-precedence-order.html")
("generic-function-declarations" "generic-function-declarations.html")
("generic-function-lambda-list" "generic-function-lambda-list.html")
("generic-function-method-class" "generic-function-method-class.html")
("generic-function-method-combination" "generic-function-method-combination.html")
("generic-function-methods" "generic-function-methods.html")
("generic-function-name" "generic-function-name.html")
("intern-eql-specializer" "intern-eql-specializer.html")
("make-instance" "make-instance.html")
("make-method-lambda" "make-method-lambda.html")
("map-dependents" "map-dependents.html")
("metaobject" "class-metaobject.html")
("method" "class-method.html")
("method-combination" "class-method-combination.html")
("method-function" "method-function.html")
("method-generic-function" "method-generic-function.html")
("method-lambda-list" "method-lambda-list.html")
("method-qualifiers" "method-qualifiers.html")
("method-specializers" "method-specializers.html")
("reader-method-class" "reader-method-class.html")
("remove-dependent" "remove-dependent.html")
("remove-direct-method" "remove-direct-method.html")
("remove-direct-subclass" "remove-direct-subclass.html")
("remove-method" "remove-method.html")
("set-funcallable-instance-function" "set-funcallable-instance-function.html")
("setf-class-name" "setf-class-name.html")
("setf-generic-function-name" "setf-generic-function-name.html")
("setf-slot-value-using-class" "setf-slot-value-using-class.html")
("slot-boundp-using-class" "slot-boundp-using-class.html")
("slot-definition" "class-slot-definition.html")
("slot-definition-allocation" "slot-definition-allocation.html")
("slot-definition-initargs" "slot-definition-initargs.html")
("slot-definition-initform" "slot-definition-initform.html")
("slot-definition-initfunction" "slot-definition-initfunction.html")
("slot-definition-location" "slot-definition-location.html")
("slot-definition-name" "slot-definition-name.html")
("slot-definition-readers" "slot-definition-readers.html")
("slot-definition-type" "slot-definition-type.html")
("slot-definition-writers" "slot-definition-writers.html")
("slot-makunbound-using-class" "slot-makunbound-using-class.html")
("slot-value-using-class" "slot-value-using-class.html")
("specializer" "class-specializer.html")
("specializer" "class-specializer.html")
("specializer-direct-generic-functions" "specializer-direct-generic-functions.html")
("specializer-direct-methods" "specializer-direct-methods.html")
("standard-accessor-method" "class-standard-accessor-method.html")
("standard-class" "class-standard-class.html")
("standard-direct-slot-definition" "class-standard-direct-slot-definition.html")
("standard-effective-slot-definition" "class-standard-effective-slot-definition.html")
("standard-generic-function" "class-standard-generic-function.html")
("standard-instance-access" "standard-instance-access.html")
("standard-method" "class-standard-method.html")
("standard-object" "class-standard-object.html")
("standard-reader-method" "class-standard-reader-method.html")
("standard-slot-definition" "class-standard-slot-definition.html")
("standard-writer-method" "class-standard-writer-method.html")
("t" "class-t.html")
("update-dependent" "update-dependent.html")
("validate-superclass" "validate-superclass.html")
("writer-method-class" "writer-method-class.html"))
|
|
5901ac1b622ad79f188ada09cd1a41f8e092b16fa144a7097474c5f0dcf4cd0b | kowainik/cake-slayer | Random.hs | {- | Utilities for generating random strings.
-}
module CakeSlayer.Random
( mkRandomDigits
, mkRandomString
) where
import Relude.Unsafe ((!!))
import System.Random (newStdGen, randomRIO, randomRs)
-- | Generates @n@ random digits (characters drawn from @'0'..'9'@).
-- A non-positive length yields the empty text.
mkRandomDigits :: (MonadIO m) => Int -> m Text
mkRandomDigits len = do
    gen <- liftIO newStdGen
    pure . toText . take len $ randomRs ('0', '9') gen
{- | Make a random string comprised of the following letters of a given length:

1. Lowercase characters @[a..z]@
2. Uppercase characters @[A..Z]@
3. Digits @[0..9]@.

Returns empty string if given length is less than zero.
-}
mkRandomString
    :: MonadIO m
    => Int    -- ^ Length of the string
    -> m Text -- ^ Generated string of the given length
mkRandomString n = liftIO (toText <$> replicateM n randomChar)
  where
    -- Allowed characters: uppercase and lowercase ASCII letters plus digits.
    alphabet :: String
    alphabet = ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9']

    -- Pick one uniformly random character from 'alphabet'.
    randomChar :: IO Char
    randomChar = do
        idx <- randomRIO (0, length alphabet - 1)
        pure (alphabet !! idx)
| null | https://raw.githubusercontent.com/kowainik/cake-slayer/744f072c0eeaf50e43210e4b548705e1948e5a39/src/CakeSlayer/Random.hs | haskell | | Utilities for generating random strings.
| Generates @n@ random digits.
^ Length of the string
^ Generated string of the given length |
module CakeSlayer.Random
( mkRandomDigits
, mkRandomString
) where
import Relude.Unsafe ((!!))
import System.Random (newStdGen, randomRIO, randomRs)
mkRandomDigits :: (MonadIO m) => Int -> m Text
mkRandomDigits len = toText . take len . randomRs ('0', '9') <$> liftIO newStdGen
| Make a random string comprised of the following letters of a given length :
1 . Lowercase characters @[a .. z]@
2 . Uppercase characters @[A .. Z]@
3 . Digits @[0 .. 9]@.
Returns empty string if given length is less than zero .
1. Lowercase characters @[a..z]@
2. Uppercase characters @[A..Z]@
3. Digits @[0..9]@.
Returns empty string if given length is less than zero.
-}
mkRandomString
:: MonadIO m
mkRandomString n =
liftIO $ toText <$> replicateM n peekRandomChar
where
alphabet :: String
alphabet = ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9']
alphabetLength :: Int
alphabetLength = length alphabet
peekRandomChar :: IO Char
peekRandomChar = do
i <- randomRIO (0, alphabetLength - 1)
pure $ alphabet !! i
|
7bbfc2185cb9872d86ad1621639768765447ebdd1fc69768c1224aae55e1c841 | startalkIM/ejabberd | node_public.erl | %%%----------------------------------------------------------------------
%%% File    : node_public.erl
%%% Author  :
%%% Purpose :
%%% Created : 1 Dec 2007
%%%
%%%
%%% ejabberd, Copyright (C) 2002-2016   ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
%%% You should have received a copy of the GNU General Public License along
%%% with this program; if not, write to the Free Software Foundation, Inc.,
%%% 51 Franklin Street, Fifth Floor, Boston, USA.
%%%
%%%----------------------------------------------------------------------
-module(node_public).
-behaviour(gen_pubsub_node).
-author('').
-include("pubsub.hrl").
-include("jlib.hrl").
-export([init/3, terminate/2, options/0, features/0,
create_node_permission/6, create_node/2, delete_node/1,
purge_node/2, subscribe_node/8, unsubscribe_node/4,
publish_item/7, delete_item/4, remove_extra_items/3,
get_entity_affiliations/2, get_node_affiliations/1,
get_affiliation/2, set_affiliation/3,
get_entity_subscriptions/2, get_node_subscriptions/1,
get_subscriptions/2, set_subscriptions/4,
get_pending_nodes/2, get_states/1, get_state/2,
set_state/1, get_items/7, get_items/3, get_item/7,
get_item/2, set_item/1, get_item_name/3, node_to_path/1,
path_to_node/1]).
%% @doc Initialize the plugin for this host; defers to node_flat.
init(Host, ServerHost, Opts) ->
    node_flat:init(Host, ServerHost, Opts).

%% @doc Tear the plugin down; defers to node_flat.
terminate(Host, ServerHost) ->
    node_flat:terminate(Host, ServerHost).
%% @doc Default node configuration for the "public" node type.
%% Visible choices: anyone may subscribe ({access_model, open}),
%% publication is restricted to publishers, items are persisted up to
%% ?MAXITEMS, and notifications are sent as headline messages.
options() ->
    [{deliver_payloads, true},
     {notify_config, false},
     {notify_delete, false},
     {notify_retract, true},
     {purge_offline, false},
     {persist_items, true},
     {max_items, ?MAXITEMS},
     {subscribe, true},
     {access_model, open},
     {roster_groups_allowed, []},
     {publish_model, publishers},
     {notification_type, headline},
     {max_payload_size, ?MAX_PAYLOAD_SIZE},
     {send_last_published_item, never},
     {deliver_notifications, true},
     {presence_based_delivery, false},
     {itemreply, none}].
%% @doc XEP-0060 feature strings advertised for this node type.
features() ->
    [<<"create-nodes">>,
     <<"delete-nodes">>,
     <<"delete-items">>,
     <<"instant-nodes">>,
     <<"outcast-affiliation">>,
     <<"persistent-items">>,
     <<"publish">>,
     <<"purge-nodes">>,
     <<"retract-items">>,
     <<"retrieve-affiliations">>,
     <<"retrieve-items">>,
     <<"retrieve-subscriptions">>,
     <<"subscribe">>,
     <<"subscription-notifications">>].
%% All remaining gen_pubsub_node callbacks delegate unchanged to
%% node_flat, the default node plugin; only options/0 and features/0
%% above are defined locally for this node type.
create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access) ->
    node_flat:create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access).

create_node(Nidx, Owner) ->
    node_flat:create_node(Nidx, Owner).

delete_node(Removed) ->
    node_flat:delete_node(Removed).

subscribe_node(Nidx, Sender, Subscriber, AccessModel,
               SendLast, PresenceSubscription, RosterGroup, Options) ->
    node_flat:subscribe_node(Nidx, Sender, Subscriber, AccessModel, SendLast,
                             PresenceSubscription, RosterGroup, Options).

unsubscribe_node(Nidx, Sender, Subscriber, SubId) ->
    node_flat:unsubscribe_node(Nidx, Sender, Subscriber, SubId).

publish_item(Nidx, Publisher, Model, MaxItems, ItemId, Payload, PubOpts) ->
    node_flat:publish_item(Nidx, Publisher, Model, MaxItems, ItemId,
                           Payload, PubOpts).

remove_extra_items(Nidx, MaxItems, ItemIds) ->
    node_flat:remove_extra_items(Nidx, MaxItems, ItemIds).

delete_item(Nidx, Publisher, PublishModel, ItemId) ->
    node_flat:delete_item(Nidx, Publisher, PublishModel, ItemId).

purge_node(Nidx, Owner) ->
    node_flat:purge_node(Nidx, Owner).

get_entity_affiliations(Host, Owner) ->
    node_flat:get_entity_affiliations(Host, Owner).

get_node_affiliations(Nidx) ->
    node_flat:get_node_affiliations(Nidx).

get_affiliation(Nidx, Owner) ->
    node_flat:get_affiliation(Nidx, Owner).

set_affiliation(Nidx, Owner, Affiliation) ->
    node_flat:set_affiliation(Nidx, Owner, Affiliation).

get_entity_subscriptions(Host, Owner) ->
    node_flat:get_entity_subscriptions(Host, Owner).

get_node_subscriptions(Nidx) ->
    node_flat:get_node_subscriptions(Nidx).

get_subscriptions(Nidx, Owner) ->
    node_flat:get_subscriptions(Nidx, Owner).

set_subscriptions(Nidx, Owner, Subscription, SubId) ->
    node_flat:set_subscriptions(Nidx, Owner, Subscription, SubId).

get_pending_nodes(Host, Owner) ->
    node_flat:get_pending_nodes(Host, Owner).

get_states(Nidx) ->
    node_flat:get_states(Nidx).

get_state(Nidx, JID) ->
    node_flat:get_state(Nidx, JID).

set_state(State) ->
    node_flat:set_state(State).

get_items(Nidx, From, RSM) ->
    node_flat:get_items(Nidx, From, RSM).

get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, SubId, RSM) ->
    node_flat:get_items(Nidx, JID, AccessModel,
                        PresenceSubscription, RosterGroup, SubId, RSM).

get_item(Nidx, ItemId) ->
    node_flat:get_item(Nidx, ItemId).

get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId) ->
    node_flat:get_item(Nidx, ItemId, JID, AccessModel,
                       PresenceSubscription, RosterGroup, SubId).

set_item(Item) ->
    node_flat:set_item(Item).

get_item_name(Host, Node, Id) ->
    node_flat:get_item_name(Host, Node, Id).

node_to_path(Node) ->
    node_flat:node_to_path(Node).

path_to_node(Path) ->
    node_flat:path_to_node(Path).
| null | https://raw.githubusercontent.com/startalkIM/ejabberd/718d86cd2f5681099fad14dab5f2541ddc612c8b/src/node_public.erl | erlang | ----------------------------------------------------------------------
Purpose :
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
---------------------------------------------------------------------- | File : node_public.erl
Author :
Created : 1 Dec 2007 by
ejabberd , Copyright ( C ) 2002 - 2016 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
-module(node_public).
-behaviour(gen_pubsub_node).
-author('').
-include("pubsub.hrl").
-include("jlib.hrl").
-export([init/3, terminate/2, options/0, features/0,
create_node_permission/6, create_node/2, delete_node/1,
purge_node/2, subscribe_node/8, unsubscribe_node/4,
publish_item/7, delete_item/4, remove_extra_items/3,
get_entity_affiliations/2, get_node_affiliations/1,
get_affiliation/2, set_affiliation/3,
get_entity_subscriptions/2, get_node_subscriptions/1,
get_subscriptions/2, set_subscriptions/4,
get_pending_nodes/2, get_states/1, get_state/2,
set_state/1, get_items/7, get_items/3, get_item/7,
get_item/2, set_item/1, get_item_name/3, node_to_path/1,
path_to_node/1]).
init(Host, ServerHost, Opts) ->
node_flat:init(Host, ServerHost, Opts).
terminate(Host, ServerHost) ->
node_flat:terminate(Host, ServerHost).
options() ->
[{deliver_payloads, true},
{notify_config, false},
{notify_delete, false},
{notify_retract, true},
{purge_offline, false},
{persist_items, true},
{max_items, ?MAXITEMS},
{subscribe, true},
{access_model, open},
{roster_groups_allowed, []},
{publish_model, publishers},
{notification_type, headline},
{max_payload_size, ?MAX_PAYLOAD_SIZE},
{send_last_published_item, never},
{deliver_notifications, true},
{presence_based_delivery, false},
{itemreply, none}].
features() ->
[<<"create-nodes">>,
<<"delete-nodes">>,
<<"delete-items">>,
<<"instant-nodes">>,
<<"outcast-affiliation">>,
<<"persistent-items">>,
<<"publish">>,
<<"purge-nodes">>,
<<"retract-items">>,
<<"retrieve-affiliations">>,
<<"retrieve-items">>,
<<"retrieve-subscriptions">>,
<<"subscribe">>,
<<"subscription-notifications">>].
create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access) ->
node_flat:create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access).
create_node(Nidx, Owner) ->
node_flat:create_node(Nidx, Owner).
delete_node(Removed) ->
node_flat:delete_node(Removed).
subscribe_node(Nidx, Sender, Subscriber, AccessModel,
SendLast, PresenceSubscription, RosterGroup, Options) ->
node_flat:subscribe_node(Nidx, Sender, Subscriber, AccessModel, SendLast,
PresenceSubscription, RosterGroup, Options).
unsubscribe_node(Nidx, Sender, Subscriber, SubId) ->
node_flat:unsubscribe_node(Nidx, Sender, Subscriber, SubId).
publish_item(Nidx, Publisher, Model, MaxItems, ItemId, Payload, PubOpts) ->
node_flat:publish_item(Nidx, Publisher, Model, MaxItems, ItemId,
Payload, PubOpts).
remove_extra_items(Nidx, MaxItems, ItemIds) ->
node_flat:remove_extra_items(Nidx, MaxItems, ItemIds).
delete_item(Nidx, Publisher, PublishModel, ItemId) ->
node_flat:delete_item(Nidx, Publisher, PublishModel, ItemId).
purge_node(Nidx, Owner) ->
node_flat:purge_node(Nidx, Owner).
get_entity_affiliations(Host, Owner) ->
node_flat:get_entity_affiliations(Host, Owner).
get_node_affiliations(Nidx) ->
node_flat:get_node_affiliations(Nidx).
get_affiliation(Nidx, Owner) ->
node_flat:get_affiliation(Nidx, Owner).
set_affiliation(Nidx, Owner, Affiliation) ->
node_flat:set_affiliation(Nidx, Owner, Affiliation).
get_entity_subscriptions(Host, Owner) ->
node_flat:get_entity_subscriptions(Host, Owner).
get_node_subscriptions(Nidx) ->
node_flat:get_node_subscriptions(Nidx).
get_subscriptions(Nidx, Owner) ->
node_flat:get_subscriptions(Nidx, Owner).
set_subscriptions(Nidx, Owner, Subscription, SubId) ->
node_flat:set_subscriptions(Nidx, Owner, Subscription, SubId).
get_pending_nodes(Host, Owner) ->
node_flat:get_pending_nodes(Host, Owner).
get_states(Nidx) ->
node_flat:get_states(Nidx).
get_state(Nidx, JID) ->
node_flat:get_state(Nidx, JID).
set_state(State) ->
node_flat:set_state(State).
get_items(Nidx, From, RSM) ->
node_flat:get_items(Nidx, From, RSM).
get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, SubId, RSM) ->
node_flat:get_items(Nidx, JID, AccessModel,
PresenceSubscription, RosterGroup, SubId, RSM).
get_item(Nidx, ItemId) ->
node_flat:get_item(Nidx, ItemId).
get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId) ->
node_flat:get_item(Nidx, ItemId, JID, AccessModel,
PresenceSubscription, RosterGroup, SubId).
set_item(Item) ->
node_flat:set_item(Item).
get_item_name(Host, Node, Id) ->
node_flat:get_item_name(Host, Node, Id).
node_to_path(Node) ->
node_flat:node_to_path(Node).
path_to_node(Path) ->
node_flat:path_to_node(Path).
|
e96e8c5eea53204a2c9a461284fcf2aa2dafe422aae74abe9ce785496dc3c9fb | CatalaLang/catala | from_dcalc.mli | This file is part of the Catala compiler , a specification language for tax
   and social benefits computation rules. Copyright (C) 2020 Inria, contributor:
   Denis Merigoux <>

   Licensed under the Apache License, Version 2.0 (the "License"); you may not
   use this file except in compliance with the License. You may obtain a copy of
   the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
   License for the specific language governing permissions and limitations under
   the License. *)
val translate_program_with_exceptions : 'm Dcalc.Ast.program -> 'm Ast.program
(** Translation from the default calculus to the lambda calculus. This
translation uses exceptions to handle empty default terms. *)
val translate_program_without_exceptions :
'm Dcalc.Ast.program -> 'm Ast.program
(** Translation from the default calculus to the lambda calculus. This
    translation uses an option monad to handle empty default terms. This
    transformation is one piece that makes it possible to compile toward
    legacy languages that do not contain exceptions. *)
| null | https://raw.githubusercontent.com/CatalaLang/catala/4f059883c1b30d5c4efde77cecbd977704cbf972/compiler/lcalc/from_dcalc.mli | ocaml | * Translation from the default calculus to the lambda calculus. This
translation uses exceptions to handle empty default terms. | This file is part of the Catala compiler , a specification language for tax
and social benefits computation rules . Copyright ( C ) 2020 , contributor :
< >
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the
License for the specific language governing permissions and limitations under
the License .
and social benefits computation rules. Copyright (C) 2020 Inria, contributor:
Denis Merigoux <>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. *)
val translate_program_with_exceptions : 'm Dcalc.Ast.program -> 'm Ast.program
val translate_program_without_exceptions :
'm Dcalc.Ast.program -> 'm Ast.program
* Translation from the default calculus to the lambda calculus . This
translation uses an option monad to handle empty defaults terms . This
transformation is one piece to permit to compile toward legacy languages
that does not contains exceptions .
translation uses an option monad to handle empty defaults terms. This
transformation is one piece to permit to compile toward legacy languages
that does not contains exceptions. *)
|
9993a7adcba57a62452659ffdeacf2d79bfd08cd46acc1b124c7eb3a293d3444 | nikita-volkov/rebase | Strict.hs | module Rebase.Data.STRef.Strict
(
module Data.STRef.Strict
)
where
import Data.STRef.Strict
| null | https://raw.githubusercontent.com/nikita-volkov/rebase/7c77a0443e80bdffd4488a4239628177cac0761b/library/Rebase/Data/STRef/Strict.hs | haskell | module Rebase.Data.STRef.Strict
(
module Data.STRef.Strict
)
where
import Data.STRef.Strict
|
|
2c5377437517b03d96690384e664cf3266aa582b219a6f0b0d342cbca83a1520 | fpco/stack-docker-image-build | Main.hs | {-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import System.Process.Typed
import System.FilePath
import Data.Foldable
import System.Directory
import System.Environment (getArgs)
import Control.Exception
import Control.Monad
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Data.Text.Lazy.Encoding (decodeUtf8)
import Data.Yaml
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Vector as V
import Data.Maybe
-- | Read @stack.yaml@ from the current directory and extract:
--
--   * the @build-first@ list under the @x-stack-docker-image-build@ key
--     (packages to build before everything else), and
--   * the top-level @extra-deps@ list.
--
-- Either list defaults to @[]@ when its key is absent or has an
-- unexpected shape.  Throws (via 'throwIO') if the YAML file cannot
-- be parsed at all.
parseStackYaml :: IO ([String], [String])
parseStackYaml = do
  val <- decodeFileEither "stack.yaml" >>= either throwIO return
  -- Pattern-match failure inside the Maybe monad yields Nothing, which
  -- 'fromMaybe' turns into the empty list.
  let buildFirst = fromMaybe [] $ do
        Object o1 <- Just val
        Object o2 <- HashMap.lookup "x-stack-docker-image-build" o1
        Array vals <- HashMap.lookup "build-first" o2
        Just [T.unpack dep | String dep <- V.toList vals]
      extraDeps = fromMaybe [] $ do
        Object o <- Just val
        Array vals <- HashMap.lookup "extra-deps" o
        Just [T.unpack dep | String dep <- V.toList vals]
  return (buildFirst, extraDeps)
-- | Build a @stack@ process invocation.  Every call disables GHC
-- installation and forces the system GHC, so the image's compiler is used.
stack :: [String] -> ProcessConfig () () ()
stack args = proc "stack" $ ["--no-install-ghc", "--system-ghc"] ++ args

-- | Run @stack@ with the given arguments (throws on non-zero exit,
-- per typed-process's @runProcess_@).
runStack :: [String] -> IO ()
runStack = runProcess_ . stack

-- | Run @stack@ and capture its stdout as a 'String'.
readStack :: [String] -> IO String
readStack = fmap (TL.unpack . decodeUtf8 . fst) . readProcess_ . stack
-- | Query @stack path@ with the given flag (e.g. @--global-pkg-db@) and
-- return the first line of its output (the path itself).
getDir :: String -> IO FilePath
getDir flag = do
  dirRaw <- readStack ["path", flag]
  return $ takeWhile (/= '\n') dirRaw

-- | Package-database directory for @typ@ ("global", "snapshot" or "local").
getDBDir :: String -> IO FilePath
getDBDir typ = getDir $ concat ["--", typ, "-pkg-db"]

-- | Binary installation directory (@<install-root>/bin@) for @typ@.
getBinDir :: String -> IO FilePath
getBinDir typ = do
  dir <- getDir $ concat ["--", typ, "-install-root"]
  return $ dir </> "bin"
-- | Build the project inside the image, then flatten the result so it
-- works without a per-project stack setup:
--
--   1. build the @build-first@ packages, then @extra-deps@, then the
--      local targets (extra command-line arguments are appended to
--      every @stack build@);
--   2. hard-link every snapshot/local executable into @/usr/local/bin@
--      (@ln@ without @-s@ creates hard links);
--   3. register every snapshot/local package @.conf@ into the global
--      package database;
--   4. prune @~/.stack@, keeping only the @snapshots@ directory.
main :: IO ()
main = do
  args <- getArgs
  (buildFirst, deps) <- parseStackYaml
  forM_ buildFirst $ \pkg -> do
    putStrLn $ "Building " ++ pkg ++ " from build-first"
    runStack $ "build" : words pkg ++ args
  unless (null deps) $ do
    putStrLn "Building extra-deps"
    runStack $ "build" : deps ++ args
  putStrLn "Performing build local"
  runStack $ "build" : args
  globaldb <- getDBDir "global"
  forM_ (words "snapshot local") $ \typ -> do
    -- Link installed executables into /usr/local/bin.
    bindir <- getBinDir typ
    bindirexists <- doesDirectoryExist bindir
    bincontents <- if bindirexists then getDirectoryContents bindir else return []
    forM_ bincontents $ \file -> do
      let fp = bindir </> file
      exists <- doesFileExist fp
      when exists $ do
        putStrLn $ "Linking " ++ fp
        runProcess_ $ proc "ln" [fp, "/usr/local/bin" </> file]
    -- Force-register every package description into the global db.
    dbdir <- getDBDir typ
    dbdirexists <- doesDirectoryExist dbdir
    dbcontents <- if dbdirexists then getDirectoryContents dbdir else return []
    forM_ dbcontents $ \file -> when (takeExtension file == ".conf") $ do
      let fp = dbdir </> file
      putStrLn $ "Registering: " ++ file
      runStack
        [ "exec"
        , "--"
        , "ghc-pkg"
        , "register"
        , fp
        , "--package-db"
        , globaldb
        , "--force"
        ]
  -- Prune the stack root: keep only the snapshots directory.
  stackDir <- getAppUserDataDirectory "stack"
  stackContents <- getDirectoryContents stackDir
  let toKeep "." = True
      toKeep ".." = True
      toKeep "snapshots" = True
      toKeep _ = False
  forM_ (filter (not . toKeep) stackContents) $ \x ->
    runProcess_ $ proc "rm" ["-rf", stackDir </> x]
| null | https://raw.githubusercontent.com/fpco/stack-docker-image-build/8aa2ff15e7a5807e295f6ff6b2f70f74f5d08722/app/Main.hs | haskell | # LANGUAGE OverloadedStrings # | module Main (main) where
import System.Process.Typed
import System.FilePath
import Data.Foldable
import System.Directory
import System.Environment (getArgs)
import Control.Exception
import Control.Monad
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Data.Text.Lazy.Encoding (decodeUtf8)
import Data.Yaml
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Vector as V
import Data.Maybe
parseStackYaml :: IO ([String], [String])
parseStackYaml = do
val <- decodeFileEither "stack.yaml" >>= either throwIO return
let buildFirst = fromMaybe [] $ do
Object o1 <- Just val
Object o2 <- HashMap.lookup "x-stack-docker-image-build" o1
Array vals <- HashMap.lookup "build-first" o2
Just [T.unpack dep | String dep <- V.toList vals]
extraDeps = fromMaybe [] $ do
Object o <- Just val
Array vals <- HashMap.lookup "extra-deps" o
Just [T.unpack dep | String dep <- V.toList vals]
return (buildFirst, extraDeps)
stack :: [String] -> ProcessConfig () () ()
stack args = proc "stack" $ ["--no-install-ghc", "--system-ghc"] ++ args
runStack :: [String] -> IO ()
runStack = runProcess_ . stack
readStack :: [String] -> IO String
readStack = fmap (TL.unpack . decodeUtf8 . fst) . readProcess_ . stack
getDir :: String -> IO FilePath
getDir flag = do
dirRaw <- readStack ["path", flag]
return $ takeWhile (/= '\n') dirRaw
getDBDir :: String -> IO FilePath
getDBDir typ = getDir $ concat ["--", typ, "-pkg-db"]
getBinDir :: String -> IO FilePath
getBinDir typ = do
dir <- getDir $ concat ["--", typ, "-install-root"]
return $ dir </> "bin"
main :: IO ()
main = do
args <- getArgs
(buildFirst, deps) <- parseStackYaml
forM_ buildFirst $ \pkg -> do
putStrLn $ "Building " ++ pkg ++ " from build-first"
runStack $ "build" : words pkg ++ args
unless (null deps) $ do
putStrLn "Building extra-deps"
runStack $ "build" : deps ++ args
putStrLn "Performing build local"
runStack $ "build" : args
globaldb <- getDBDir "global"
forM_ (words "snapshot local") $ \typ -> do
bindir <- getBinDir typ
bindirexists <- doesDirectoryExist bindir
bincontents <- if bindirexists then getDirectoryContents bindir else return []
forM_ bincontents $ \file -> do
let fp = bindir </> file
exists <- doesFileExist fp
when exists $ do
putStrLn $ "Linking " ++ fp
runProcess_ $ proc "ln" [fp, "/usr/local/bin" </> file]
dbdir <- getDBDir typ
dbdirexists <- doesDirectoryExist dbdir
dbcontents <- if dbdirexists then getDirectoryContents dbdir else return []
forM_ dbcontents $ \file -> when (takeExtension file == ".conf") $ do
let fp = dbdir </> file
putStrLn $ "Registering: " ++ file
runStack
[ "exec"
, "--"
, "ghc-pkg"
, "register"
, fp
, "--package-db"
, globaldb
, "--force"
]
stackDir <- getAppUserDataDirectory "stack"
stackContents <- getDirectoryContents stackDir
let toKeep "." = True
toKeep ".." = True
toKeep "snapshots" = True
toKeep _ = False
forM_ (filter (not . toKeep) stackContents) $ \x ->
runProcess_ $ proc "rm" ["-rf", stackDir </> x]
|
52aaf9bbbb665fac4c939b34fdbecf3f098567139b92bfe5f9deb1c2b6833d0d | lmj/lparallel | kernel-test.lisp | Copyright ( c ) 2011 - 2012 , . All rights reserved .
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials provided
;;; with the distribution.
;;;
;;; * Neither the name of the project nor the names of its
;;; contributors may be used to endorse or promote products derived
;;; from this software without specific prior written permission.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
;;; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
;;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package #:lparallel-test)

;;; Basic sanity check: submit four squaring tasks through a channel and
;;; verify all four results come back (receipt order is not asserted,
;;; hence the sort).
(full-test kernel-test
  (let ((channel (make-channel)))
    (mapcar (lambda (x) (submit-task channel (lambda () (* x x))))
            (list 5 6 7 8))
    (is (equal (list 25 36 49 64)
               (sort (collect-n 4 (receive-result channel)) '<)))))

;;; Submitting a task with *kernel* unbound must signal `no-kernel-error'.
(full-test no-kernel-test
  (let ((*kernel* nil))
    (signals no-kernel-error
      (submit-task (make-channel) (lambda ())))))

;;; Repeatedly create and tear down kernels of 1..31 workers.
(base-test end-kernel-test
  (repeat 10
    (loop for n from 1 below 32
          do (with-temp-kernel (n)
               (is (= 1 1))))))
;;; Stress the task queue: interleaved and batched submit/receive cycles,
;;; at both the default and :low task priorities.
(full-test many-task-test
  (let ((channel (make-channel)))
    (repeat 1000
      (submit-task channel (lambda ()))
      (is (null (receive-result channel))))
    (repeat 1000
      (submit-task channel (lambda ())))
    (repeat 1000
      (is (null (receive-result channel))))
    (repeat 1000
      (let ((*task-priority* :low))
        (submit-task channel (lambda ())))
      (is (null (receive-result channel))))
    (repeat 1000
      (let ((*task-priority* :low))
        (submit-task channel (lambda ()))))
    (repeat 1000
      (is (null (receive-result channel))))))
;;; End a kernel while a worker is stuck in an infinite loop:
;;; `end-kernel :wait t' must stay blocked until `kill-tasks' kills the
;;; looping task, after which both the task-killed handler and the
;;; ender thread should have completed.
#-lparallel.without-kill
(base-test kill-during-end-kernel-test
  (let* ((*kernel* (make-kernel 2))
         (kernel *kernel*)
         (out *standard-output*)
         (channel (make-channel))
         (handled (make-queue))
         (finished (make-queue)))
    (task-handler-bind ((error #'invoke-transfer-error))
      (submit-task channel (lambda ()
                             (setf *error-output* (make-broadcast-stream))
                             (infinite-loop))))
    ;; Receive in a helper thread; a task-killed-error marks `handled'.
    (with-thread ()
      (block top
        (handler-bind ((task-killed-error
                        (lambda (e)
                          (declare (ignore e))
                          (push-queue t handled)
                          (return-from top))))
          (receive-result channel))))
    (sleep 0.2)
    (let ((thread (with-thread ()
                    (let ((*standard-output* out))
                      (let ((*kernel* kernel))
                        (end-kernel :wait t)
                        (push-queue t finished))))))
      (sleep 0.2)
      ;; end-kernel must still be blocked on the looping task...
      (is (null (peek-queue finished)))
      (is (eql 1 (kill-tasks :default)))
      (sleep 0.2)
      ;; ...and complete once the task has been killed.
      (is (eq t (peek-queue handled)))
      (is (eq t (peek-queue finished)))
      (is (not (null thread))))))
;;; A channel with :fixed-capacity 1 must still deliver all submitted
;;; results.
(full-test channel-capacity-test
  (let ((channel (make-channel :fixed-capacity 1)))
    (submit-task channel (lambda () 3))
    (submit-task channel (lambda () 4))
    (submit-task channel (lambda () 5))
    (is (equal '(3 4 5)
               ;; avoid sbcl warning
               (locally (declare (notinline sort))
                 (sort (list (receive-result channel)
                             (receive-result channel)
                             (receive-result channel))
                       #'<))))))

;;; try-receive-result returns (values result flag); the flag
;;; distinguishes "no result available" from a genuinely null result.
(full-test try-receive-test
  (let ((channel (make-channel)))
    (multiple-value-bind (a b) (try-receive-result channel)
      (is (null a))
      (is (null b)))
    (submit-task channel (lambda () 3))
    (sleep 0.1)
    (multiple-value-bind (a b) (try-receive-result channel)
      (is (= 3 a))
      (is (eq t b)))
    (multiple-value-bind (a b) (try-receive-result channel)
      (is (null a))
      (is (null b)))))

;;; Same contract when a :timeout is supplied.
#-lparallel.without-bordeaux-threads-condition-wait-timeout
(full-test try-receive-timeout-test
  (let ((channel (make-channel)))
    (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1)
      (is (null a))
      (is (null b)))
    (submit-task channel (lambda () 3))
    (sleep 0.1)
    (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1)
      (is (= 3 a))
      (is (eq t b)))
    (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1)
      (is (null a))
      (is (null b)))))
;;; Errors signaled inside a task can be transferred to the receiver:
;;; re-signaled as-is via invoke-transfer-error, or replaced by a
;;; condition object or condition name given to the transfer-error restart.
(full-test kernel-client-error-test
  (task-handler-bind ((client-error #'invoke-transfer-error))
    (let ((channel (make-channel)))
      (submit-task channel (lambda () (error 'client-error)))
      (signals client-error
        (receive-result channel))))
  (task-handler-bind
      ((error (lambda (e)
                (declare (ignore e))
                (invoke-restart 'transfer-error (make-condition 'foo-error)))))
    (let ((channel (make-channel)))
      (submit-task channel (lambda () (error 'client-error)))
      (signals foo-error
        (receive-result channel))))
  (task-handler-bind
      ((error (lambda (e)
                (declare (ignore e))
                (invoke-restart 'transfer-error 'foo-error))))
    (let ((channel (make-channel)))
      (submit-task channel (lambda () (error 'client-error)))
      (signals foo-error
        (receive-result channel)))))

;;; Restarts established inside a task body are visible to task handlers,
;;; whether the handler is bound to the specific condition type or to `error'.
(full-test user-restart-test
  (task-handler-bind
      ((foo-error (lambda (e)
                    (declare (ignore e))
                    (invoke-restart 'eleven))))
    (let ((channel (make-channel)))
      (submit-task channel (lambda ()
                             (restart-case (error 'foo-error)
                               (eleven () 11))))
      (is (eql 11 (receive-result channel)))))
  (task-handler-bind
      ((error (lambda (e)
                (declare (ignore e))
                (invoke-restart 'eleven))))
    (let ((channel (make-channel)))
      (submit-task channel (lambda ()
                             (restart-case (error 'foo-error)
                               (eleven () 11))))
      (is (eql 11 (receive-result channel))))))

;;; An error signaled by an inner task handler is itself subject to the
;;; outer handler's transfer-error.
(full-test error-cascade-test
  (task-handler-bind
      ((error (lambda (e)
                (invoke-restart 'transfer-error e))))
    (task-handler-bind
        ((error (lambda (e)
                  (declare (ignore e))
                  (error 'foo-error))))
      (let ((channel (make-channel)))
        (submit-task channel (lambda () (error 'client-error)))
        (signals foo-error
          (receive-result channel))))))
;;; Nested task-handler-bind forms must fire in the same order as plain
;;; handler-bind does for ordinary conditions: the first half records the
;;; baseline order for regular handlers, the second half reproduces it
;;; with task handlers across a worker boundary.
(base-test complex-handler-test
  (flet ((estr (e)
           (with-output-to-string (out)
             (write e :escape nil :stream out))))
    (let ((queue (make-queue)))
      (ignore-errors
        (handler-bind ((error (lambda (e)
                                (push-queue (cons 'a (estr e)) queue))))
          (handler-bind ((error (lambda (e)
                                  (push-queue (cons 'b (estr e)) queue)
                                  (error "Z"))))
            (handler-bind ((error (lambda (e)
                                    (push-queue (cons 'c (estr e)) queue)
                                    (error "Y"))))
              (handler-bind ((error (lambda (e)
                                      (push-queue (cons 'd (estr e)) queue))))
                (error "X"))))))
      (is (equal '((D . "X") (C . "X") (B . "Y") (A . "Z"))
                 (extract-queue queue))))
    (with-temp-kernel (2)
      (let ((queue (make-queue)))
        (task-handler-bind ((error #'invoke-transfer-error))
          (task-handler-bind ((error (lambda (e)
                                       (push-queue (cons 'a (estr e)) queue))))
            (task-handler-bind ((error (lambda (e)
                                         (push-queue (cons 'b (estr e)) queue)
                                         (error "Z"))))
              (task-handler-bind ((error (lambda (e)
                                           (push-queue (cons 'c (estr e)) queue)
                                           (error "Y"))))
                (task-handler-bind ((error (lambda (e)
                                             (push-queue (cons 'd (estr e))
                                                         queue))))
                  (submit-task (make-channel) #'error "X"))))))
        (is (equal '((D . "X") (C . "X") (B . "Y") (A . "Z"))
                   (loop repeat 4 collect (pop-queue queue))))))))
;;; A :context function wraps worker task execution, so bindings it
;;; establishes are seen by tasks but not by the submitting thread.
(base-test kernel-worker-context-test
  (with-temp-kernel (2 :context (lambda (run)
                                  (let ((*memo* 9))
                                    (funcall run))))
    (let ((channel (make-channel)))
      (setf *memo* 7)
      (submit-task channel (lambda () *memo*))
      (is (eql 9 (receive-result channel)))
      (is (eql 7 *memo*)))))

;;; Without :bindings a worker's setf of a special variable is visible to
;;; the submitter; with :bindings each task sees the per-worker binding
;;; and the submitter's value is untouched.
(base-test kernel-binding-test
  (unwind-protect
       (progn
         (end-kernel)
         (setf *kernel* (make-kernel 4))
         (let ((channel (make-channel)))
           (setf *memo* :main)
           (submit-task channel (lambda () (setf *memo* :worker) *memo*))
           (is (eq :worker (receive-result channel)))
           (is (eq :worker *memo*))))
    (end-kernel))
  (with-temp-kernel (4 :bindings (acons '*memo* :worker nil))
    (let ((node (assoc '*memo* (kernel-bindings))))
      (is (eq (cdr node) :worker)))
    (let ((channel (make-channel)))
      (setf *memo* :main)
      (submit-task channel (lambda () *memo*))
      (is (eq :worker (receive-result channel)))
      (is (eq :main *memo*)))))

;;; *kernel* inside a worker is the very kernel object of the submitter.
(full-test kernel-var-test
  (let ((channel (make-channel)))
    (submit-task channel (lambda () *kernel*))
    (is (eq *kernel* (receive-result channel)))))
;;; `task-categories-running' reports the category of each in-flight task.
;;; Each sub-case sleeps 0.2s so the 0.4s task is mid-execution when the
;;; snapshot is taken; categories may be keywords or arbitrary objects
;;; compared with the default `count' test.
(base-test task-categories-test
  (with-temp-kernel (2)
    (is (notany #'identity (task-categories-running)))
    (let ((channel (make-channel)))
      (submit-task channel (lambda () (sleep 0.4)))
      (sleep 0.2)
      (is (eql 1 (count :default (task-categories-running))))))
  (with-temp-kernel (2)
    (let ((channel (make-channel)))
      (let ((*task-category* :foo))
        (submit-task channel (lambda () (sleep 0.4))))
      (sleep 0.2)
      (is (eql 1 (count :foo (task-categories-running))))))
  (with-temp-kernel (2)
    (let ((channel (make-channel)))
      (let ((*task-category* 999))
        (submit-task channel (lambda () (sleep 0.4))))
      (sleep 0.2)
      (is (eql 1 (count 999 (task-categories-running))))))
  (with-temp-kernel (2)
    (let ((channel (make-channel)))
      (let ((*task-category* :foo))
        (submit-task channel (lambda () (sleep 0.4)))
        (submit-task channel (lambda () (sleep 0.4))))
      (sleep 0.2)
      (is (eql 2 (count :foo (task-categories-running)))))))

;;; Submitting with *kernel* unbound signals `no-kernel-error', whose
;;; MAKE-KERNEL restart accepts a worker count and retries the submission.
(base-test no-kernel-restart-test
  (let ((*kernel* nil))
    (unwind-protect
         (let ((flag nil))
           (handler-bind
               ((no-kernel-error
                 (lambda (c)
                   (setf flag :called)
                   (invoke-restart (find-restart 'make-kernel c) 3))))
             (let ((channel (make-channel)))
               (submit-task channel (lambda (x) (* x x)) 3)
               (is (= 9 (receive-result channel))))
             (is (= 3 (kernel-worker-count)))
             (is (eq :called flag))))
      (end-kernel))))

;;; A warning signaled inside a task is reported on the submitting
;;; thread's *error-output* (captured here in a string stream).
(base-test kernel-warnings-test
  (let ((*error-output* (make-string-output-stream)))
    (with-temp-kernel (3)
      (is (zerop (length (get-output-stream-string *error-output*))))
      (let ((channel (make-channel)))
        (submit-task channel (lambda () (warn "blah")))
        (receive-result channel))
      (is (search "blah" (get-output-stream-string *error-output*))))))

;;; A task-handler may invoke a restart established by `restart-case'
;;; inside the task itself, supplying a value (here doubling 3 to 6).
(full-test handler-bind-test
  (task-handler-bind
      ((foo-error (lambda (e)
                    (declare (ignore e))
                    (invoke-restart 'double-me 3))))
    (let ((channel (make-channel)))
      (repeat 3
        (submit-task channel (lambda ()
                               (restart-case (error 'foo-error)
                                 (double-me (x)
                                   ;; clisp warns unless interactive is given
                                   :interactive (lambda ())
                                   (* 2 x))))))
      (is (equal '(6 6 6)
                 (collect-n 3 (receive-result channel)))))))

;;; A handler that calls `invoke-abort-thread' kills the worker; the
;;; pending result then surfaces as `task-killed-error' on receive.
(full-test aborted-worker-test
  (task-handler-bind ((foo-error (lambda (e)
                                   (declare (ignore e))
                                   (invoke-abort-thread))))
    (let ((channel (make-channel)))
      (submit-task channel (lambda ()
                             (setf *error-output* (make-broadcast-stream))
                             (restart-case (error 'foo-error)
                               (eleven () 11))))
      (signals task-killed-error
        (receive-result channel)))))
;;; True when every worker thread of the current kernel is alive.  The
;;; initial sleep gives the kernel time to replace any recently killed
;;; worker before we inspect.  Reaches into lparallel.kernel internals.
(defun all-workers-alive-p ()
  (sleep 0.2)
  (every #'bordeaux-threads:thread-alive-p
         (map 'list
              #'lparallel.kernel::thread
              (lparallel.kernel::workers *kernel*))))

;;; A worker aborted while executing a task is replaced, restoring the
;;; full worker count.
(base-test active-worker-replacement-test
  (with-thread-count-check
    (with-temp-kernel (2)
      (is (all-workers-alive-p))
      (task-handler-bind ((foo-error (lambda (e)
                                       (declare (ignore e))
                                       (invoke-abort-thread))))
        (let ((channel (make-channel)))
          (submit-task channel (lambda ()
                                 (setf *error-output* (make-broadcast-stream))
                                 (error 'foo-error)))
          (signals task-killed-error
            (receive-result channel))))
      (is (all-workers-alive-p)))))

;;; Idle (sleeping) workers destroyed from outside are likewise replaced,
;;; even when several are destroyed in quick succession.
#-lparallel.without-kill
(base-test sleeping-worker-replacement-test
  (with-thread-count-check
    (with-temp-kernel (2 :bindings (list (cons '*error-output*
                                               (make-broadcast-stream))))
      (is (all-workers-alive-p))
      (destroy-thread
       (lparallel.kernel::thread
        (aref (lparallel.kernel::workers *kernel*) 0)))
      (is (all-workers-alive-p))
      (destroy-thread
       (lparallel.kernel::thread
        (aref (lparallel.kernel::workers *kernel*) 0)))
      (destroy-thread
       (lparallel.kernel::thread
        (aref (lparallel.kernel::workers *kernel*) 1)))
      (is (all-workers-alive-p)))))

;; A plain (non-error) condition used to exercise task handlers.
(define-condition foo-condition () ())

;;; task-handler-bind also intercepts non-error conditions raised with
;;; `signal'; the task completes normally afterwards.
(full-test non-error-condition-test
  (let ((result nil))
    (task-handler-bind ((foo-condition (lambda (c)
                                         (declare (ignore c))
                                         (setf result :called))))
      (let ((channel (make-channel)))
        (submit-task channel (lambda ()
                               (signal 'foo-condition)))
        (receive-result channel)))
    (is (eq :called result))))

;;; `kill-tasks' with a user-chosen category kills only that category's
;;; tasks: the two infinite loops submitted under 'blah die (reported as
;;; task-killed-error), while the queued :default task still runs.
#-lparallel.without-kill
(base-test custom-kill-task-test
  (with-thread-count-check
    (with-temp-kernel (2)
      (let ((channel (make-channel)))
        (let ((*task-category* 'blah))
          (submit-task channel (lambda ()
                                 (setf *error-output* (make-broadcast-stream))
                                 (infinite-loop)))
          (submit-task channel (lambda ()
                                 (setf *error-output* (make-broadcast-stream))
                                 (infinite-loop))))
        (sleep 0.2)
        (submit-task channel (lambda () 'survived))
        (sleep 0.2)
        (kill-tasks 'blah)
        (sleep 0.2)
        (let ((errors nil)
              (regulars nil))
          (repeat 3
            (handler-case (push (receive-result channel) regulars)
              (task-killed-error (e)
                (push e errors))))
          (is (= 2 (length errors)))
          (is (equal '(survived) regulars)))))))
;;; Same scenario as custom-kill-task-test but using the default value of
;;; *task-category*: killing it stops the two running infinite loops while
;;; the queued task survives and runs.
#-lparallel.without-kill
(base-test default-kill-task-test
  (with-thread-count-check
    (with-temp-kernel (2)
      (let ((channel (make-channel)))
        (submit-task channel (lambda ()
                               (setf *error-output* (make-broadcast-stream))
                               (infinite-loop)))
        (submit-task channel (lambda ()
                               (setf *error-output* (make-broadcast-stream))
                               (infinite-loop)))
        (sleep 0.2)
        (submit-task channel (lambda () 'survived))
        (sleep 0.2)
        (kill-tasks *task-category*)
        (sleep 0.2)
        (let ((errors nil)
              (regulars nil))
          (repeat 3
            (handler-case (push (receive-result channel) regulars)
              (task-killed-error (e)
                (push e errors))))
          (is (= 2 (length errors)))
          (is (equal '(survived) regulars)))))))

;;; An expired timeout delivers its timeout value on the channel, after
;;; any already-completed regular results.
(base-test submit-timeout-test
  (with-temp-kernel (2)
    (let ((channel (make-channel)))
      (declare (notinline submit-timeout))
      (submit-timeout channel 0.1 'timeout)
      (submit-task channel (lambda () 3))
      (is (eql 3 (receive-result channel)))
      (is (eq 'timeout (receive-result channel))))))

;;; Canceling a pending timeout delivers the cancel value instead of the
;;; timeout value.
#-lparallel.without-kill
(base-test cancel-timeout-test
  (with-temp-kernel (2)
    (locally (declare (notinline submit-timeout cancel-timeout))
      (let* ((channel (make-channel))
             (timeout (submit-timeout channel 999 'timeout)))
        (sleep 0.2)
        (cancel-timeout timeout 'a)
        (is (eq 'a (receive-result channel)))))))

;;; Destroying the timeout's internal thread surfaces as
;;; `task-killed-error' on the channel.  Uses lparallel.kernel internals.
#-lparallel.without-kill
(base-test kill-timeout-test
  (with-temp-kernel (2)
    (locally (declare (notinline submit-timeout))
      (let* ((channel (make-channel))
             (timeout (submit-timeout channel 999 'timeout)))
        (sleep 0.2)
        (lparallel.kernel::with-timeout-slots (lparallel.kernel::thread) timeout
          (destroy-thread lparallel.kernel::thread))
        (signals task-killed-error
          (receive-result channel))))))

;; A second plain condition type, signaled from within a handler below.
(define-condition foo-condition-2 (condition) ())

;;; A task handler may itself signal; the outer task handler must then
;;; fire, giving the order (inner outer).
(full-test signaling-after-signal-test
  (let ((q (make-queue)))
    (task-handler-bind ((foo-condition-2 (lambda (c)
                                           (declare (ignore c))
                                           (push-queue 'outer q))))
      (task-handler-bind ((foo-condition (lambda (c)
                                           (declare (ignore c))
                                           (push-queue 'inner q)
                                           (signal 'foo-condition-2))))
        (let ((channel (make-channel)))
          (submit-task channel (lambda () (signal 'foo-condition)))
          (receive-result channel))))
    (is (equal '(inner outer)
               (extract-queue q)))))

;;; Malformed task-handler-bind clauses must be rejected at macroexpansion
;;; time, not silently accepted.
(base-test task-handler-bind-syntax-test
  (signals error
    (macroexpand '(task-handler-bind ((())))))
  (signals error
    (macroexpand '(task-handler-bind (()))))
  (signals error
    (macroexpand '(task-handler-bind ((x)))))
  (signals error
    (macroexpand '(task-handler-bind ((x y z))))))
;;; A kernel must have a printable representation (non-empty output).
(full-test print-kernel-test
  (is (plusp (length (with-output-to-string (s)
                       (print *kernel* s))))))

;;; `end-kernel :wait t' blocks until workers finish and returns the list
;;; of worker threads (length 3 here).
(base-test end-kernel-wait-test
  (with-thread-count-check
    (let ((*kernel* (make-kernel 3)))
      (unwind-protect
           (let ((channel (make-channel)))
             (submit-task channel (lambda () (sleep 1))))
        (is (eql 3 (length (end-kernel :wait t))))))))

;;; With both workers busy and one task queued, `steal-work' executes the
;;; queued task and returns true; a second attempt finds nothing.  When no
;;; task is queued, stealing returns nil.  Uses lparallel.kernel internals;
;;; sleeps stage the workers into the intended busy/idle states.
(base-test steal-work-test
  (with-temp-kernel (2)
    (let ((channel (make-channel)))
      (submit-task channel (lambda () (sleep 0.4)))
      (submit-task channel (lambda () (sleep 0.4)))
      (sleep 0.1)
      (let ((execp nil))
        (submit-task channel (lambda () (setf execp t)))
        (sleep 0.1)
        (is (eq t (lparallel.kernel::steal-work
                   *kernel*
                   lparallel.kernel::*worker*)))
        (is (eq t execp))
        (is (eq nil (lparallel.kernel::steal-work
                     *kernel*
                     lparallel.kernel::*worker*))))))
  (with-temp-kernel (2)
    (let ((channel (make-channel)))
      (submit-task channel (lambda () (sleep 0.2)))
      (submit-task channel (lambda () (sleep 0.2)))
      (sleep 0.1)
      (is (eq nil (lparallel.kernel::steal-work
                   *kernel*
                   lparallel.kernel::*worker*))))))

;;; `no-kernel-error' also offers a STORE-VALUE restart that installs a
;;; provided kernel and retries.
(base-test kernel-store-value-test
  (unwind-protect
       (handler-bind ((no-kernel-error
                       (lambda (e)
                         (declare (ignore e))
                         (invoke-restart 'store-value
                                         (make-kernel 2)))))
         (let ((channel (make-channel)))
           (submit-task channel 'identity 3)
           (is (= 3 (receive-result channel)))))
    (end-kernel)))
;;; `kill-tasks' must reject a null task category outright, and the
;;; sleeping task submitted under the default category must still be
;;; killable afterwards (exactly one task killed).
#-lparallel.without-kill
(base-test reject-kill-nil-test
  (with-temp-kernel (2)
    (let ((channel (make-channel)))
      (submit-task channel (lambda ()
                             ;; silence the worker's kill report
                             (setf *error-output* (make-broadcast-stream))
                             (sleep 999)))
      ;; give a worker time to pick up the task
      (sleep 0.2)
      (signals error
        (kill-tasks nil))
      ;; Bug fix: this kill count was previously computed but never
      ;; asserted; wrap it in `is' so a wrong count fails the test.
      (is (= 1 (kill-tasks :default))))))
;;; A task may kill its own category from inside a worker; the caller
;;; then sees `task-killed-error'.  Covers both the default category and
;;; a user-chosen one.
#-lparallel.without-kill
(full-test worker-suicide-test
  (let ((channel (make-channel)))
    (submit-task channel (lambda ()
                           (setf *error-output* (make-broadcast-stream))
                           (kill-tasks :default)))
    (signals task-killed-error
      (receive-result channel)))
  (let ((channel (make-channel))
        (*task-category* 'foo))
    (submit-task channel (lambda ()
                           (setf *error-output* (make-broadcast-stream))
                           (kill-tasks 'foo)))
    (signals task-killed-error
      (receive-result channel))))

;;; Submitting to a channel after its kernel has ended must error.
(full-test submit-after-end-kernel-test
  (let ((channel (make-channel)))
    (end-kernel :wait t)
    (signals error
      (submit-task channel (lambda ())))))

;;; Ending the same kernel twice must be harmless (idempotent).
(base-test double-end-kernel-test
  (let* ((kernel (make-kernel 2))
         (*kernel* kernel))
    (end-kernel :wait t)
    (let ((*kernel* kernel))
      (end-kernel :wait t)))
  ;; got here without an error
  (is (= 1 1)))

;;; The kernel readers -- kernel-name, kernel-bindings, kernel-context --
;;; must return exactly what make-kernel was given, and both :bindings
;;; and :context bindings must be visible inside tasks.
(base-test kernel-reader-test
  (setf *memo* nil)
  (let ((context (lambda (worker-loop)
                   (let ((*memo* 3))
                     (funcall worker-loop)))))
    (with-temp-kernel (2 :name "foo"
                         :bindings `((*blah* . 99))
                         :context context)
      (let ((channel (make-channel)))
        (submit-task channel (lambda ()
                               (declare (special *blah*))
                               (list *memo* *blah*)))
        (is (equal '(3 99) (receive-result channel))))
      (is (string-equal "foo" (kernel-name)))
      (is (equal '((*blah* . 99)) (kernel-bindings)))
      (is (eq context (kernel-context))))))

;; A :context function that aborts its worker thread immediately.
(defun aborting-context (worker-loop)
  (declare (ignore worker-loop))
  (invoke-abort-thread))

;; A :context function that never calls the worker loop -- also invalid.
(defun non-funcalling-context (worker-loop)
  (declare (ignore worker-loop)))

;;; A context that aborts must make make-kernel signal
;;; `kernel-creation-error' regardless of worker count.
(base-test context-error-test
  (dolist (n '(1 2 4 8))
    (with-thread-count-check
      (signals kernel-creation-error
        (make-kernel n :context #'aborting-context)))))

;;; A context that fails to invoke the worker loop is likewise rejected.
(base-test non-funcalling-context-test
  (dolist (n '(1 2 4 8))
    (with-thread-count-check
      (signals kernel-creation-error
        (make-kernel n :context 'non-funcalling-context)))))

;;; An undefined context function name errors without leaking threads.
(base-test nonexistent-context-test
  (with-thread-count-check
    (signals error
      (make-kernel 1 :context 'nonexistent-function))))
;;; `broadcast-task' runs a task on every worker and returns the vector of
;;; per-worker results.  Exercised across many worker counts: per-worker
;;; *memo* bindings start at 1 (not the global 0); a broadcast setf must
;;; reach all workers -- including busy ones and after heavy queuing --
;;; without touching the caller's global; extra arguments are passed on.
(base-test broadcast-test
  (setf *memo* 0)
  (dolist (n '(1 2 3 4 7 8 15 16))
    (with-temp-kernel (n :bindings '((*memo* . 1)))
      (is (= 0 *memo*))
      (let ((channel (make-channel)))
        (repeat 100 (submit-task channel (lambda () *memo*)))
        (repeat 100 (is (= 1 (receive-result channel)))))
      (is (every (lambda (x) (= x 1))
                 (broadcast-task (lambda () *memo*))))
      ;; occupy every worker, then broadcast while they are busy
      (let ((channel (make-channel)))
        (repeat (kernel-worker-count)
          (submit-task channel #'sleep 0.2)))
      (is (every (lambda (x) (= x 99))
                 (broadcast-task (lambda () (setf *memo* 99)))))
      (let ((channel (make-channel)))
        (repeat 1000 (submit-task channel (lambda ()))))
      (is (every (lambda (x) (= x 99))
                 (broadcast-task (lambda () (setf *memo* 99)))))
      (is (every (lambda (x) (= x 99))
                 (broadcast-task (lambda () (setf *memo* 99)))))
      ;; the caller's global binding is untouched by worker-side setf
      (is (= 0 *memo*))
      (let ((channel (make-channel)))
        (repeat 100 (submit-task channel (lambda () *memo*)))
        (repeat 100 (is (= 99 (receive-result channel)))))
      (let ((channel (make-channel)))
        (repeat 1000 (submit-task channel (lambda ()))))
      (is (every (lambda (x) (= x 99))
                 (broadcast-task (lambda () *memo*))))
      (is (every (lambda (x) (= x 99))
                 (broadcast-task (lambda () *memo*))))
      (is (every (lambda (x) (= x 5))
                 (broadcast-task #'+ 2 3))))))

;;; broadcast-task error cases: no kernel, non-function designators,
;;; transferred task errors, and the prohibition on broadcasting from
;;; inside a worker (directly or nested).
(full-test broadcast-error-test
  (let ((*kernel* nil))
    (signals no-kernel-error
      (broadcast-task (lambda ()))))
  (signals error
    (broadcast-task 3))
  (signals error
    (broadcast-task "foo"))
  (task-handler-bind ((error #'invoke-transfer-error))
    (signals foo-error
      (broadcast-task #'error 'foo-error))
    (let ((channel (make-channel)))
      (submit-task channel (lambda () (broadcast-task (lambda ()))))
      (signals error
        (receive-result channel)))
    (signals error
      (broadcast-task (lambda () (broadcast-task (lambda ())))))))

;;; kernel-worker-index is nil outside workers, a value in
;;; [0, worker-count) inside, and distinct per worker: broadcasting it and
;;; sorting must yield 0,1,...,n-1.
(full-test worker-index-test
  (is (null (kernel-worker-index)))
  (let ((channel (make-channel)))
    (repeat 1000
      (submit-task channel #'kernel-worker-index))
    (repeat 1000
      (let ((x (receive-result channel)))
        (is (and (>= x 0)
                 (< x (kernel-worker-count)))))))
  (loop for i across (sort (broadcast-task #'kernel-worker-index) #'<)
        for j from 0
        do (is (= i j))))
;;;; check for messed up imports
(defun packages-matching (string)
  "Return every registered package whose name contains STRING,
compared case-insensitively.  Order follows LIST-ALL-PACKAGES."
  (loop for package in (list-all-packages)
        when (search string (package-name package) :test #'equalp)
          collect package))
(defun assert-internal-symbols-not-imported (&key own-packages
                                                  third-party-packages)
  "Signal an error if any package in OWN-PACKAGES can access a symbol
whose home package is one of THIRD-PARTY-PACKAGES and which is merely
internal there -- evidence of an accidental import."
  (let ((foreign (mapcar #'find-package third-party-packages)))
    (dolist (own own-packages)
      (do-symbols (sym own)
        (let ((home (find (symbol-package sym) foreign)))
          (when (and home
                     (eq :internal
                         (nth-value 1 (find-symbol (symbol-name sym) home))))
            (error "Internal symbol ~s was imported into ~a."
                   sym (package-name own))))))))
;;; No lparallel package may have accidentally imported an internal symbol
;;; of its third-party dependencies.  The trailing (is t) registers the
;;; test as passing when the assertion helper returns without error.
(base-test package-test
  (assert-internal-symbols-not-imported
   :own-packages (packages-matching "lparallel")
   :third-party-packages '(#:alexandria #:bordeaux-threads))
  (is t))
| null | https://raw.githubusercontent.com/lmj/lparallel/9c11f40018155a472c540b63684049acc9b36e15/test/kernel-test.lisp | lisp |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the project nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
LOSS OF USE ,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
clisp warns unless interactive is given
got here without an error
check for messed up imports | Copyright ( c ) 2011 - 2012 , . All rights reserved .
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
HOLDER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
(in-package #:lparallel-test)
(full-test kernel-test
(let ((channel (make-channel)))
(mapcar (lambda (x) (submit-task channel (lambda () (* x x))))
(list 5 6 7 8))
(is (equal (list 25 36 49 64)
(sort (collect-n 4 (receive-result channel)) '<)))))
(full-test no-kernel-test
(let ((*kernel* nil))
(signals no-kernel-error
(submit-task (make-channel) (lambda ())))))
(base-test end-kernel-test
(repeat 10
(loop for n from 1 below 32
do (with-temp-kernel (n)
(is (= 1 1))))))
(full-test many-task-test
(let ((channel (make-channel)))
(repeat 1000
(submit-task channel (lambda ()))
(is (null (receive-result channel))))
(repeat 1000
(submit-task channel (lambda ())))
(repeat 1000
(is (null (receive-result channel))))
(repeat 1000
(let ((*task-priority* :low))
(submit-task channel (lambda ())))
(is (null (receive-result channel))))
(repeat 1000
(let ((*task-priority* :low))
(submit-task channel (lambda ()))))
(repeat 1000
(is (null (receive-result channel))))))
#-lparallel.without-kill
(base-test kill-during-end-kernel-test
(let* ((*kernel* (make-kernel 2))
(kernel *kernel*)
(out *standard-output*)
(channel (make-channel))
(handled (make-queue))
(finished (make-queue)))
(task-handler-bind ((error #'invoke-transfer-error))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(infinite-loop))))
(with-thread ()
(block top
(handler-bind ((task-killed-error
(lambda (e)
(declare (ignore e))
(push-queue t handled)
(return-from top))))
(receive-result channel))))
(sleep 0.2)
(let ((thread (with-thread ()
(let ((*standard-output* out))
(let ((*kernel* kernel))
(end-kernel :wait t)
(push-queue t finished))))))
(sleep 0.2)
(is (null (peek-queue finished)))
(is (eql 1 (kill-tasks :default)))
(sleep 0.2)
(is (eq t (peek-queue handled)))
(is (eq t (peek-queue finished)))
(is (not (null thread))))))
(full-test channel-capacity-test
(let ((channel (make-channel :fixed-capacity 1)))
(submit-task channel (lambda () 3))
(submit-task channel (lambda () 4))
(submit-task channel (lambda () 5))
(is (equal '(3 4 5)
avoid sbcl warning
(locally (declare (notinline sort))
(sort (list (receive-result channel)
(receive-result channel)
(receive-result channel))
#'<))))))
(full-test try-receive-test
(let ((channel (make-channel)))
(multiple-value-bind (a b) (try-receive-result channel)
(is (null a))
(is (null b)))
(submit-task channel (lambda () 3))
(sleep 0.1)
(multiple-value-bind (a b) (try-receive-result channel)
(is (= 3 a))
(is (eq t b)))
(multiple-value-bind (a b) (try-receive-result channel)
(is (null a))
(is (null b)))))
#-lparallel.without-bordeaux-threads-condition-wait-timeout
(full-test try-receive-timeout-test
(let ((channel (make-channel)))
(multiple-value-bind (a b) (try-receive-result channel :timeout 0.1)
(is (null a))
(is (null b)))
(submit-task channel (lambda () 3))
(sleep 0.1)
(multiple-value-bind (a b) (try-receive-result channel :timeout 0.1)
(is (= 3 a))
(is (eq t b)))
(multiple-value-bind (a b) (try-receive-result channel :timeout 0.1)
(is (null a))
(is (null b)))))
(full-test kernel-client-error-test
(task-handler-bind ((client-error #'invoke-transfer-error))
(let ((channel (make-channel)))
(submit-task channel (lambda () (error 'client-error)))
(signals client-error
(receive-result channel))))
(task-handler-bind
((error (lambda (e)
(declare (ignore e))
(invoke-restart 'transfer-error (make-condition 'foo-error)))))
(let ((channel (make-channel)))
(submit-task channel (lambda () (error 'client-error)))
(signals foo-error
(receive-result channel))))
(task-handler-bind
((error (lambda (e)
(declare (ignore e))
(invoke-restart 'transfer-error 'foo-error))))
(let ((channel (make-channel)))
(submit-task channel (lambda () (error 'client-error)))
(signals foo-error
(receive-result channel)))))
(full-test user-restart-test
(task-handler-bind
((foo-error (lambda (e)
(declare (ignore e))
(invoke-restart 'eleven))))
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(restart-case (error 'foo-error)
(eleven () 11))))
(is (eql 11 (receive-result channel)))))
(task-handler-bind
((error (lambda (e)
(declare (ignore e))
(invoke-restart 'eleven))))
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(restart-case (error 'foo-error)
(eleven () 11))))
(is (eql 11 (receive-result channel))))))
(full-test error-cascade-test
(task-handler-bind
((error (lambda (e)
(invoke-restart 'transfer-error e))))
(task-handler-bind
((error (lambda (e)
(declare (ignore e))
(error 'foo-error))))
(let ((channel (make-channel)))
(submit-task channel (lambda () (error 'client-error)))
(signals foo-error
(receive-result channel))))))
(base-test complex-handler-test
(flet ((estr (e)
(with-output-to-string (out)
(write e :escape nil :stream out))))
(let ((queue (make-queue)))
(ignore-errors
(handler-bind ((error (lambda (e)
(push-queue (cons 'a (estr e)) queue))))
(handler-bind ((error (lambda (e)
(push-queue (cons 'b (estr e)) queue)
(error "Z"))))
(handler-bind ((error (lambda (e)
(push-queue (cons 'c (estr e)) queue)
(error "Y"))))
(handler-bind ((error (lambda (e)
(push-queue (cons 'd (estr e)) queue))))
(error "X"))))))
(is (equal '((D . "X") (C . "X") (B . "Y") (A . "Z"))
(extract-queue queue))))
(with-temp-kernel (2)
(let ((queue (make-queue)))
(task-handler-bind ((error #'invoke-transfer-error))
(task-handler-bind ((error (lambda (e)
(push-queue (cons 'a (estr e)) queue))))
(task-handler-bind ((error (lambda (e)
(push-queue (cons 'b (estr e)) queue)
(error "Z"))))
(task-handler-bind ((error (lambda (e)
(push-queue (cons 'c (estr e)) queue)
(error "Y"))))
(task-handler-bind ((error (lambda (e)
(push-queue (cons 'd (estr e))
queue))))
(submit-task (make-channel) #'error "X"))))))
(is (equal '((D . "X") (C . "X") (B . "Y") (A . "Z"))
(loop repeat 4 collect (pop-queue queue))))))))
(base-test kernel-worker-context-test
(with-temp-kernel (2 :context (lambda (run)
(let ((*memo* 9))
(funcall run))))
(let ((channel (make-channel)))
(setf *memo* 7)
(submit-task channel (lambda () *memo*))
(is (eql 9 (receive-result channel)))
(is (eql 7 *memo*)))))
(base-test kernel-binding-test
(unwind-protect
(progn
(end-kernel)
(setf *kernel* (make-kernel 4))
(let ((channel (make-channel)))
(setf *memo* :main)
(submit-task channel (lambda () (setf *memo* :worker) *memo*))
(is (eq :worker (receive-result channel)))
(is (eq :worker *memo*))))
(end-kernel))
(with-temp-kernel (4 :bindings (acons '*memo* :worker nil))
(let ((node (assoc '*memo* (kernel-bindings))))
(is (eq (cdr node) :worker)))
(let ((channel (make-channel)))
(setf *memo* :main)
(submit-task channel (lambda () *memo*))
(is (eq :worker (receive-result channel)))
(is (eq :main *memo*)))))
(full-test kernel-var-test
(let ((channel (make-channel)))
(submit-task channel (lambda () *kernel*))
(is (eq *kernel* (receive-result channel)))))
(base-test task-categories-test
(with-temp-kernel (2)
(is (notany #'identity (task-categories-running)))
(let ((channel (make-channel)))
(submit-task channel (lambda () (sleep 0.4)))
(sleep 0.2)
(is (eql 1 (count :default (task-categories-running))))))
(with-temp-kernel (2)
(let ((channel (make-channel)))
(let ((*task-category* :foo))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 1 (count :foo (task-categories-running))))))
(with-temp-kernel (2)
(let ((channel (make-channel)))
(let ((*task-category* 999))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 1 (count 999 (task-categories-running))))))
(with-temp-kernel (2)
(let ((channel (make-channel)))
(let ((*task-category* :foo))
(submit-task channel (lambda () (sleep 0.4)))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 2 (count :foo (task-categories-running)))))))
(base-test no-kernel-restart-test
(let ((*kernel* nil))
(unwind-protect
(let ((flag nil))
(handler-bind
((no-kernel-error
(lambda (c)
(setf flag :called)
(invoke-restart (find-restart 'make-kernel c) 3))))
(let ((channel (make-channel)))
(submit-task channel (lambda (x) (* x x)) 3)
(is (= 9 (receive-result channel))))
(is (= 3 (kernel-worker-count)))
(is (eq :called flag))))
(end-kernel))))
(base-test kernel-warnings-test
(let ((*error-output* (make-string-output-stream)))
(with-temp-kernel (3)
(is (zerop (length (get-output-stream-string *error-output*))))
(let ((channel (make-channel)))
(submit-task channel (lambda () (warn "blah")))
(receive-result channel))
(is (search "blah" (get-output-stream-string *error-output*))))))
(full-test handler-bind-test
(task-handler-bind
((foo-error (lambda (e)
(declare (ignore e))
(invoke-restart 'double-me 3))))
(let ((channel (make-channel)))
(repeat 3
(submit-task channel (lambda ()
(restart-case (error 'foo-error)
(double-me (x)
:interactive (lambda ())
(* 2 x))))))
(is (equal '(6 6 6)
(collect-n 3 (receive-result channel)))))))
(full-test aborted-worker-test
(task-handler-bind ((foo-error (lambda (e)
(declare (ignore e))
(invoke-abort-thread))))
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(restart-case (error 'foo-error)
(eleven () 11))))
(signals task-killed-error
(receive-result channel)))))
(defun all-workers-alive-p ()
(sleep 0.2)
(every #'bordeaux-threads:thread-alive-p
(map 'list
#'lparallel.kernel::thread
(lparallel.kernel::workers *kernel*))))
(base-test active-worker-replacement-test
(with-thread-count-check
(with-temp-kernel (2)
(is (all-workers-alive-p))
(task-handler-bind ((foo-error (lambda (e)
(declare (ignore e))
(invoke-abort-thread))))
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(error 'foo-error)))
(signals task-killed-error
(receive-result channel))))
(is (all-workers-alive-p)))))
#-lparallel.without-kill
(base-test sleeping-worker-replacement-test
(with-thread-count-check
(with-temp-kernel (2 :bindings (list (cons '*error-output*
(make-broadcast-stream))))
(is (all-workers-alive-p))
(destroy-thread
(lparallel.kernel::thread
(aref (lparallel.kernel::workers *kernel*) 0)))
(is (all-workers-alive-p))
(destroy-thread
(lparallel.kernel::thread
(aref (lparallel.kernel::workers *kernel*) 0)))
(destroy-thread
(lparallel.kernel::thread
(aref (lparallel.kernel::workers *kernel*) 1)))
(is (all-workers-alive-p)))))
(define-condition foo-condition () ())
(full-test non-error-condition-test
(let ((result nil))
(task-handler-bind ((foo-condition (lambda (c)
(declare (ignore c))
(setf result :called))))
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(signal 'foo-condition)))
(receive-result channel)))
(is (eq :called result))))
#-lparallel.without-kill
(base-test custom-kill-task-test
(with-thread-count-check
(with-temp-kernel (2)
(let ((channel (make-channel)))
(let ((*task-category* 'blah))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(infinite-loop)))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(infinite-loop))))
(sleep 0.2)
(submit-task channel (lambda () 'survived))
(sleep 0.2)
(kill-tasks 'blah)
(sleep 0.2)
(let ((errors nil)
(regulars nil))
(repeat 3
(handler-case (push (receive-result channel) regulars)
(task-killed-error (e)
(push e errors))))
(is (= 2 (length errors)))
(is (equal '(survived) regulars)))))))
#-lparallel.without-kill
(base-test default-kill-task-test
(with-thread-count-check
(with-temp-kernel (2)
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(infinite-loop)))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(infinite-loop)))
(sleep 0.2)
(submit-task channel (lambda () 'survived))
(sleep 0.2)
(kill-tasks *task-category*)
(sleep 0.2)
(let ((errors nil)
(regulars nil))
(repeat 3
(handler-case (push (receive-result channel) regulars)
(task-killed-error (e)
(push e errors))))
(is (= 2 (length errors)))
(is (equal '(survived) regulars)))))))
(base-test submit-timeout-test
(with-temp-kernel (2)
(let ((channel (make-channel)))
(declare (notinline submit-timeout))
(submit-timeout channel 0.1 'timeout)
(submit-task channel (lambda () 3))
(is (eql 3 (receive-result channel)))
(is (eq 'timeout (receive-result channel))))))
#-lparallel.without-kill
(base-test cancel-timeout-test
(with-temp-kernel (2)
(locally (declare (notinline submit-timeout cancel-timeout))
(let* ((channel (make-channel))
(timeout (submit-timeout channel 999 'timeout)))
(sleep 0.2)
(cancel-timeout timeout 'a)
(is (eq 'a (receive-result channel)))))))
#-lparallel.without-kill
(base-test kill-timeout-test
(with-temp-kernel (2)
(locally (declare (notinline submit-timeout))
(let* ((channel (make-channel))
(timeout (submit-timeout channel 999 'timeout)))
(sleep 0.2)
(lparallel.kernel::with-timeout-slots (lparallel.kernel::thread) timeout
(destroy-thread lparallel.kernel::thread))
(signals task-killed-error
(receive-result channel))))))
(define-condition foo-condition-2 (condition) ())
(full-test signaling-after-signal-test
(let ((q (make-queue)))
(task-handler-bind ((foo-condition-2 (lambda (c)
(declare (ignore c))
(push-queue 'outer q))))
(task-handler-bind ((foo-condition (lambda (c)
(declare (ignore c))
(push-queue 'inner q)
(signal 'foo-condition-2))))
(let ((channel (make-channel)))
(submit-task channel (lambda () (signal 'foo-condition)))
(receive-result channel))))
(is (equal '(inner outer)
(extract-queue q)))))
(base-test task-handler-bind-syntax-test
(signals error
(macroexpand '(task-handler-bind ((())))))
(signals error
(macroexpand '(task-handler-bind (()))))
(signals error
(macroexpand '(task-handler-bind ((x)))))
(signals error
(macroexpand '(task-handler-bind ((x y z))))))
(full-test print-kernel-test
(is (plusp (length (with-output-to-string (s)
(print *kernel* s))))))
(base-test end-kernel-wait-test
(with-thread-count-check
(let ((*kernel* (make-kernel 3)))
(unwind-protect
(let ((channel (make-channel)))
(submit-task channel (lambda () (sleep 1))))
(is (eql 3 (length (end-kernel :wait t))))))))
(base-test steal-work-test
(with-temp-kernel (2)
(let ((channel (make-channel)))
(submit-task channel (lambda () (sleep 0.4)))
(submit-task channel (lambda () (sleep 0.4)))
(sleep 0.1)
(let ((execp nil))
(submit-task channel (lambda () (setf execp t)))
(sleep 0.1)
(is (eq t (lparallel.kernel::steal-work
*kernel*
lparallel.kernel::*worker*)))
(is (eq t execp))
(is (eq nil (lparallel.kernel::steal-work
*kernel*
lparallel.kernel::*worker*))))))
(with-temp-kernel (2)
(let ((channel (make-channel)))
(submit-task channel (lambda () (sleep 0.2)))
(submit-task channel (lambda () (sleep 0.2)))
(sleep 0.1)
(is (eq nil (lparallel.kernel::steal-work
*kernel*
lparallel.kernel::*worker*))))))
(base-test kernel-store-value-test
(unwind-protect
(handler-bind ((no-kernel-error
(lambda (e)
(declare (ignore e))
(invoke-restart 'store-value
(make-kernel 2)))))
(let ((channel (make-channel)))
(submit-task channel 'identity 3)
(is (= 3 (receive-result channel)))))
(end-kernel)))
#-lparallel.without-kill
(base-test reject-kill-nil-test
(with-temp-kernel (2)
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(sleep 999)))
(sleep 0.2)
(signals error
(kill-tasks nil))
(= 1 (kill-tasks :default)))))
#-lparallel.without-kill
(full-test worker-suicide-test
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(kill-tasks :default)))
(signals task-killed-error
(receive-result channel)))
(let ((channel (make-channel))
(*task-category* 'foo))
(submit-task channel (lambda ()
(setf *error-output* (make-broadcast-stream))
(kill-tasks 'foo)))
(signals task-killed-error
(receive-result channel))))
(full-test submit-after-end-kernel-test
(let ((channel (make-channel)))
(end-kernel :wait t)
(signals error
(submit-task channel (lambda ())))))
(base-test double-end-kernel-test
(let* ((kernel (make-kernel 2))
(*kernel* kernel))
(end-kernel :wait t)
(let ((*kernel* kernel))
(end-kernel :wait t)))
(is (= 1 1)))
(base-test kernel-reader-test
(setf *memo* nil)
(let ((context (lambda (worker-loop)
(let ((*memo* 3))
(funcall worker-loop)))))
(with-temp-kernel (2 :name "foo"
:bindings `((*blah* . 99))
:context context)
(let ((channel (make-channel)))
(submit-task channel (lambda ()
(declare (special *blah*))
(list *memo* *blah*)))
(is (equal '(3 99) (receive-result channel))))
(is (string-equal "foo" (kernel-name)))
(is (equal '((*blah* . 99)) (kernel-bindings)))
(is (eq context (kernel-context))))))
(defun aborting-context (worker-loop)
(declare (ignore worker-loop))
(invoke-abort-thread))
(defun non-funcalling-context (worker-loop)
(declare (ignore worker-loop)))
(base-test context-error-test
(dolist (n '(1 2 4 8))
(with-thread-count-check
(signals kernel-creation-error
(make-kernel n :context #'aborting-context)))))
(base-test non-funcalling-context-test
(dolist (n '(1 2 4 8))
(with-thread-count-check
(signals kernel-creation-error
(make-kernel n :context 'non-funcalling-context)))))
(base-test nonexistent-context-test
(with-thread-count-check
(signals error
(make-kernel 1 :context 'nonexistent-function))))
(base-test broadcast-test
(setf *memo* 0)
(dolist (n '(1 2 3 4 7 8 15 16))
(with-temp-kernel (n :bindings '((*memo* . 1)))
(is (= 0 *memo*))
(let ((channel (make-channel)))
(repeat 100 (submit-task channel (lambda () *memo*)))
(repeat 100 (is (= 1 (receive-result channel)))))
(is (every (lambda (x) (= x 1))
(broadcast-task (lambda () *memo*))))
(let ((channel (make-channel)))
(repeat (kernel-worker-count)
(submit-task channel #'sleep 0.2)))
(is (every (lambda (x) (= x 99))
(broadcast-task (lambda () (setf *memo* 99)))))
(let ((channel (make-channel)))
(repeat 1000 (submit-task channel (lambda ()))))
(is (every (lambda (x) (= x 99))
(broadcast-task (lambda () (setf *memo* 99)))))
(is (every (lambda (x) (= x 99))
(broadcast-task (lambda () (setf *memo* 99)))))
(is (= 0 *memo*))
(let ((channel (make-channel)))
(repeat 100 (submit-task channel (lambda () *memo*)))
(repeat 100 (is (= 99 (receive-result channel)))))
(let ((channel (make-channel)))
(repeat 1000 (submit-task channel (lambda ()))))
(is (every (lambda (x) (= x 99))
(broadcast-task (lambda () *memo*))))
(is (every (lambda (x) (= x 99))
(broadcast-task (lambda () *memo*))))
(is (every (lambda (x) (= x 5))
(broadcast-task #'+ 2 3))))))
(full-test broadcast-error-test
(let ((*kernel* nil))
(signals no-kernel-error
(broadcast-task (lambda ()))))
(signals error
(broadcast-task 3))
(signals error
(broadcast-task "foo"))
(task-handler-bind ((error #'invoke-transfer-error))
(signals foo-error
(broadcast-task #'error 'foo-error))
(let ((channel (make-channel)))
(submit-task channel (lambda () (broadcast-task (lambda ()))))
(signals error
(receive-result channel)))
(signals error
(broadcast-task (lambda () (broadcast-task (lambda ())))))))
(full-test worker-index-test
(is (null (kernel-worker-index)))
(let ((channel (make-channel)))
(repeat 1000
(submit-task channel #'kernel-worker-index))
(repeat 1000
(let ((x (receive-result channel)))
(is (and (>= x 0)
(< x (kernel-worker-count)))))))
(loop for i across (sort (broadcast-task #'kernel-worker-index) #'<)
for j from 0
do (is (= i j))))
(defun packages-matching (string)
(remove-if-not (lambda (package)
(search string (package-name package) :test #'equalp))
(list-all-packages)))
(defun assert-internal-symbols-not-imported (&key own-packages
third-party-packages)
(let ((third-party-packages (mapcar #'find-package third-party-packages)))
(dolist (own-package own-packages)
(do-symbols (symbol own-package)
(when-let (third-party-package (find (symbol-package symbol)
third-party-packages))
(when (eq :internal (nth-value 1 (find-symbol (symbol-name symbol)
third-party-package)))
(error "Internal symbol ~s was imported into ~a."
symbol (package-name own-package))))))))
(base-test package-test
(assert-internal-symbols-not-imported
:own-packages (packages-matching "lparallel")
:third-party-packages '(#:alexandria #:bordeaux-threads))
(is t))
|
36542d02c484f2d7b330af14cb44c5f7244a919497c9b311045a883bdd513bea | madstap/comfy | project.clj | (defproject madstap/comfy "1.0.5"
:description "Clojure(script) utils"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.9.0"]
[org.clojure/clojurescript "1.9.946" :scope "provided"]]
:plugins [[lein-codox "0.10.3"]
[lein-cljsbuild "1.1.6"]
[lein-doo "0.1.7"]]
:codox {:output-path "docs"
:metadata {:doc/format :markdown}
:source-uri "/{version}/{filepath}#L{line}"}
:cljsbuild
{:builds
{:test
{:source-paths ["src" "test"]
:compiler {:output-to "target/main.js"
:output-dir "target"
:main madstap.comfy.test-runner
:process-shim false ; ¯\_(ツ)_/¯
:optimizations :simple}}}}
:doo {:paths {:rhino "lein run -m org.mozilla.javascript.tools.shell.Main"}}
:aliases
{"test-cljs" ["doo" "rhino" "test" "once"]
"test-clj" ["with-profile" "+1.9" "test"]
"test-all" ["do" ["test-clj"] ["test-cljs"]]}
:profiles
{:dev {:dependencies [[com.cemerick/piggieback "0.2.2"]
[org.clojure/test.check "0.10.0-alpha2"]
[org.clojure/tools.nrepl "0.2.10"]]
:repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}}
:test {:dependencies [[org.mozilla/rhino "1.7.7.2"]]}
:1.9 {:dependencies [[org.clojure/clojure "1.9.0-beta1"]]}})
| null | https://raw.githubusercontent.com/madstap/comfy/0af9cc940bcf1726e0c11c6f19986620d8cfd345/project.clj | clojure | ¯\_(ツ)_/¯ | (defproject madstap/comfy "1.0.5"
:description "Clojure(script) utils"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.9.0"]
[org.clojure/clojurescript "1.9.946" :scope "provided"]]
:plugins [[lein-codox "0.10.3"]
[lein-cljsbuild "1.1.6"]
[lein-doo "0.1.7"]]
:codox {:output-path "docs"
:metadata {:doc/format :markdown}
:source-uri "/{version}/{filepath}#L{line}"}
:cljsbuild
{:builds
{:test
{:source-paths ["src" "test"]
:compiler {:output-to "target/main.js"
:output-dir "target"
:main madstap.comfy.test-runner
:optimizations :simple}}}}
:doo {:paths {:rhino "lein run -m org.mozilla.javascript.tools.shell.Main"}}
:aliases
{"test-cljs" ["doo" "rhino" "test" "once"]
"test-clj" ["with-profile" "+1.9" "test"]
"test-all" ["do" ["test-clj"] ["test-cljs"]]}
:profiles
{:dev {:dependencies [[com.cemerick/piggieback "0.2.2"]
[org.clojure/test.check "0.10.0-alpha2"]
[org.clojure/tools.nrepl "0.2.10"]]
:repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}}
:test {:dependencies [[org.mozilla/rhino "1.7.7.2"]]}
:1.9 {:dependencies [[org.clojure/clojure "1.9.0-beta1"]]}})
|
c0485baea4035191ebeba91eed2285ce5edb45d00e7aba43a08b49d1b9b0e904 | charlieg/Sparser | compile-singletons.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:(CL-USER COMMON-LISP) -*-
Copyright ( c ) 2007 BBNT Solutions LLC . All Rights Reserved
$ I d : compile-singletons.lisp 207 2009 - 06 - 18 20:59:16Z cgreenba $
;;;
;;; File: "compile-singletons"
;;; Module: "init;scripts:"
version : March 2007
;; Derived from compile-everthing on 3/20/07
(in-package :sparser)
;;;--------------------------------------------------
;;; compile individual files in the preloader, etc.
;;;--------------------------------------------------
(just-compile "init;everything")
(just-compile "init;Lisp:kind-of-lisp")
(just-compile "init;Lisp:grammar-module")
(just-compile "init;Lisp:ddef-logical")
(just-compile "init;Lisp:lload")
(just-compile "init;scripts:just-dm&p")
(when nil
(just-compile "init;scripts:Academic version")
(just-compile "init;scripts:Apple loader")
(just-compile "init;scripts:BBN")
(just-compile "init;scripts:compile everything")
(just-compile "init;scripts:compile academic")
(just-compile "init;scripts:copy everything")
(just-compile "init;scripts:copy academic")
(just-compile "init;scripts:just dm&p")
(just-compile "init;scripts:no grammar")
(just-compile "init;scripts:SUN")
(just-compile "init;scripts:v2.3a") ;; standard
(just-compile "init;scripts:v2.3ag") ;; "academic grammar"
(just-compile "init;scripts:v2.3g") ;; (public) "grammar"
)
(just-compile "version;loaders:grammar")
(just-compile "version;loaders:grammar modules")
(just-compile "version;loaders:lisp-switch-settings")
(just-compile "version;loaders:logicals")
(just-compile "version;loaders:master-loader")
(just-compile "version;loaders:model")
(unless *nothing-Mac-specific*
(just-compile "version;loaders:save routine"))
(just-compile "version;loaders:stubs")
(just-compile "version;salutation")
(just-compile "version;updating")
(just-compile "config;explicitly-loaded-files")
(just-compile "config;image")
(just-compile "config;launch")
(just-compile "config;load")
(when nil
(just-compile "grammar-configurations;academic grammar")
(just-compile "grammar-configurations;AssetNet")
(just-compile "grammar-configurations;bbn")
(just-compile "grammar-configurations;Debris analysis")
(just-compile "grammar-configurations;full grammar")
(just-compile "grammar-configurations;minimal dm&p grammar")
(just-compile "grammar-configurations;partial grammar")
(just-compile "grammar-configurations;public grammar")
(just-compile "grammar-configurations;SUN")
)
(unless *nothing-Mac-specific*
(just-compile "images;do-the-save"))
(when nil
( just - compile " init;workspaces : Apple " ) -- references
(just-compile "init;workspaces:Darwin")
(just-compile "init;workspaces:dm&p")
(just-compile "init;workspaces:generic")
(just-compile "init;workspaces:Mari")
(just-compile "init;workspaces:quarterly earnings")
(just-compile "init;workspaces:Sandia")
(just-compile "init;workspaces:SUN")
(just-compile "init;workspaces:SUN1")
(just-compile "init;workspaces:SUN2")
(just-compile "init;workspaces:Switchboard")
(just-compile "init;workspaces:text segments")
(just-compile "init;workspaces:tipster")
(just-compile "init;workspaces:Who's News")
(just-compile "init;workspaces:workbench")
:finished-compilation
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/init/scripts/compile-singletons.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:(CL-USER COMMON-LISP) -*-
File: "compile-singletons"
Module: "init;scripts:"
Derived from compile-everthing on 3/20/07
--------------------------------------------------
compile individual files in the preloader, etc.
--------------------------------------------------
standard
"academic grammar"
(public) "grammar" | Copyright ( c ) 2007 BBNT Solutions LLC . All Rights Reserved
$ I d : compile-singletons.lisp 207 2009 - 06 - 18 20:59:16Z cgreenba $
version : March 2007
(in-package :sparser)
(just-compile "init;everything")
(just-compile "init;Lisp:kind-of-lisp")
(just-compile "init;Lisp:grammar-module")
(just-compile "init;Lisp:ddef-logical")
(just-compile "init;Lisp:lload")
(just-compile "init;scripts:just-dm&p")
(when nil
(just-compile "init;scripts:Academic version")
(just-compile "init;scripts:Apple loader")
(just-compile "init;scripts:BBN")
(just-compile "init;scripts:compile everything")
(just-compile "init;scripts:compile academic")
(just-compile "init;scripts:copy everything")
(just-compile "init;scripts:copy academic")
(just-compile "init;scripts:just dm&p")
(just-compile "init;scripts:no grammar")
(just-compile "init;scripts:SUN")
)
(just-compile "version;loaders:grammar")
(just-compile "version;loaders:grammar modules")
(just-compile "version;loaders:lisp-switch-settings")
(just-compile "version;loaders:logicals")
(just-compile "version;loaders:master-loader")
(just-compile "version;loaders:model")
(unless *nothing-Mac-specific*
(just-compile "version;loaders:save routine"))
(just-compile "version;loaders:stubs")
(just-compile "version;salutation")
(just-compile "version;updating")
(just-compile "config;explicitly-loaded-files")
(just-compile "config;image")
(just-compile "config;launch")
(just-compile "config;load")
(when nil
(just-compile "grammar-configurations;academic grammar")
(just-compile "grammar-configurations;AssetNet")
(just-compile "grammar-configurations;bbn")
(just-compile "grammar-configurations;Debris analysis")
(just-compile "grammar-configurations;full grammar")
(just-compile "grammar-configurations;minimal dm&p grammar")
(just-compile "grammar-configurations;partial grammar")
(just-compile "grammar-configurations;public grammar")
(just-compile "grammar-configurations;SUN")
)
(unless *nothing-Mac-specific*
(just-compile "images;do-the-save"))
(when nil
( just - compile " init;workspaces : Apple " ) -- references
(just-compile "init;workspaces:Darwin")
(just-compile "init;workspaces:dm&p")
(just-compile "init;workspaces:generic")
(just-compile "init;workspaces:Mari")
(just-compile "init;workspaces:quarterly earnings")
(just-compile "init;workspaces:Sandia")
(just-compile "init;workspaces:SUN")
(just-compile "init;workspaces:SUN1")
(just-compile "init;workspaces:SUN2")
(just-compile "init;workspaces:Switchboard")
(just-compile "init;workspaces:text segments")
(just-compile "init;workspaces:tipster")
(just-compile "init;workspaces:Who's News")
(just-compile "init;workspaces:workbench")
:finished-compilation
|
2a158bd53e3f5eaf51db78ecd149879964f7516bedd85dc706efa569256e1f3f | avsm/platform | test.ml | (**************************************************************************)
(* *)
: a generic graph library for OCaml
Copyright ( C ) 2004 - 2007
, and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
open Format
open Graph
module Int = struct
type t = int
let compare = compare
let hash = Hashtbl.hash
let equal = (=)
let default = 0
end
module G = Persistent.Digraph.ConcreteLabeled(Int)(Int)
let g = G.empty
let g = G.add_vertex g 1
let g = G.add_edge_e g (G.E.create 1 10 2)
let g = G.add_edge_e g (G.E.create 2 50 3)
let g = G.add_edge_e g (G.E.create 1 30 4)
let g = G.add_edge_e g (G.E.create 1 100 5)
let g = G.add_edge_e g (G.E.create 3 10 5)
let g = G.add_edge_e g (G.E.create 4 20 3)
let g = G.add_edge_e g (G.E.create 4 60 5)
let g = G.remove_vertex g 4
let gc = G.add_edge_e g (G.E.create 5 10 1)
let gc = G.add_vertex gc 6
module W = struct
type edge = G.E.t
type t = int
let weight e = G.E.label e
let zero = 0
let add = (+)
let sub = (-)
let compare = compare
end
module Dij = Path.Dijkstra(G)(W)
let p,w = Dij.shortest_path gc 1 5
open G.E
let () = List.iter (fun e -> printf "[%d -> %d]" (src e) (dst e)) p; printf "@."
module Comp = Components.Make(G)
let g = G.add_edge g 3 2
let n, f = Comp.scc g
let () = G.iter_edges (fun u v -> printf "%d -> %d@." u v) g
let () = printf "%d components@." n
let () = G.iter_vertex (fun v -> printf " %d -> %d@." v (f v)) g
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/ocamlgraph.1.8.8%2Bdune/tests/test.ml | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************ | : a generic graph library for OCaml
Copyright ( C ) 2004 - 2007
, and
modify it under the terms of the GNU Library General Public
License version 2 , with the special exception on linking
open Format
open Graph
module Int = struct
type t = int
let compare = compare
let hash = Hashtbl.hash
let equal = (=)
let default = 0
end
module G = Persistent.Digraph.ConcreteLabeled(Int)(Int)
let g = G.empty
let g = G.add_vertex g 1
let g = G.add_edge_e g (G.E.create 1 10 2)
let g = G.add_edge_e g (G.E.create 2 50 3)
let g = G.add_edge_e g (G.E.create 1 30 4)
let g = G.add_edge_e g (G.E.create 1 100 5)
let g = G.add_edge_e g (G.E.create 3 10 5)
let g = G.add_edge_e g (G.E.create 4 20 3)
let g = G.add_edge_e g (G.E.create 4 60 5)
let g = G.remove_vertex g 4
let gc = G.add_edge_e g (G.E.create 5 10 1)
let gc = G.add_vertex gc 6
module W = struct
type edge = G.E.t
type t = int
let weight e = G.E.label e
let zero = 0
let add = (+)
let sub = (-)
let compare = compare
end
module Dij = Path.Dijkstra(G)(W)
let p,w = Dij.shortest_path gc 1 5
open G.E
let () = List.iter (fun e -> printf "[%d -> %d]" (src e) (dst e)) p; printf "@."
module Comp = Components.Make(G)
let g = G.add_edge g 3 2
let n, f = Comp.scc g
let () = G.iter_edges (fun u v -> printf "%d -> %d@." u v) g
let () = printf "%d components@." n
let () = G.iter_vertex (fun v -> printf " %d -> %d@." v (f v)) g
|
1fa4dbcb1799f9f9cf25347ecd197741923658e9810bed2056d07b67775ba2d6 | yellowtides/owenbot-hs | Config.hs | # LANGUAGE DeriveGeneric #
module Config where
import Control.Exception (IOException, try)
import Data.Aeson (FromJSON, ToJSON, eitherDecode, encode)
import qualified Data.ByteString as BS (ByteString, readFile, writeFile)
import qualified Data.ByteString.Lazy as BL (ByteString, fromStrict, toStrict)
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import GHC.Generics
import System.Directory
(XdgDirectory(XdgConfig), createDirectoryIfMissing, getXdgDirectory)
import Discord.Types (ChannelId)
| OwenConfig represents the configuration of Owenbot !
data OwenConfig = OwenConfig
{ owenConfigToken :: T.Text
, owenConfigDevs :: [T.Text]
these two do n't do anything yet
, owenConfigDadFreq :: Int -- because reading values every time is slow and a solution can't be thought of
, owenConfigRepoDir :: Maybe FilePath
, owenConfigStartupChan :: ChannelId
, owenConfigQuizChan :: ChannelId -- maybe move this into a per-guild db
}
deriving (Generic, Show)
instance FromJSON OwenConfig
instance ToJSON OwenConfig
getConfigDir :: IO FilePath
getConfigDir = getXdgDirectory XdgConfig "owen"
| Takes a filename and reads from it into a data structure .
readConfig :: IO OwenConfig
readConfig = do
createDirectoryIfMissing True <$> getConfigDir
fp <- (<> "/config.json") <$> getConfigDir
json <- BS.readFile fp
case eitherDecode (BL.fromStrict json) of
Left e ->
error
$ "Incorrect config format, can't continue running Owen:\n[ERROR] "
<> e
Right cfg -> pure cfg
-- (commented since writing to config is never necessary and goes against rules)
-- | Takes a filename (with no suffix) and a data structure, and writes a json
-- file to that location.
writeConfig : : ToJSON a = > String - > a - > IO ( )
-- writeConfig file db = do
-- fp <- mkPath file
-- BS.writeFile fp $ BL.toStrict $ encode db
| null | https://raw.githubusercontent.com/yellowtides/owenbot-hs/74669e8620b5202e7ad9c5ac69cbf74118fc9b64/src/Config.hs | haskell | because reading values every time is slow and a solution can't be thought of
maybe move this into a per-guild db
(commented since writing to config is never necessary and goes against rules)
| Takes a filename (with no suffix) and a data structure, and writes a json
file to that location.
writeConfig file db = do
fp <- mkPath file
BS.writeFile fp $ BL.toStrict $ encode db | # LANGUAGE DeriveGeneric #
module Config where
import Control.Exception (IOException, try)
import Data.Aeson (FromJSON, ToJSON, eitherDecode, encode)
import qualified Data.ByteString as BS (ByteString, readFile, writeFile)
import qualified Data.ByteString.Lazy as BL (ByteString, fromStrict, toStrict)
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import GHC.Generics
import System.Directory
(XdgDirectory(XdgConfig), createDirectoryIfMissing, getXdgDirectory)
import Discord.Types (ChannelId)
| OwenConfig represents the configuration of Owenbot !
data OwenConfig = OwenConfig
{ owenConfigToken :: T.Text
, owenConfigDevs :: [T.Text]
these two do n't do anything yet
, owenConfigRepoDir :: Maybe FilePath
, owenConfigStartupChan :: ChannelId
}
deriving (Generic, Show)
instance FromJSON OwenConfig
instance ToJSON OwenConfig
getConfigDir :: IO FilePath
getConfigDir = getXdgDirectory XdgConfig "owen"
| Takes a filename and reads from it into a data structure .
readConfig :: IO OwenConfig
readConfig = do
createDirectoryIfMissing True <$> getConfigDir
fp <- (<> "/config.json") <$> getConfigDir
json <- BS.readFile fp
case eitherDecode (BL.fromStrict json) of
Left e ->
error
$ "Incorrect config format, can't continue running Owen:\n[ERROR] "
<> e
Right cfg -> pure cfg
writeConfig : : ToJSON a = > String - > a - > IO ( )
|
1d6e98dc726ddaa88ed26b7ba120d3d0ee53f3e041cedee9b3a288cc4547052c | haskell-tools/haskell-tools | Quoted.hs | # LANGUAGE TemplateHaskell #
module TH.Quoted where
import qualified Text.Read.Lex (Lexeme)
$(let x = ''Text.Read.Lex.Lexeme in return [])
| null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/refactor/examples/TH/Quoted.hs | haskell | # LANGUAGE TemplateHaskell #
module TH.Quoted where
import qualified Text.Read.Lex (Lexeme)
$(let x = ''Text.Read.Lex.Lexeme in return [])
|
|
8e48bab51ee4eac5a200fe448dee61e5cb123b9e425f4e58810d21b0fc0bf685 | bobzhang/fan | S.ml | open Fan.Syntax;
module Ast = Camlp4Ast;
open FanUtil;
let open FanParsers in begin
pa_r (module Fan);
pa_rp (module Fan);
pa_q (module Fan);
pa_g (module Fan);
pa_l (module Fan);
pa_m (module Fan);
end;
Fan.iter_and_take_callbacks (fun (_,f) -> f ()) ;
let t e s = Gram.parse_string e FanLoc.string_loc s;
(* {:extend.create|Gram s v|}; *)
with " patt "
(* {:extend|Gram *)
(* s: *)
(* ["`"; a_ident{s} -> {| `$s |} *)
| " ` " ; a_ident{v } ; ` ANT ( ( " " | " anti " as n ) , s )
- > { | ` $ v $ ( anti : mk_anti ~c:"patt " n s)| }
| " ` " ; a_ident{s } ; ` STR(_,v ) - > { | ` $ s $ str : v | }
(* |"`"; a_ident{s}; `LID x -> {| `$s $lid:x |} *)
(* |"`"; a_ident{s}; "("; L1 v SEP ","{v}; ")" -> *)
(* match v with *)
(* [ [x] -> {| `$s $x |} *)
| [ x::xs ] - > { | ` $ s ( $ x,$list : xs ) | }
(* | _ -> assert false ] ] *)
(* v: *)
(* [ `STR(_,s) -> {| $str:s|} *)
(* | `LID x -> (\* {| $(id:{:ident|$lid:x|}) |} *\) {| $lid:x|} *)
| S{p1 } ; " | " ; S{p2 } - > { |$p1 | $ p2 | }
| " ( " ; S{p1 } ; " as " ; S{p2 } ; " ) " - > { | ( $ p1 as $ p2 ) | }
(* ] *)
(* |}; *)
(* (\* *)
t s " ` A ( ( \"x\"|\"y\ " as n),s ) " ;
(* t s "`A $x"; *)
(* t s `UID ("First"|"Last" as x ) *)
Comparing two ant
(* *\) *)
(* Gram.dump Format.std_formatter expr; *)
(* {:delete|Gram ident:[`UID i]|}; *)
( \ * { : delete|Gram expr:[TRY ; S ; " ) " ] | } ; * \ )
(* t expr "A.B.C.D.c"; *)
{:extend.create|Gram a b a_eoi |} ;
{:extend|Gram
a:
[ TRY module_longident_dot_lparen{s} -> s
| b{s} -> s ]
b "ident":
[
(* a_UIDENT{i} -> {| $uid:i |} *)
(* | *) a_LIDENT{i} -> {| $lid:i |}
| `UID i -> {|$uid:i|}
| `UID i; "."; S{j} -> {| $uid:i.$j |}
(* | a_UIDENT{i}; "."; S{j} -> {| $uid:i.$j |} *)]
[ ` LID i - > { | $ lid : }
| ` UID s ; " . " ; S{j } - > { |$uid : s.$j| } ]
a_eoi: [a{i} ; `EOI -> i]
|};
(* {:extend.create|Gram c|} ; *)
(* with "ident"{:extend|Gram local:d; *)
c : [ d { x } ; " ( " - > { | ]
(* d:[`UID x -> x ] *)
(* |}; *)
t a_eoi "A.C.U.b" ;
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/todoml/testr/S.ml | ocaml | {:extend.create|Gram s v|};
{:extend|Gram
s:
["`"; a_ident{s} -> {| `$s |}
|"`"; a_ident{s}; `LID x -> {| `$s $lid:x |}
|"`"; a_ident{s}; "("; L1 v SEP ","{v}; ")" ->
match v with
[ [x] -> {| `$s $x |}
| _ -> assert false ] ]
v:
[ `STR(_,s) -> {| $str:s|}
| `LID x -> (\* {| $(id:{:ident|$lid:x|}) |} *\) {| $lid:x|}
]
|};
(\*
t s "`A $x";
t s `UID ("First"|"Last" as x )
*\)
Gram.dump Format.std_formatter expr;
{:delete|Gram ident:[`UID i]|};
t expr "A.B.C.D.c";
a_UIDENT{i} -> {| $uid:i |}
|
| a_UIDENT{i}; "."; S{j} -> {| $uid:i.$j |}
{:extend.create|Gram c|} ;
with "ident"{:extend|Gram local:d;
d:[`UID x -> x ]
|}; | open Fan.Syntax;
module Ast = Camlp4Ast;
open FanUtil;
let open FanParsers in begin
pa_r (module Fan);
pa_rp (module Fan);
pa_q (module Fan);
pa_g (module Fan);
pa_l (module Fan);
pa_m (module Fan);
end;
Fan.iter_and_take_callbacks (fun (_,f) -> f ()) ;
let t e s = Gram.parse_string e FanLoc.string_loc s;
with " patt "
| " ` " ; a_ident{v } ; ` ANT ( ( " " | " anti " as n ) , s )
- > { | ` $ v $ ( anti : mk_anti ~c:"patt " n s)| }
| " ` " ; a_ident{s } ; ` STR(_,v ) - > { | ` $ s $ str : v | }
| [ x::xs ] - > { | ` $ s ( $ x,$list : xs ) | }
| S{p1 } ; " | " ; S{p2 } - > { |$p1 | $ p2 | }
| " ( " ; S{p1 } ; " as " ; S{p2 } ; " ) " - > { | ( $ p1 as $ p2 ) | }
t s " ` A ( ( \"x\"|\"y\ " as n),s ) " ;
Comparing two ant
( \ * { : delete|Gram expr:[TRY ; S ; " ) " ] | } ; * \ )
{:extend.create|Gram a b a_eoi |} ;
{:extend|Gram
a:
[ TRY module_longident_dot_lparen{s} -> s
| b{s} -> s ]
b "ident":
[
| `UID i -> {|$uid:i|}
| `UID i; "."; S{j} -> {| $uid:i.$j |}
[ ` LID i - > { | $ lid : }
| ` UID s ; " . " ; S{j } - > { |$uid : s.$j| } ]
a_eoi: [a{i} ; `EOI -> i]
|};
c : [ d { x } ; " ( " - > { | ]
t a_eoi "A.C.U.b" ;
|
08f2630fa66f364d7212d5ff5520548fbb64665e67e8558fc24cb9a99ae9a8de | naushadh/hello-world | Lib.hs | {-# LANGUAGE OverloadedStrings #-}
module Lib
( dbFour
, PSQL.defaultConnectInfo
, PSQL.ConnectInfo(..)
) where
import qualified Database.PostgreSQL.Simple as PSQL
dbFour :: PSQL.ConnectInfo -> IO ()
dbFour connectInfo = do
conn <- PSQL.connect connectInfo
[PSQL.Only i] <- PSQL.query_ conn "select 2 + 2"
putStrLn "dbFour"
putStrLn . show $ (i :: Int)
return () | null | https://raw.githubusercontent.com/naushadh/hello-world/742b24ed9be53d95a7f6f9177b44132c635b78ab/hello-postgresql/src/Lib.hs | haskell | # LANGUAGE OverloadedStrings # |
module Lib
( dbFour
, PSQL.defaultConnectInfo
, PSQL.ConnectInfo(..)
) where
import qualified Database.PostgreSQL.Simple as PSQL
dbFour :: PSQL.ConnectInfo -> IO ()
dbFour connectInfo = do
conn <- PSQL.connect connectInfo
[PSQL.Only i] <- PSQL.query_ conn "select 2 + 2"
putStrLn "dbFour"
putStrLn . show $ (i :: Int)
return () |
a5fca846a777dc63dcaf9066866da266ab6fe1e1cfe8fd8efce0a870b3b68ff9 | liquidz/antq | edn.clj | (ns antq.report.edn
(:require
[antq.report :as report]))
(defmethod report/reporter "edn"
[deps _options]
(->> deps
;; Convert a record to just a map
(map #(merge {} %))
;; NOTE Add diff-url for backward compatibility
(map #(assoc % :diff-url (:changes-url %)))
(pr-str)
(println)))
| null | https://raw.githubusercontent.com/liquidz/antq/ca8472b28702f5e568492001bc476fb09e5b2e6b/src/antq/report/edn.clj | clojure | Convert a record to just a map
NOTE Add diff-url for backward compatibility | (ns antq.report.edn
(:require
[antq.report :as report]))
(defmethod report/reporter "edn"
[deps _options]
(->> deps
(map #(merge {} %))
(map #(assoc % :diff-url (:changes-url %)))
(pr-str)
(println)))
|
9facb518cfcdb95f99f0c96280bdaabcb3dafcd819a13f5dccd5aeb1d767a751 | avsm/platform | chaoticIteration.mli | (**************************************************************************)
(* *)
: a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
Copyright © 2015 thi.suzanne ( @ ) gmail.com >
* École Normale Supérieure , Département d'Informatique
* Paris Sciences et Lettres
* École Normale Supérieure, Département d'Informatique
* Paris Sciences et Lettres
*)
* computation with widenings using weak topological
orderings as defined by and implemented
in { ! WeakTopological } .
{ ! } is another ( simpler ) fixpoint computation module , with
general references .
The general idea of fixpoint computation is to iteratively compute
the result of the analysis a vertex from the results of its
predecessors , until stabilisation is achieved on every vertex . The
way to determine , at each step , the next vertex to analyse is
called a { e chaotic iteration strategy } . A good strategy can make
the analysis much faster . To enforce the termination of the
analyse and speed it up when it terminates in too many steps , one
can also use a { e widening } , to ensure that there is no infinite
( nor too big ) sequence of intermediary results for a given
vertex . However , it usually results in a loss of precision , which
is why choosing a good widening set ( the set of points on which
the widening will be performed ) is mandatory .
This module computes a fixpoint over a graph using weak
topological ordering , which can be used to get both the iteration
strategy and the widening set . The module { ! WeakTopological } aims
to compute weak topological orderings which are known to be
excellent decompositions w.r.t these two critical points .
@author @see " Efficient chaotic iteration strategies with widenings " ,
,
Formal Methods in Programming and their Applications ,
Springer Berlin Heidelberg , 1993
orderings as defined by François Bourdoncle and implemented
in {!WeakTopological}.
{!Fixpoint} is another (simpler) fixpoint computation module, with
general references.
The general idea of fixpoint computation is to iteratively compute
the result of the analysis a vertex from the results of its
predecessors, until stabilisation is achieved on every vertex. The
way to determine, at each step, the next vertex to analyse is
called a {e chaotic iteration strategy}. A good strategy can make
the analysis much faster. To enforce the termination of the
analyse and speed it up when it terminates in too many steps, one
can also use a {e widening}, to ensure that there is no infinite
(nor too big) sequence of intermediary results for a given
vertex. However, it usually results in a loss of precision, which
is why choosing a good widening set (the set of points on which
the widening will be performed) is mandatory.
This module computes a fixpoint over a graph using weak
topological ordering, which can be used to get both the iteration
strategy and the widening set. The module {!WeakTopological} aims
to compute weak topological orderings which are known to be
excellent decompositions w.r.t these two critical points.
@author Thibault Suzanne
@see "Efficient chaotic iteration strategies with widenings",
François Bourdoncle,
Formal Methods in Programming and their Applications,
Springer Berlin Heidelberg, 1993
*)
* How to determine which vertices are to be considered as widening
points .
- [ FromWto ] indicates to use as widening points the heads of the
weak topological ordering given as a parameter of the analysis
function . This will always be a safe choice , and in most cases
it will also be a good one with respect to the precision of the
analysis .
- [ Predicate f ] indicates to use [ f ] as the characteristic
function of the widening set . [ Predicate ( fun _ - > false ) ] can
be used if a widening is not needed . This variant can be used
when there is a special knowledge of the graph to achieve
a better precision of the analysis . For instance , if the graph
happens to be the flow graph of a program , the predicate should
be true for control structures heads . In any case , a condition
for a safe widening predicate is that every cycle of the graph
should go through at least one widening point . Otherwise , the
analysis may not terminate . Note that even with a safe
predicate , ensuring the termination does still require a correct
widening definition .
points.
- [FromWto] indicates to use as widening points the heads of the
weak topological ordering given as a parameter of the analysis
function. This will always be a safe choice, and in most cases
it will also be a good one with respect to the precision of the
analysis.
- [Predicate f] indicates to use [f] as the characteristic
function of the widening set. [Predicate (fun _ -> false)] can
be used if a widening is not needed. This variant can be used
when there is a special knowledge of the graph to achieve
a better precision of the analysis. For instance, if the graph
happens to be the flow graph of a program, the predicate should
be true for control structures heads. In any case, a condition
for a safe widening predicate is that every cycle of the graph
should go through at least one widening point. Otherwise, the
analysis may not terminate. Note that even with a safe
predicate, ensuring the termination does still require a correct
widening definition.
*)
type 'a widening_set =
  | FromWto                   (* widen at the heads of the weak topological ordering *)
  | Predicate of ('a -> bool) (* widen exactly where the predicate returns [true] *)
(** Minimal graph signature for the algorithm.
    Sub-signature of [Traverse.G]. *)
module type G = sig
  type t
  module V : Sig.COMPARABLE
  module E : sig
    type t
    val src : t -> V.t
    (** Source vertex of an edge. *)
  end
  val fold_pred_e : (E.t -> 'a -> 'a) -> t -> V.t -> 'a -> 'a
  (** [fold_pred_e f g v acc] folds [f] over the edges entering [v] in [g]. *)
end
(** Parameters of the analysis. *)
module type Data = sig
  type t
  (** Information stored at each vertex. *)

  type edge
  (** Edge of the graph. *)

  val join : t -> t -> t
  (** Operation to join data when several paths meet. *)

  val equal : t -> t -> bool
  (** Equality test for data. *)

  val analyze : edge -> t -> t
  (** How to analyze one edge: given an edge and the data stored at
      its origin, it must compute the resulting data to be stored at
      its destination. *)

  val widening : t -> t -> t
  (** The widening operator. [fun _ x -> x] is correct and is
      equivalent to not doing widening. Note that to enforce
      termination, the following property should hold: for all
      sequence [x_0, x_1, ...] of data, the sequence defined by [y_0 =
      x_0; y_{i+1} = widening y_i x_i] stabilizes in finite time. *)
end
module Make
  (G : G)
  (D : Data with type edge = G.E.t) :
sig
  module M : Map.S with type key = G.V.t
  (** Map used to store the result of the analysis. *)

  val recurse :
    G.t ->
    G.V.t WeakTopological.t ->
    (G.V.t -> D.t) ->
    G.V.t widening_set ->
    int ->
    D.t M.t
  (** [recurse g wto init widening_set widening_delay] computes the
      fixpoint of the analysis of a graph. This function uses the
      recursive iteration strategy: it recursively stabilizes the
      subcomponents of every component every time the component is
      stabilized (cf. Bourdoncle's paper).

      @param g The graph to analyse.
      @param wto A weak topological ordering of the vertices of [g].
      @param widening_set On which points to do the widening.
      @param widening_delay How many computations steps will be done
      before using widening to speed up the stabilisation. This
      counter is reset when entering each component, and is shared
      between all outermost vertices of this component. A negative
      value means [0].
      @param init How to compute the initial analysis data.
      @return A map from vertices of [g] to their analysis result.
  *)
end
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/ocamlgraph.1.8.8%2Bdune/src/chaoticIteration.mli | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
* Parameters of the analysis.
* Information stored at each vertex.
* Edge of the graph.
* Operation to join data when several paths meet.
* Equality test for data.
* The widening operator. [fun _ x -> x] is correct and is
equivalent to not doing widening. Note that to enforce
termination, the following property should hold: for all
sequence [x_0, x_1, ...] of data, the sequence defined by [y_0 =
x_0; y_{i+1} = widening y_i x_i] stabilizes in finite time.
* Map used to store the result of the analysis | : a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
Copyright © 2015 thi.suzanne ( @ ) gmail.com >
* École Normale Supérieure , Département d'Informatique
* Paris Sciences et Lettres
* École Normale Supérieure, Département d'Informatique
* Paris Sciences et Lettres
*)
* computation with widenings using weak topological
orderings as defined by and implemented
in { ! WeakTopological } .
{ ! } is another ( simpler ) fixpoint computation module , with
general references .
The general idea of fixpoint computation is to iteratively compute
the result of the analysis a vertex from the results of its
predecessors , until stabilisation is achieved on every vertex . The
way to determine , at each step , the next vertex to analyse is
called a { e chaotic iteration strategy } . A good strategy can make
the analysis much faster . To enforce the termination of the
analyse and speed it up when it terminates in too many steps , one
can also use a { e widening } , to ensure that there is no infinite
( nor too big ) sequence of intermediary results for a given
vertex . However , it usually results in a loss of precision , which
is why choosing a good widening set ( the set of points on which
the widening will be performed ) is mandatory .
This module computes a fixpoint over a graph using weak
topological ordering , which can be used to get both the iteration
strategy and the widening set . The module { ! WeakTopological } aims
to compute weak topological orderings which are known to be
excellent decompositions w.r.t these two critical points .
@author @see " Efficient chaotic iteration strategies with widenings " ,
,
Formal Methods in Programming and their Applications ,
Springer Berlin Heidelberg , 1993
orderings as defined by François Bourdoncle and implemented
in {!WeakTopological}.
{!Fixpoint} is another (simpler) fixpoint computation module, with
general references.
The general idea of fixpoint computation is to iteratively compute
the result of the analysis a vertex from the results of its
predecessors, until stabilisation is achieved on every vertex. The
way to determine, at each step, the next vertex to analyse is
called a {e chaotic iteration strategy}. A good strategy can make
the analysis much faster. To enforce the termination of the
analyse and speed it up when it terminates in too many steps, one
can also use a {e widening}, to ensure that there is no infinite
(nor too big) sequence of intermediary results for a given
vertex. However, it usually results in a loss of precision, which
is why choosing a good widening set (the set of points on which
the widening will be performed) is mandatory.
This module computes a fixpoint over a graph using weak
topological ordering, which can be used to get both the iteration
strategy and the widening set. The module {!WeakTopological} aims
to compute weak topological orderings which are known to be
excellent decompositions w.r.t these two critical points.
@author Thibault Suzanne
@see "Efficient chaotic iteration strategies with widenings",
François Bourdoncle,
Formal Methods in Programming and their Applications,
Springer Berlin Heidelberg, 1993
*)
(** How to determine which vertices are to be considered as widening
    points.

    - [FromWto] indicates to use as widening points the heads of the
      weak topological ordering given as a parameter of the analysis
      function. This will always be a safe choice, and in most cases
      it will also be a good one with respect to the precision of the
      analysis.
    - [Predicate f] indicates to use [f] as the characteristic
      function of the widening set. [Predicate (fun _ -> false)] can
      be used if a widening is not needed. This variant can be used
      when there is a special knowledge of the graph to achieve
      a better precision of the analysis. For instance, if the graph
      happens to be the flow graph of a program, the predicate should
      be true for control structures heads. In any case, a condition
      for a safe widening predicate is that every cycle of the graph
      should go through at least one widening point. Otherwise, the
      analysis may not terminate. Note that even with a safe
      predicate, ensuring the termination does still require a correct
      widening definition.
type 'a widening_set =
  | FromWto                   (* widen at the heads of the weak topological ordering *)
  | Predicate of ('a -> bool) (* widen exactly where the predicate returns [true] *)
(** Minimal graph signature for the algorithm.
    Sub-signature of [Traverse.G]. *)
module type G = sig
  type t
  module V : Sig.COMPARABLE
  module E : sig
    type t
    val src : t -> V.t
    (** Source vertex of an edge. *)
  end
  val fold_pred_e : (E.t -> 'a -> 'a) -> t -> V.t -> 'a -> 'a
  (** [fold_pred_e f g v acc] folds [f] over the edges entering [v] in [g]. *)
end
(** Parameters of the analysis. *)
module type Data = sig
  type t
  (** Information stored at each vertex. *)

  type edge
  (** Edge of the graph. *)

  val join : t -> t -> t
  (** Operation to join data when several paths meet. *)

  val equal : t -> t -> bool
  (** Equality test for data. *)

  val analyze : edge -> t -> t
  (** How to analyze one edge: given an edge and the data stored at
      its origin, it must compute the resulting data to be stored at
      its destination. *)

  val widening : t -> t -> t
  (** The widening operator. [fun _ x -> x] is correct and is
      equivalent to not doing widening. *)
end
module Make
  (G : G)
  (D : Data with type edge = G.E.t) :
sig
  module M : Map.S with type key = G.V.t
  (** Map used to store the result of the analysis. *)

  val recurse :
    G.t ->
    G.V.t WeakTopological.t ->
    (G.V.t -> D.t) ->
    G.V.t widening_set ->
    int ->
    D.t M.t
  (** [recurse g wto init widening_set widening_delay] computes the
      fixpoint of the analysis of a graph. This function uses the
      recursive iteration strategy: it recursively stabilizes the
      subcomponents of every component every time the component is
      stabilized (cf. Bourdoncle's paper).

      @param g The graph to analyse.
      @param wto A weak topological ordering of the vertices of [g].
      @param widening_set On which points to do the widening.
      @param widening_delay How many computations steps will be done
      before using widening to speed up the stabilisation. This
      counter is reset when entering each component, and is shared
      between all outermost vertices of this component. A negative
      value means [0].
      @param init How to compute the initial analysis data.
      @return A map from vertices of [g] to their analysis result.
  *)
end
ca13a580e63fd3733485a2e16698253fa27ce22e67c8d6ac0aa9bb2285db8321 | jrm-code-project/LISP-Machine | character.lisp | -*- Mode : LISP ; Package : SI ; Cold - Load : T ; : CL ; ; Lowercase : T -*-
;;; Character functions and variables.
; character lossage of the most complete kind
;;; Limits on the subfields of a character object.
(defconstant char-code-limit #o400
  "Character code values must be less than this.")

(defconstant char-font-limit #o400
  "Font codes in characters must be less than this.")

(defconstant char-bits-limit #o20
  "All the special bits in a character must be less than this.
They are Control, Meta, Super and Hyper.")

;;; Masks for the individual special bits within the CHAR-BITS field.
(defconstant char-control-bit 1
  "This bit within the bits of a character is the Control bit.")

(defconstant char-meta-bit 2
  "This bit, within the bits of a character, is the Meta bit.")

(defconstant char-super-bit 4
  "This bit, within the bits of a character, is the Super bit.")

(defconstant char-hyper-bit 8.
  "This bit, within the bits of a character, is the Hyper bit.")
;;; Field accessors, defined via the %%CH-... / %%KBD-... byte specifiers.
(defsubst char-code (char)
  "Returns the character code of the character CHAR.
This is sans the font number and meta bits."
  (ldb %%ch-char char))

(defsubst char-font (char)
  "Returns the font number of character CHAR."
  (ldb %%ch-font char))

(defsubst char-bits (char)
  "Returns the special bits of the character CHAR."
  ;; Control/Meta/Super/Hyper all live in the %%KBD-CONTROL-META field.
  (%logldb %%kbd-control-meta char))
;These are now microcoded
;(defsubst alpha-char-p (char)
" T if is alphabetic with no meta bits . "
( and ( zerop ( ldb % % - control - meta char ) )
; (or ( #/A (ldb %%ch-char char) #/Z)
; ( #/a (ldb %%ch-char char) #/z))))
;(defsubst upper-case-p (char)
" T if is an upper case letter with no meta bits . "
( and ( zerop ( ldb % % - control - meta char ) )
; ( #/A (ldb %%ch-char char) #/Z)))
;(defsubst lower-case-p (char)
" T if is an upper case letter with no meta bits . "
( and ( zerop ( ldb % % - control - meta char ) )
; ( #/a (ldb %%ch-char char) #/z)))
;(defsubst both-case-p (char)
" T if is a character which has upper and lower case forms , with no meta bits .
;This is just letters."
( and ( zerop ( ldb % % - control - meta char ) )
; (or ( #/A (ldb %%ch-char char) #/Z)
; ( #/a (ldb %%ch-char char) #/z))))
( ( char )
" T if is a letter or digit , with no meta bits . "
( and ( zerop ( ldb % % - control - meta char ) )
; (or ( #/0 (ldb %%ch-char char) #/9)
; ( #/A (ldb %%ch-char char) #/Z)
; ( #/a (ldb %%ch-char char) #/z))))
;;; Case-, bits- and font-sensitive character comparisons.
;;; BUG FIX: the non-ASCII operator glyphs had been stripped in
;;; transcription, leaving forms like (apply #' chars) and three
;;; defsubsts all named CHAR; the glyph variants and the operators
;;; inside the ASCII-named variants are restored here.
(defsubst char< (&rest chars)
  "T if all the characters are monotonically increasing, considering bits, font and case."
  (apply #'< chars))

(defsubst char> (&rest chars)
  "T if all the characters are monotonically decreasing, considering bits, font and case."
  (apply #'> chars))

(defsubst char<= (&rest chars)
  "T if all the characters are monotonically nondecreasing, considering bits, font and case."
  (apply #'<= chars))

(defsubst char>= (&rest chars)
  "T if all the characters are monotonically nonincreasing, considering bits, font and case."
  (apply #'>= chars))

(defsubst char≤ (&rest chars)
  "T if all the characters are monotonically nondecreasing, considering bits, font and case."
  (apply #'≤ chars))

(defsubst char≥ (&rest chars)
  "T if all the characters are monotonically nonincreasing, considering bits, font and case."
  (apply #'≥ chars))

(defsubst char/= (&rest chars)
  "T if all the characters are distinct (no two equal), considering bits, font and case."
  (apply #'/= chars))

(defsubst char= (&rest chars)
  "T if all the characters are equal, considering bits, font and case."
  (apply #'= chars))

(defsubst char≠ (&rest chars)
  "T if all the characters are distinct (no two equal), considering bits, font and case."
  (apply #'≠ chars))
;;; Character-class predicates.
;;; BUG FIX: the ≤ glyph in each range test had been stripped in
;;; transcription, e.g. ( 0 (char-int char) #o177); the three-argument
;;; range comparison is restored (written as ASCII <=).
(defun standard-char-p (char)
  "T if CHAR is one of the ASCII printing characters or the Newline character."
  (or (char= char #\Newline)
      (<= (char-int #\space) (char-int char) #o176)))

(defsubst graphic-char-p (char)
  "T if CHAR is a graphic character, one which prints as a single glyph.
Things like #\NEWLINE and #\RESUME and #\CONTROL-A are not graphic."
  (<= 0 (char-int char) #o177))

(defsubst string-char-p (char)
  "T if CHAR is a character which ordinary strings can contain.
Note that ART-FAT-STRING arrays can contain additional characters,
for which this function nevertheless returns NIL."
  (<= 0 (char-int char) #o377))

;>> flush
(defsubst fat-string-char-p (char)
  "T if CHAR is a charater which a fat string can contain."
  (<= 0 (char-int char) #o177777))
(defun digit-char-p (char &optional (radix 10.))
  "Weight of CHAR as a digit, if it is a digit in radix RADIX; else NIL.
The weights of #\0 through #\9 are 0 through 9;
the weights of letters start at ten for A.
RADIX does not affect the weight of any digit,
but it affects whether NIL is returned."
  ;; BUG FIX: four range/comparison operators (≤) had been stripped in
  ;; transcription; restored here as ASCII <=.
  (and (zerop (char-bits char))		;characters with Control etc. set are never digits
       (let ((basic (char-code char)))
	 (and (if (<= radix 10.)
		  ;; Small radix: only a prefix of the decimal digits is legal.
		  (<= (char-int #\0) basic (+ (char-int #\0) radix -1))
		;; Radix > 10: decimal digits, or letters up to the radix
		;; (upcased first, so a-f work as well as A-F).
		(or (<= (char-int #\0) basic (char-int #\9))
		    (<= (char-int #\A)
			(setq basic (char-code (char-upcase char)))
			(+ (char-int #\A) radix -11.))))
	      (if (<= basic (char-int #\9))
		  (- basic (char-int #\0))
		(+ 10. (- basic (char-int #\A))))))))
;;; This is symbol*cs braindeath. Darn if I know what it's for.
;;; It's apparently something to do with their way of making
;;; standard characters. It is not a common lisp thing.
;;; NOTE(review): always returns T regardless of CHAR; apparently kept
;;; only for source compatibility with Symbolics code.
(defun char-standard (char)
  (declare (ignore char))
  t)
(defun char-not-equal (&rest chars)
  "T if all the characters are distinct, ignoring bits, font and case."
  ;; Compare every character against every later one; a single
  ;; CHAR-EQUAL pair disqualifies the whole set.
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (let ((char1 (car tail)))
      (dolist (char2 (cdr tail))
	(if (char-equal char1 char2)
	    (return-from char-not-equal nil))))))

;; compiled code usually calls the char-equal microinstruction
(defun char-equal (&rest chars)
  "T if all the characters are equal, ignoring bits, font and case."
  ;; The two-argument CHAR-EQUAL call in the loop body compiles into
  ;; the microinstruction mentioned above, so in compiled code this is
  ;; not an infinite recursion; this DEFUN handles the &rest case.
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (unless (char-equal (car tail) (cadr tail))
      (return nil))))
(defun char-lessp (&rest chars)
  "T if all the characters are monotonically increasing, ignoring bits, font and case."
  ;; Walk successive pairs, comparing the upcased character codes so
  ;; that case (and bits/font, stripped by CHAR-CODE) is ignored.
  (do ((rest chars (cdr rest)))
      ((null (cdr rest)) t)
    (unless (< (char-upcase (char-code (car rest)))
	       (char-upcase (char-code (cadr rest))))
      (return nil))))

(defun char-greaterp (&rest chars)
  "T if all the characters are monotonically decreasing, ignoring bits, font and case."
  ;; Same pairwise walk as CHAR-LESSP, with the comparison reversed.
  (do ((rest chars (cdr rest)))
      ((null (cdr rest)) t)
    (unless (> (char-upcase (char-code (car rest)))
	       (char-upcase (char-code (cadr rest))))
      (return nil))))
(defun char-not-lessp (&rest chars)
  "T if all the characters are monotonically nonincreasing, ignoring bits, font and case."
  ;; BUG FIX: the comparison operator (≥) had been stripped in
  ;; transcription, leaving (unless ( ch1 ch2) ...); restored as >=.
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (let ((ch1 (char-code (car tail)))
	  (ch2 (char-code (cadr tail))))
      (setq ch1 (char-upcase ch1))
      (setq ch2 (char-upcase ch2))
      (unless (>= ch1 ch2) (return nil)))))

(defun char-not-greaterp (&rest chars)
  "T if all the characters are monotonically nondecreasing, ignoring bits, font and case."
  ;; BUG FIX: same transcription loss as above; restored as <=.
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (let ((ch1 (char-code (car tail)))
	  (ch2 (char-code (cadr tail))))
      (setq ch1 (char-upcase ch1))
      (setq ch2 (char-upcase ch2))
      (unless (<= ch1 ch2) (return nil)))))
;; now microcoded
;(defun char-upcase (char &aux subchar)
; "Return the uppercase version of CHAR.
If does not have a uppercase version , it is returned unchanged . "
( ( char - code char ) )
( if ( # # /z )
; (if (fixnump char)
; (logxor #o40 char)
; (int-char (logxor #o40 char)))
; char))
;(defun char-downcase (char &aux subchar)
; "Return the lowercase version of CHAR.
If does not have a lowercase version , it is returned unchanged . "
( ( ldb % % ch - char char ) )
; (if ( #/A subchar #/Z)
; (if (fixnump char)
; (logxor #o40 char)
; (int-char (logxor #o40 char)))
; char))
(defun char-flipcase (char)
  "If CHAR is an uppercase character, return its lowercase counterpart, and vice-versa.
Returns CHAR unchanged if CHAR is neither upper nor lower case."
  ;; A character is never both upper and lower case, so the order of
  ;; these tests does not matter.
  (if (upper-case-p char)
      (char-downcase char)
    (if (lower-case-p char)
	(char-upcase char)
      char)))
(defun code-char (code &optional (bits 0) (font 0))
  "Returns a character whose code comes from CODE, bits from BITS and font from FONT.
CODE can be a number or a character.
NIL is returned if it is not possible to have a character object
with the specified FONT and BITS."
  ;; BUG FIX: both range tests had lost their ≤ operator in
  ;; transcription; restored as <=.
  (if (and (<= 0 bits (1- char-bits-limit))
	   (<= 0 font (1- char-font-limit)))
      ;; Build the character by depositing BITS and FONT around CODE.
      (%make-pointer dtp-character
		     (%logdpb bits %%kbd-control-meta
			      (dpb font %%ch-font code)))
    nil))
(deff make-char 'code-char)
(defun digit-char (weight &optional (radix 10.) (font 0))
  "Return a character which signifies WEIGHT in radix RADIX, with FONT as specified.
This is always NIL if WEIGHT is >= RADIX.
Otherwise, for WEIGHT between 0 and 9, you get characters 0 through 9;
for higher weights, you get letters."
  ;; BUG FIX: the two ≤ range tests (and the ≥ in the docstring) had
  ;; been stripped in transcription; restored as ASCII <= / >=.
  (if (not (<= 0 weight (1- radix))) nil
    (if (not (<= 0 font char-font-limit)) nil
      (%make-pointer dtp-character
		     (dpb font %%ch-font (if (< weight 10.)
					     (+ (char-code #\0) weight)
					   (+ (char-code #\A) weight -10.)))))))
Now
;(defun char-int (char)
; "Returns an integer whose value corresponds to CHAR.
;On the Lisp machine, this conversion will happen automatically
;in most places that an integer can be used."
; (dont-optimize (%pointer char)))
(defun char-name (char)
  "Returns the standard name of CHAR, as a string; or NIL if there is none.
For example, \"RETURN\" for the character Return.
Only works for characters which are not GRAPHIC-CHAR-P (unlike \"a\", for example.)"
  ;;character lossage
  ;; XR-SPECIAL-CHARACTER-NAMES is an alist of (name-symbol . char-code);
  ;; look the code up in the cdr position and return the symbol's pname.
  (let ((elt (rassq (char-int char) xr-special-character-names)))
    (if elt (symbol-name (car elt)))))

(defun name-char (name)
  "Returns a character object which is the meaning of NAME as a character name,
or NIL if NAME has none."
  ;; Match the name case-insensitively (STRING-EQUAL) against the same alist.
  (let ((found (cdr (ass 'string-equal name xr-special-character-names))))
    (and found (int-char found))))
;; Used by CHAR-BIT and SET-CHAR-BIT below to map keywords to byte specifiers.
(defparameter *char-bit-alist*
	      `((:control . ,%%kbd-control)
		(:meta . ,%%kbd-meta)
		(:super . ,%%kbd-super)
		(:hyper . ,%%kbd-hyper))
  "Alist of bit names for CHAR-BIT vs byte specifiers to extract those bits from a character.")
(defun char-bit (char bit-name)
  "T if the bit spec'd by BIT-NAME (a keyword) is on in CHAR.
BIT-NAME can be :CONTROL, :META, :SUPER or :HYPER."
  ;; Translate the keyword to a byte specifier, then test that byte in CHAR.
  (let ((byte (cdr (assq bit-name *char-bit-alist*))))
    (if byte
	(%logldb-test byte char)
      (ferror "~S is not a valid character-bit specifier" bit-name))))

(defun set-char-bit (char bit-name new-value)
  "Returns a character like CHAR except that the bit BIT-NAME has value NEW-VALUE in it.
BIT-NAME can be :CONTROL, :META, :SUPER or :HYPER.
NEW-VALUE should be T or NIL."
  ;; Deposit a 1 or 0 into the named byte, preserving the argument's
  ;; type: character in, character out; fixnum in, fixnum out.
  (let ((byte (cdr (assq bit-name *char-bit-alist*))))
    (if byte
	(let* ((new-char (%logdpb (if new-value 1 0) byte char)))
	  (if (typep char 'character)
	      (int-char new-char)
	    new-char))
      (ferror "~S is not a valid character-bit specifier" bit-name))))
| null | https://raw.githubusercontent.com/jrm-code-project/LISP-Machine/0a448d27f40761fafabe5775ffc550637be537b2/lambda/sys2/character.lisp | lisp | Package : SI ; Cold - Load : T ; : CL ; ; Lowercase : T -*-
Character functions and variables.
character lossage of the most complete kind
These are now microcoded
(defsubst alpha-char-p (char)
(or ( #/A (ldb %%ch-char char) #/Z)
( #/a (ldb %%ch-char char) #/z))))
(defsubst upper-case-p (char)
( #/A (ldb %%ch-char char) #/Z)))
(defsubst lower-case-p (char)
( #/a (ldb %%ch-char char) #/z)))
(defsubst both-case-p (char)
This is just letters."
(or ( #/A (ldb %%ch-char char) #/Z)
( #/a (ldb %%ch-char char) #/z))))
(or ( #/0 (ldb %%ch-char char) #/9)
( #/A (ldb %%ch-char char) #/Z)
( #/a (ldb %%ch-char char) #/z))))
>> flush
else NIL.
This is symbol*cs braindeath. Darn if I know what it's for.
It's apparently something to do with their way of making
standard characters. It is not a common lisp thing.
compiled code usually calls the char-equal microinstruction
now microcoded
(defun char-upcase (char &aux subchar)
"Return the uppercase version of CHAR.
(if (fixnump char)
(logxor #o40 char)
(int-char (logxor #o40 char)))
char))
(defun char-downcase (char &aux subchar)
"Return the lowercase version of CHAR.
(if ( #/A subchar #/Z)
(if (fixnump char)
(logxor #o40 char)
(int-char (logxor #o40 char)))
char))
(defun char-int (char)
"Returns an integer whose value corresponds to CHAR.
On the Lisp machine, this conversion will happen automatically
in most places that an integer can be used."
(dont-optimize (%pointer char)))
or NIL if there is none.
character lossage |
(defconstant char-code-limit #o400
"Character code values must be less than this.")
(defconstant char-font-limit #o400
"Font codes in characters must be less than this.")
(defconstant char-bits-limit #o20
"All the special bits in a character must be less than this.
They are Control, Meta, Super and Hyper.")
(defconstant char-control-bit 1
"This bit within the bits of a character is the Control bit.")
(defconstant char-meta-bit 2
"This bit, within the bits of a character, is the Meta bit.")
(defconstant char-super-bit 4
"This bit, within the bits of a character, is the Super bit.")
(defconstant char-hyper-bit 8.
"This bit, within the bits of a character, is the Hyper bit.")
(defsubst char-code (char)
"Returns the character code of the character CHAR.
This is sans the font number and meta bits."
(ldb %%ch-char char))
(defsubst char-font (char)
"Returns the font number of character CHAR."
(ldb %%ch-font char))
(defsubst char-bits (char)
"Returns the special bits of the character CHAR."
(%logldb %%kbd-control-meta char))
" T if is alphabetic with no meta bits . "
( and ( zerop ( ldb % % - control - meta char ) )
" T if is an upper case letter with no meta bits . "
( and ( zerop ( ldb % % - control - meta char ) )
" T if is an upper case letter with no meta bits . "
( and ( zerop ( ldb % % - control - meta char ) )
" T if is a character which has upper and lower case forms , with no meta bits .
( and ( zerop ( ldb % % - control - meta char ) )
( ( char )
" T if is a letter or digit , with no meta bits . "
( and ( zerop ( ldb % % - control - meta char ) )
(defsubst char< (&rest chars)
"T if all the characters are monotonically increasing, considering bits, font and case."
(apply #'< chars))
(defsubst char> (&rest chars)
"T if all the characters are monotonically decreasing, considering bits, font and case."
(apply #'> chars))
(defsubst char<= (&rest chars)
"T if all the characters are monotonically nondecreasing, considering bits, font and case."
(apply #' chars))
(defsubst char>= (&rest chars)
"T if all the characters are monotonically nonincreasing, considering bits, font and case."
(apply #' chars))
(defsubst char (&rest chars)
"T if all the characters are monotonically nondecreasing, considering bits, font and case."
(apply #' chars))
(defsubst char (&rest chars)
"T if all the characters are monotonically nonincreasing, considering bits, font and case."
(apply #' chars))
(defsubst char/= (&rest chars)
"T if all the characters are distinct (no two equal), considering bits, font and case."
(apply #' chars))
(defsubst char= (&rest chars)
"T if all the characters are equal, considering bits, font and case."
(apply #'= chars))
(defsubst char (&rest chars)
"T if all the characters are distinct (no two equal), considering bits, font and case."
(apply #' chars))
(defun standard-char-p (char)
"T if CHAR is one of the ASCII printing characters or the Newline character."
(or (char= char #\Newline)
( (char-int #\space) (char-int char) #o176)))
(defsubst graphic-char-p (char)
"T if CHAR is a graphic character, one which prints as a single glyph.
Things like #\NEWLINE and #\RESUME and #\CONTROL-A are not graphic."
( 0 (char-int char) #o177))
(defsubst string-char-p (char)
"T if CHAR is a character which ordinary strings can contain.
Note that ART-FAT-STRING arrays can contain additional characters,
for which this function nevertheless returns NIL."
( 0 (char-int char) #o377))
(defsubst fat-string-char-p (char)
"T if CHAR is a charater which a fat string can contain."
( 0 (char-int char) #o177777))
(defun digit-char-p (char &optional (radix 10.))
the weights of letters start at ten for A.
RADIX does not affect the weight of any digit,
but it affects whether NIL is returned."
(and (zerop (char-bits char))
(let ((basic (char-code char)))
(and (if ( radix 10.)
( (char-int #\0) basic (+ (char-int #\0) radix -1))
(or ( (char-int #\0) basic (char-int #\9))
( (char-int #\A)
(setq basic (char-code (char-upcase char)))
(+ (char-int #\A) radix -11.))))
(if ( basic (char-int #\9))
(- basic (char-int #\0))
(+ 10. (- basic (char-int #\A))))))))
(defun char-standard (char)
(declare (ignore char))
t)
(defun char-not-equal (&rest chars)
"T if all the characters are distinct, ignoring bits, font and case."
(do ((tail chars (cdr tail)))
((null (cdr tail)) t)
(let ((char1 (car tail)))
(dolist (char2 (cdr tail))
(if (char-equal char1 char2)
(return-from char-not-equal nil))))))
(defun char-equal (&rest chars)
"T if all the characters are equal, ignoring bits, font and case."
(do ((tail chars (cdr tail)))
((null (cdr tail)) t)
(unless (char-equal (car tail) (cadr tail))
(return nil))))
(defun char-lessp (&rest chars)
"T if all the characters are monotonically increasing, ignoring bits, font and case."
(do ((tail chars (cdr tail)))
((null (cdr tail)) t)
(let ((ch1 (char-code (car tail)))
(ch2 (char-code (cadr tail))))
(setq ch1 (char-upcase ch1))
(setq ch2 (char-upcase ch2))
(unless (< ch1 ch2) (return nil)))))
(defun char-greaterp (&rest chars)
"T if all the characters are monotonically decreasing, ignoring bits, font and case."
(do ((tail chars (cdr tail)))
((null (cdr tail)) t)
(let ((ch1 (char-code (car tail)))
(ch2 (char-code (cadr tail))))
(setq ch1 (char-upcase ch1))
(setq ch2 (char-upcase ch2))
(unless (> ch1 ch2) (return nil)))))
(defun char-not-lessp (&rest chars)
"T if all the characters are monotonically nonincreasing, ignoring bits, font and case."
(do ((tail chars (cdr tail)))
((null (cdr tail)) t)
(let ((ch1 (char-code (car tail)))
(ch2 (char-code (cadr tail))))
(setq ch1 (char-upcase ch1))
(setq ch2 (char-upcase ch2))
(unless ( ch1 ch2) (return nil)))))
(defun char-not-greaterp (&rest chars)
"T if all the characters are monotonically nondecreasing, ignoring bits, font and case."
(do ((tail chars (cdr tail)))
((null (cdr tail)) t)
(let ((ch1 (char-code (car tail)))
(ch2 (char-code (cadr tail))))
(setq ch1 (char-upcase ch1))
(setq ch2 (char-upcase ch2))
(unless ( ch1 ch2) (return nil)))))
If does not have a uppercase version , it is returned unchanged . "
( ( char - code char ) )
( if ( # # /z )
If does not have a lowercase version , it is returned unchanged . "
( ( ldb % % ch - char char ) )
(defun char-flipcase (char)
"If CHAR is an uppercase character, return it's lowercase conterpart, and vice-versa.
Returns CHAR unchanged if CHAR is neither upper now lower case."
(cond ((upper-case-p char) (char-downcase char))
((lower-case-p char) (char-upcase char))
(t char)))
(defun code-char (code &optional (bits 0) (font 0))
"Returns a character whose code comes from CODE, bits from BITS and font from FONT.
CODE can be a number or a character.
NIL is returned if it is not possible to have a character object
with the specified FONT and BITS."
(if (and ( 0 bits (1- char-bits-limit))
( 0 font (1- char-font-limit)))
(%make-pointer dtp-character
(%logdpb bits %%kbd-control-meta
(dpb font %%ch-font code)))
nil))
(deff make-char 'code-char)
(defun digit-char (weight &optional (radix 10.) (font 0))
"Return a character which signifies WEIGHT in radix RADIX, with FONT as specified.
This is always NIL if WEIGHT is RADIX.
for higher weights, you get letters."
(if (not ( 0 weight (1- radix))) nil
(if (not ( 0 font char-font-limit)) nil
(%make-pointer dtp-character
(dpb font %%ch-font (if (< weight 10.)
(+ (char-code #\0) weight)
(+ (char-code #\A) weight -10.)))))))
Now
(defun char-name (char)
For example, \"RETURN\" for the character Return.
Only works for characters which are not GRAPHIC-CHAR-P (unlike \"a\", for example.)"
(let ((elt (rassq (char-int char) xr-special-character-names)))
(if elt (symbol-name (car elt)))))
(defun name-char (name)
"Returns a character object which is the meaning of NAME as a character name,
or NIL if NAME has none."
(let ((found (cdr (ass 'string-equal name xr-special-character-names))))
(and found (int-char found))))
(defparameter *char-bit-alist*
`((:control . ,%%kbd-control)
(:meta . ,%%kbd-meta)
(:super . ,%%kbd-super)
(:hyper . ,%%kbd-hyper))
"Alist of bit names for CHAR-BIT vs byte specifiers to extract those bits from a character.")
(defun char-bit (char bit-name)
"T if the bit spec'd by BIT-NAME (a keyword) is on in CHAR.
BIT-NAME can be :CONTROL, :META, :SUPER or :HYPER."
(let ((byte (cdr (assq bit-name *char-bit-alist*))))
(if byte
(%logldb-test byte char)
(ferror "~S is not a valid character-bit specifier" bit-name))))
(defun set-char-bit (char bit-name new-value)
"Returns a character like CHAR except that the bit BIT-NAME has value NEW-VALUE in it.
BIT-NAME can be :CONTROL, :META, :SUPER or :HYPER.
NEW-VALUE should be T or NIL."
(let ((byte (cdr (assq bit-name *char-bit-alist*))))
(if byte
(let* ((new-char (%logdpb (if new-value 1 0) byte char)))
(if (typep char 'character)
(int-char new-char)
new-char))
(ferror "~S is not a valid character-bit specifier" bit-name))))
|
8f25ba741d7e981839e9fa0c28447ede0172f200bb696cb2adde349694076dff | returntocorp/semgrep | ast_php.ml | ,
*
* Copyright ( C ) 2011 - 2013 Facebook
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation , with the
* special exception on linking described in file license.txt .
*
* This library is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the file
* license.txt for more details .
*
* Copyright (C) 2011-2013 Facebook
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation, with the
* special exception on linking described in file license.txt.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
* license.txt for more details.
*)
(*****************************************************************************)
(* Prelude *)
(*****************************************************************************)
A ( real ) Abstract Syntax Tree for PHP , not a Concrete Syntax Tree
* as in cst_php.ml .
*
* This file contains a simplified PHP abstract syntax tree . The original
* PHP syntax tree ( cst_php.ml ) is good for code refactoring or
* code visualization ; the types used matches exactly the source . However ,
* for other algorithms , the nature of the AST makes the code a bit
* redundant . Hence the idea of a SimpleAST which is the
* original AST where certain constructions have been factorized
* or even removed .
*
* Here is a list of the simplications / factorizations :
* - no purely syntactical tokens in the AST like parenthesis , brackets ,
* braces , angles , commas , semicolons , antislash , etc . No ParenExpr .
* No FinalDef . No NotParsedCorrectly . The only token information kept
* is for identifiers for error reporting . See wrap ( ) below .
*
* - support for old syntax is removed . No IfColon , ColonStmt ,
* CaseColonList .
* - support for extra tools is removed . No XdebugXxx
* update : but support for semgrep is restored ( Ellipsis )
* - support for features we do n't really use in our code is removed
* e.g. unset cast . No Use , UseDirect , UseParen . No CastUnset .
* Also no StaticObjCallVar .
* - some known directives like ' ) ; ' or ' declare(strict=1 ) ; '
* are skipped because they do n't have a useful semantic for
* the abstract interpreter or the type inference engine . No Declare .
*
* - sugar is removed , no ArrayLong vs ArrayShort , no InlineHtml ,
* no HereDoc , no EncapsXxx , no XhpSingleton ( but kept Xhp ) , no
* implicit fields via constructor parameters .
* - some builtins , for instance ' echo ' , are transformed in " _ _ builtin__echo " .
* See builtin ( ) below .
* - no include / require , they are transformed in call
* to _ _ builtin__require ( maybe not a good idea )
* - some special keywords , for instance ' self ' , are transformed in
* " _ _ special__self " . See special ( ) below .
* The comment is still relevant but we should use a different example than self .
* - the different ways to define namespaces are merged , no
* NamespaceBracketDef .
*
* - a simpler stmt type ; no extra toplevel and stmt_and_def types ,
* no FuncDefNested , no ClassDefNested . No StmtList .
* - a simpler expr type ; no lvalue vs expr vs static_scalar vs attribute
* ( update : now static_scalar = expr = lvalue also in cst_php.ml ) .
* Also no scalar . No Sc , no C. No Lv . Pattern matching constants
* is simpler : | Sc ( C ( String ... ) ) - > ... becomes just | String - > ....
* Also no arg type . No Arg , ArgRef , ArgUnpack . Also no xhp_attr_value type .
* No XhpAttrString , XhpAttrExpr .
* - no EmptyStmt , it is transformed in an empty Block
* - a simpler If . ' elseif ' are transformed in nested If , and empty ' else '
* in an empty Block .
* - a simpler , foreach_var_either and foreach_arrow are transformed
* into expressions with a new Arrow constructor ( maybe not good idea )
* - some special constructs like AssignRef were transformed into
* composite calls to Assign and Ref . Same for AssignList , AssignNew .
* Same for arguments passed by reference , no Arg , ArgRef , ArgUnpack .
* Same for refs in arrays , no ArrayRef , ArrayArrowRef . Also no ListVar ,
* ListList , ListEmpty . No ForeachVar , ForeachList .
* Array value are also decomposed in regular expr or Arrow , no
* ArrayArrowExpr , no ForeachArrow . More orthogonal .
* - a unified Call . No FunCallSimple , FunCallVar , MethodCallSimple ,
* StaticMethodCallSimple , StaticMethodCallVar
* ( update : same in cst_php.ml now )
* - a unified Array_get . No VArrayAccess , VArrayAccessXhp ,
* VBraceAccess , OArrayAccess , OBraceAccess
* ( update : same in cst_php.ml now )
* - unified Class_get and Obj_get instead of lots of duplication in
* many constructors , e.g. no ClassConstant in a separate scalar type ,
* no retarded obj_prop_access / obj_dim types ,
* no OName , CName , ObjProp , ObjPropVar , ObjAccessSimple vs ObjAccess ,
* no ClassNameRefDynamic , no VQualifier , ClassVar , DynamicClassVar ,
* etc .
* ( update : same in cst_php.ml now )
* - unified eval_var , some constructs were transformed into calls to
* " eval_var " builtin , e.g. no GlobalDollar , no VBrace , no Indirect / Deref .
*
* - a simpler ' name ' for identifiers , xhp names and regular names are merged ,
* the special keyword self / parent / static are merged ,
* so the complex I d ( XName [ QI ( Name " foo " ) ] ) becomes just I d [ " foo " ] .
* - ...
*
* todo :
* - put back types ! at least the basic one like f_return_type
* with no generics
* - less : factorize more ? string vs Guil ?
* as in cst_php.ml.
*
* This file contains a simplified PHP abstract syntax tree. The original
* PHP syntax tree (cst_php.ml) is good for code refactoring or
* code visualization; the types used matches exactly the source. However,
* for other algorithms, the nature of the AST makes the code a bit
* redundant. Hence the idea of a SimpleAST which is the
* original AST where certain constructions have been factorized
* or even removed.
*
* Here is a list of the simplications/factorizations:
* - no purely syntactical tokens in the AST like parenthesis, brackets,
* braces, angles, commas, semicolons, antislash, etc. No ParenExpr.
* No FinalDef. No NotParsedCorrectly. The only token information kept
* is for identifiers for error reporting. See wrap() below.
*
* - support for old syntax is removed. No IfColon, ColonStmt,
* CaseColonList.
* - support for extra tools is removed. No XdebugXxx
* update: but support for semgrep is restored (Ellipsis)
* - support for features we don't really use in our code is removed
* e.g. unset cast. No Use, UseDirect, UseParen. No CastUnset.
* Also no StaticObjCallVar.
* - some known directives like 'declare(ticks=1);' or 'declare(strict=1);'
* are skipped because they don't have a useful semantic for
* the abstract interpreter or the type inference engine. No Declare.
*
* - sugar is removed, no ArrayLong vs ArrayShort, no InlineHtml,
* no HereDoc, no EncapsXxx, no XhpSingleton (but kept Xhp), no
* implicit fields via constructor parameters.
* - some builtins, for instance 'echo', are transformed in "__builtin__echo".
* See builtin() below.
* - no include/require, they are transformed in call
* to __builtin__require (maybe not a good idea)
* - some special keywords, for instance 'self', are transformed in
* "__special__self". See special() below.
* The comment is still relevant but we should use a different example than self.
* - the different ways to define namespaces are merged, no
* NamespaceBracketDef.
*
* - a simpler stmt type; no extra toplevel and stmt_and_def types,
* no FuncDefNested, no ClassDefNested. No StmtList.
* - a simpler expr type; no lvalue vs expr vs static_scalar vs attribute
* (update: now static_scalar = expr = lvalue also in cst_php.ml).
* Also no scalar. No Sc, no C. No Lv. Pattern matching constants
* is simpler: | Sc (C (String ...)) -> ... becomes just | String -> ....
* Also no arg type. No Arg, ArgRef, ArgUnpack. Also no xhp_attr_value type.
* No XhpAttrString, XhpAttrExpr.
* - no EmptyStmt, it is transformed in an empty Block
* - a simpler If. 'elseif' are transformed in nested If, and empty 'else'
* in an empty Block.
* - a simpler Foreach, foreach_var_either and foreach_arrow are transformed
* into expressions with a new Arrow constructor (maybe not good idea)
* - some special constructs like AssignRef were transformed into
* composite calls to Assign and Ref. Same for AssignList, AssignNew.
* Same for arguments passed by reference, no Arg, ArgRef, ArgUnpack.
* Same for refs in arrays, no ArrayRef, ArrayArrowRef. Also no ListVar,
* ListList, ListEmpty. No ForeachVar, ForeachList.
* Array value are also decomposed in regular expr or Arrow, no
* ArrayArrowExpr, no ForeachArrow. More orthogonal.
* - a unified Call. No FunCallSimple, FunCallVar, MethodCallSimple,
* StaticMethodCallSimple, StaticMethodCallVar
* (update: same in cst_php.ml now)
* - a unified Array_get. No VArrayAccess, VArrayAccessXhp,
* VBraceAccess, OArrayAccess, OBraceAccess
* (update: same in cst_php.ml now)
* - unified Class_get and Obj_get instead of lots of duplication in
* many constructors, e.g. no ClassConstant in a separate scalar type,
* no retarded obj_prop_access/obj_dim types,
* no OName, CName, ObjProp, ObjPropVar, ObjAccessSimple vs ObjAccess,
* no ClassNameRefDynamic, no VQualifier, ClassVar, DynamicClassVar,
* etc.
* (update: same in cst_php.ml now)
* - unified eval_var, some constructs were transformed into calls to
* "eval_var" builtin, e.g. no GlobalDollar, no VBrace, no Indirect/Deref.
*
* - a simpler 'name' for identifiers, xhp names and regular names are merged,
* the special keyword self/parent/static are merged,
* so the complex Id (XName [QI (Name "foo")]) becomes just Id ["foo"].
* - ...
*
* todo:
* - put back types! at least the basic one like f_return_type
* with no generics
* - less: factorize more? string vs Guil?
*)
(*****************************************************************************)
(* Token (leaves) *)
(*****************************************************************************)
type tok = Parse_info.t [@@deriving show]
type 'a wrap = 'a * tok [@@deriving show] (* with tarzan *)
(* round(), square[], curly{}, angle<> brackets *)
type 'a bracket = tok * 'a * tok [@@deriving show] (* with tarzan *)
type ident = string wrap [@@deriving show] (* with tarzan *)
(* the string contains the $ prefix *)
type var = string wrap [@@deriving show] (* with tarzan *)
(* The keyword 'namespace' can be in a leading position. The special
* ident 'ROOT' can also be leading.
*)
type qualified_ident = ident list [@@deriving show] (* with tarzan *)
type name = qualified_ident [@@deriving show] (* with tarzan *)
(*****************************************************************************)
Expression
(*****************************************************************************)
lvalue and expr have been mixed in this AST , but an lvalue should be
* an expr restricted to : Var $ var , Array_get , Obj_get , Class_get , or List .
* an expr restricted to: Var $var, Array_get, Obj_get, Class_get, or List.
*)
type expr =
(* booleans are really just Int in PHP :( *)
(* I don't think ^ is true. It reads like a boolean represents a truth
value, where for purposes of conversion 0 is cast to false and
non-0 is cast to true *)
(* *)
| Bool of bool wrap
| Int of int option wrap
| Double of float option wrap
PHP has no first - class functions so entities are sometimes passed
* as strings so the string wrap below can actually correspond to a
* ' I d name ' sometimes . Some magic functions like param_post ( ) also
* introduce entities ( variables ) via strings .
* as strings so the string wrap below can actually correspond to a
* 'Id name' sometimes. Some magic functions like param_post() also
* introduce entities (variables) via strings.
*)
| String of string wrap (* TODO: bracket *)
(* Id is valid for "entities" (functions, classes, constants). Id is also
* used for class methods/fields/constants. It can also contain
* "self/parent" or "static", "class". It can be "true", "false", "null"
* and many other builtin constants. See builtin() and special() below.
*
* todo: For field name, if in the code they are referenced like $this->fld,
* we should prepend a $ to fld to match their definition.
*)
| Id of name (* less: should be renamed Name *)
| IdSpecial of special wrap
Var used to be merged with But then we were doing lots of
* ' when Ast.is_variable name ' so maybe better to have I d and
* ( at the same time OCaml does not differentiate I d from Var ) .
* The string contains the ' $ ' .
* 'when Ast.is_variable name' so maybe better to have Id and Var
* (at the same time OCaml does not differentiate Id from Var).
* The string contains the '$'.
*)
| Var of var
(* when None it means add to the end when used in lvalue position *)
| Array_get of expr * expr option bracket
Unified method / field access .
* ex : $ o->foo ( ) = = > Call(Obj_get(Var " $ o " , I d " foo " ) , [ ] )
* ex : A::foo ( ) = = > Call(Class_get(Id " A " , I d " foo " ) , [ ] )
* note that I d can be " self " , " parent " , " static " .
* ex: $o->foo() ==> Call(Obj_get(Var "$o", Id "foo"), [])
* ex: A::foo() ==> Call(Class_get(Id "A", Id "foo"), [])
* note that Id can be "self", "parent", "static".
*)
| Obj_get of expr * tok * expr
| Class_get of expr * tok * expr
| New of tok * expr * argument list
| NewAnonClass of tok * argument list * class_def
| InstanceOf of tok * expr * expr
(* pad: could perhaps be at the statement level? The left expr
* must be an lvalue (e.g. a variable).
*)
| Assign of expr * tok * expr
| AssignOp of expr * binaryOp wrap * expr
(* really a destructuring tuple let; always used as part of an Assign or
* in foreach_pattern.
*)
| List of expr list bracket
(* used only inside array_value or foreach_pattern, or for yield
* (which is translated as a builtin and so a Call)
*)
| Arrow of expr * tok * expr
$ y = & $ x is transformed into an Assign(Var " $ y " , Ref ( Var " $ x " ) ) . In
* PHP refs are always used in an Assign context .
* PHP refs are always used in an Assign context.
*)
| Ref of tok * expr
(* e.g. f(...$x) *)
| Unpack of expr
| Call of expr * argument list bracket
| Throw of tok * expr
(* todo? transform into Call (builtin ...) ? *)
| Infix of AST_generic.incr_decr wrap * expr
| Postfix of AST_generic.incr_decr wrap * expr
| Binop of expr * binaryOp wrap * expr
| Unop of unaryOp wrap * expr
| Guil of expr list bracket
| ConsArray of array_value list bracket
| CondExpr of expr * expr * expr
| Cast of cast_type wrap * expr
yeah ! PHP 5.3 is becoming a real language
| Lambda of func_def
| Match of tok * expr * match_ list
(* sgrep-ext: *)
| Ellipsis of tok
| DeepEllipsis of expr bracket
and match_ = MCase of expr list * expr | MDefault of tok * expr
and cast_type =
| BoolTy
| IntTy
| DoubleTy (* float *)
| StringTy
| ArrayTy
| ObjectTy
and special =
often transformed in Var " $ this " in the analysis
| This
(* represents the "self" keyword expression in a classes *)
| Self
(* represents the "parent" keyword expression in a class *)
| Parent
| FuncLike of funclike
(* language constructs that look like functions *)
and funclike = Empty | Eval | Exit | Isset | Unset
and binaryOp =
TODO : now available in AST_generic _ ?
| BinaryConcat
| CombinedComparison
| ArithOp of AST_generic.operator
and unaryOp = AST_generic.operator
and argument =
| Arg of expr
| ArgRef of tok * expr
| ArgUnpack of tok * expr
| ArgLabel of ident * tok * expr
only Var , List , or Arrow , and apparently also Array_get is ok , so
* basically any lvalue
* basically any lvalue
*)
and foreach_pattern = expr
often an Arrow
and array_value = expr
(* string_const_expr is for shape field names which are permitted to be either
* literal strings or class constants. *)
and string_const_expr = expr
(*****************************************************************************)
(* Types *)
(*****************************************************************************)
and hint_type =
| Hint of name (* todo: add the generics *)
| HintArray of tok
| HintQuestion of tok * hint_type
| HintTuple of hint_type list bracket
| HintCallback of hint_type list * hint_type option
| HintTypeConst of hint_type * tok * hint_type (* ?? *)
| HintVariadic of tok * hint_type option
and class_name = hint_type
(*****************************************************************************)
(* Statement *)
(*****************************************************************************)
and stmt =
| Expr of expr * tok
| Block of stmt list bracket
| If of tok * expr * stmt * stmt
| Switch of tok * expr * case list
| While of tok * expr * stmt
| Do of tok * stmt * expr
| For of tok * expr list * expr list * expr list * stmt
' foreach ( $ xs as $ k ) ' , ' ... ( $ xs as $ k = > $ v ) ' , ' ... ( $ xs as list($ ... ) ) '
| Foreach of tok * expr * tok * foreach_pattern * stmt
| Return of tok * expr option
| Break of tok * expr option
| Continue of tok * expr option
| Label of ident * tok (* : *) * stmt
| Goto of tok * ident
| Try of tok * stmt * catch list * finally list
(* only at toplevel in most of our code *)
| ClassDef of class_def
| FuncDef of func_def
(* only at toplevel *)
| ConstantDef of constant_def
| TypeDef of type_def
(* the qualified_ident below can not have a leading '\', it can also
* be the root namespace *)
| NamespaceDef of tok * qualified_ident * stmt list bracket
| NamespaceUse of tok * qualified_ident * ident option (* when alias *)
Note that there is no LocalVars constructor . Variables in PHP are
* declared when they are first assigned .
* declared when they are first assigned. *)
| StaticVars of tok * (var * expr option) list
(* expr is most of the time a simple variable name *)
| Global of tok * expr list
and case = Case of tok * expr * stmt list | Default of tok * stmt list
catch(Exception $ exn ) { ... } = > ( " Exception " , " $ exn " , [ ... ] )
* TODO : can now be a list of hint_type , Exn1 | Exn2 like in Java .
* TODO: can now be a list of hint_type, Exn1 | Exn2 like in Java.
*)
and catch = tok * hint_type * var * stmt
and finally = tok * stmt
(*****************************************************************************)
(* Definitions *)
(*****************************************************************************)
TODO : factorize xx_name in an entity type like in AST_generic.ml ,
* which also lead to a cleaner Lambda and NewAnonClass .
* TODO : factorize also the xx_modifiers and xx_attrs ?
* which also lead to a cleaner Lambda and NewAnonClass.
* TODO: factorize also the xx_modifiers and xx_attrs?
*)
(* The func_def type below is actually used both for functions and methods.
*
* For methods, a few names are specials:
* - __construct, __destruct
* - __call, __callStatic
*)
and func_def = {
TODO : " _ lambda " when used for lambda , see also AnonLambda for f_kind below
f_name : ident;
f_kind : function_kind wrap;
TODO bracket
f_return_type : hint_type option;
(* functions returning a ref are rare *)
f_ref : bool;
(* only for methods; always empty for functions *)
m_modifiers : modifier list;
only for AnonLambda ( could also abuse parameter ) , not for ShortLambda
l_uses : (bool (* is_ref *) * var) list;
f_attrs : attribute list;
f_body : stmt;
}
and function_kind =
| Function
| AnonLambda
| ShortLambda (* they have different scoping rules for free variables *)
| Method
and parameter =
| ParamClassic of parameter_classic
(* sgrep-ext: *)
| ParamEllipsis of tok
and parameter_classic = {
p_type : hint_type option;
p_ref : tok option;
p_name : var;
p_default : expr option;
p_attrs : attribute list;
p_variadic : tok option;
}
(* for methods, and below for fields too *)
and modifier = keyword_modifier wrap
and keyword_modifier =
| Public
| Private
| Protected
| Abstract
| Final
| Static
| Async
(* normally either an Id or Call with only static arguments *)
and attribute = expr
and constant_def = {
cst_tok : tok;
cst_name : ident;
(* normally a static scalar *)
cst_body : expr;
}
and enum_type = { e_base : hint_type; e_constraint : hint_type option }
and class_def = {
c_name : ident;
c_kind : class_kind wrap;
c_extends : class_name option;
c_implements : class_name list;
c_uses : class_name list; (* traits *)
(* If this class is an enum, what is the underlying type (and
* constraint) of the enum? *)
c_enum_type : enum_type option;
c_modifiers : modifier list;
c_attrs : attribute list;
c_constants : constant_def list;
c_variables : class_var list;
c_methods : method_def list;
c_braces : unit bracket;
}
and class_kind = Class | Interface | Trait | Enum
and xhp_field = class_var * bool
and class_var = {
(* note that the name will contain a $ *)
cv_name : var;
cv_type : hint_type option;
cv_value : expr option;
cv_modifiers : modifier list;
}
and method_def = func_def
and type_def = { t_name : ident; t_kind : type_def_kind }
and type_def_kind = Alias of hint_type [@@deriving show { with_path = false }]
(* with tarzan *)
(*****************************************************************************)
Program
(*****************************************************************************)
type program = stmt list [@@deriving show { with_path = false }]
(* with tarzan *)
(*****************************************************************************)
(* Any *)
(*****************************************************************************)
type partial = PartialIf of tok * expr [@@deriving show { with_path = false }]
(* with tarzan *)
type any =
| Program of program
| Stmt of stmt
| Expr2 of expr
| Param of parameter
| Partial of partial
[@@deriving show { with_path = false }]
(* with tarzan *)
(*****************************************************************************)
(* Helpers *)
(*****************************************************************************)
let unwrap x = fst x
let wrap_fake s = (s, Parse_info.fake_info s)
TODO : replace builtin ( ) by IdSpecial like I do in AST_generic.ml
* builtin ( ) is used for :
* - ' eval ' , and implicitly generated eval / reflection like functions :
* " eval_var " ( e.g. for echo $ $ x , echo $ { " x" . " } ) ,
* - ' clone ' ,
* - ' exit ' , ' yield ' , ' yield_break ' TODO ' yield_from ? '
* - ' unset ' , ' isset ' , ' empty '
*
*
* - ' echo ' , ' print ' ,
* - ' @ ' , ' ` ' ,
* - ' include ' , ' require ' , ' include_once ' , ' require_once ' .
* - _ _ LINE__/__FILE/__DIR/__CLASS/__TRAIT/__FUNCTION/__METHOD/
*
* See also data / php_stdlib / pfff.php which declares those builtins .
* See also tests / php / semantic/ for example of uses of those builtins .
*
* coupling : if modify the string , git grep it because it 's probably
* used in patterns too .
* builtin() is used for:
* - 'eval', and implicitly generated eval/reflection like functions:
* "eval_var" (e.g. for echo $$x, echo ${"x"."y"}),
* - 'clone',
* - 'exit', 'yield', 'yield_break' TODO 'yield_from?'
* - 'unset', 'isset', 'empty'
*
*
* - 'echo', 'print',
* - '@', '`',
* - 'include', 'require', 'include_once', 'require_once'.
* - __LINE__/__FILE/__DIR/__CLASS/__TRAIT/__FUNCTION/__METHOD/
*
* See also data/php_stdlib/pfff.php which declares those builtins.
* See also tests/php/semantic/ for example of uses of those builtins.
*
* coupling: if modify the string, git grep it because it's probably
* used in patterns too.
*)
let builtin x = "__builtin__" ^ x
for ' self'/'parent ' , ' static ' , ' lambda ' , ' namespace ' , root namespace ' \ ' ,
* ' class ' as in C::class
* TODO : transform in IdSpecial !
* 'class' as in C::class
* TODO: transform in IdSpecial!
*)
let special x = "__special__" ^ x
AST helpers
let has_modifier cv = List.length cv.cv_modifiers > 0
let is_static modifiers = List.mem Static (List.map unwrap modifiers)
let is_private modifiers = List.mem Private (List.map unwrap modifiers)
let string_of_xhp_tag xs = ":" ^ Common.join ":" xs
let str_of_ident (s, _) = s
let tok_of_ident (_, x) = x
exception TodoNamespace of tok
let str_of_name = function
| [ id ] -> str_of_ident id
| [] -> raise Common.Impossible
| x :: _xs -> raise (TodoNamespace (tok_of_ident x))
let tok_of_name = function
| [ id ] -> tok_of_ident id
| [] -> raise Common.Impossible
pick first one
| x :: _xs -> tok_of_ident x
(* we sometimes need to remove the '$' prefix *)
let remove_first_char s = String.sub s 1 (String.length s - 1)
let str_of_class_name x =
match x with
| Hint name -> str_of_name name
| _ -> raise Common.Impossible
let name_of_class_name x =
match x with
| Hint [ name ] -> name
| Hint [] -> raise Common.Impossible
| Hint name -> raise (TodoNamespace (tok_of_name name))
| _ -> raise Common.Impossible
| null | https://raw.githubusercontent.com/returntocorp/semgrep/dcea978347df81cbc8f2c2b49b80c1980f6194cf/languages/php/ast/ast_php.ml | ocaml | ***************************************************************************
Prelude
***************************************************************************
***************************************************************************
Token (leaves)
***************************************************************************
with tarzan
round(), square[], curly{}, angle<> brackets
with tarzan
with tarzan
the string contains the $ prefix
with tarzan
The keyword 'namespace' can be in a leading position. The special
* ident 'ROOT' can also be leading.
with tarzan
with tarzan
***************************************************************************
***************************************************************************
booleans are really just Int in PHP :(
I don't think ^ is true. It reads like a boolean represents a truth
value, where for purposes of conversion 0 is cast to false and
non-0 is cast to true
TODO: bracket
Id is valid for "entities" (functions, classes, constants). Id is also
* used for class methods/fields/constants. It can also contain
* "self/parent" or "static", "class". It can be "true", "false", "null"
* and many other builtin constants. See builtin() and special() below.
*
* todo: For field name, if in the code they are referenced like $this->fld,
* we should prepend a $ to fld to match their definition.
less: should be renamed Name
when None it means add to the end when used in lvalue position
pad: could perhaps be at the statement level? The left expr
* must be an lvalue (e.g. a variable).
really a destructuring tuple let; always used as part of an Assign or
* in foreach_pattern.
used only inside array_value or foreach_pattern, or for yield
* (which is translated as a builtin and so a Call)
e.g. f(...$x)
todo? transform into Call (builtin ...) ?
sgrep-ext:
float
represents the "self" keyword expression in a classes
represents the "parent" keyword expression in a class
language constructs that look like functions
string_const_expr is for shape field names which are permitted to be either
* literal strings or class constants.
***************************************************************************
Types
***************************************************************************
todo: add the generics
??
***************************************************************************
Statement
***************************************************************************
:
only at toplevel in most of our code
only at toplevel
the qualified_ident below can not have a leading '\', it can also
* be the root namespace
when alias
expr is most of the time a simple variable name
***************************************************************************
Definitions
***************************************************************************
The func_def type below is actually used both for functions and methods.
*
* For methods, a few names are specials:
* - __construct, __destruct
* - __call, __callStatic
functions returning a ref are rare
only for methods; always empty for functions
is_ref
they have different scoping rules for free variables
sgrep-ext:
for methods, and below for fields too
normally either an Id or Call with only static arguments
normally a static scalar
traits
If this class is an enum, what is the underlying type (and
* constraint) of the enum?
note that the name will contain a $
with tarzan
***************************************************************************
***************************************************************************
with tarzan
***************************************************************************
Any
***************************************************************************
with tarzan
with tarzan
***************************************************************************
Helpers
***************************************************************************
we sometimes need to remove the '$' prefix | ,
*
* Copyright ( C ) 2011 - 2013 Facebook
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation , with the
* special exception on linking described in file license.txt .
*
* This library is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the file
* license.txt for more details .
*
* Copyright (C) 2011-2013 Facebook
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation, with the
* special exception on linking described in file license.txt.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
* license.txt for more details.
*)
A ( real ) Abstract Syntax Tree for PHP , not a Concrete Syntax Tree
* as in cst_php.ml .
*
* This file contains a simplified PHP abstract syntax tree . The original
* PHP syntax tree ( cst_php.ml ) is good for code refactoring or
* code visualization ; the types used matches exactly the source . However ,
* for other algorithms , the nature of the AST makes the code a bit
* redundant . Hence the idea of a SimpleAST which is the
* original AST where certain constructions have been factorized
* or even removed .
*
* Here is a list of the simplications / factorizations :
* - no purely syntactical tokens in the AST like parenthesis , brackets ,
* braces , angles , commas , semicolons , antislash , etc . No ParenExpr .
* No FinalDef . No NotParsedCorrectly . The only token information kept
* is for identifiers for error reporting . See wrap ( ) below .
*
* - support for old syntax is removed . No IfColon , ColonStmt ,
* CaseColonList .
* - support for extra tools is removed . No XdebugXxx
* update : but support for semgrep is restored ( Ellipsis )
* - support for features we do n't really use in our code is removed
* e.g. unset cast . No Use , UseDirect , UseParen . No CastUnset .
* Also no StaticObjCallVar .
* - some known directives like ' ) ; ' or ' declare(strict=1 ) ; '
* are skipped because they do n't have a useful semantic for
* the abstract interpreter or the type inference engine . No Declare .
*
* - sugar is removed , no ArrayLong vs ArrayShort , no InlineHtml ,
* no HereDoc , no EncapsXxx , no XhpSingleton ( but kept Xhp ) , no
* implicit fields via constructor parameters .
* - some builtins , for instance ' echo ' , are transformed in " _ _ builtin__echo " .
* See builtin ( ) below .
* - no include / require , they are transformed in call
* to _ _ builtin__require ( maybe not a good idea )
* - some special keywords , for instance ' self ' , are transformed in
* " _ _ special__self " . See special ( ) below .
* The comment is still relevant but we should use a different example than self .
* - the different ways to define namespaces are merged , no
* NamespaceBracketDef .
*
* - a simpler stmt type ; no extra toplevel and stmt_and_def types ,
* no FuncDefNested , no ClassDefNested . No StmtList .
* - a simpler expr type ; no lvalue vs expr vs static_scalar vs attribute
* ( update : now static_scalar = expr = lvalue also in cst_php.ml ) .
* Also no scalar . No Sc , no C. No Lv . Pattern matching constants
* is simpler : | Sc ( C ( String ... ) ) - > ... becomes just | String - > ....
* Also no arg type . No Arg , ArgRef , ArgUnpack . Also no xhp_attr_value type .
* No XhpAttrString , XhpAttrExpr .
* - no EmptyStmt , it is transformed in an empty Block
* - a simpler If . ' elseif ' are transformed in nested If , and empty ' else '
* in an empty Block .
* - a simpler , foreach_var_either and foreach_arrow are transformed
* into expressions with a new Arrow constructor ( maybe not good idea )
* - some special constructs like AssignRef were transformed into
* composite calls to Assign and Ref . Same for AssignList , AssignNew .
* Same for arguments passed by reference , no Arg , ArgRef , ArgUnpack .
* Same for refs in arrays , no ArrayRef , ArrayArrowRef . Also no ListVar ,
* ListList , ListEmpty . No ForeachVar , ForeachList .
* Array value are also decomposed in regular expr or Arrow , no
* ArrayArrowExpr , no ForeachArrow . More orthogonal .
* - a unified Call . No FunCallSimple , FunCallVar , MethodCallSimple ,
* StaticMethodCallSimple , StaticMethodCallVar
* ( update : same in cst_php.ml now )
* - a unified Array_get . No VArrayAccess , VArrayAccessXhp ,
* VBraceAccess , OArrayAccess , OBraceAccess
* ( update : same in cst_php.ml now )
* - unified Class_get and Obj_get instead of lots of duplication in
* many constructors , e.g. no ClassConstant in a separate scalar type ,
* no retarded obj_prop_access / obj_dim types ,
* no OName , CName , ObjProp , ObjPropVar , ObjAccessSimple vs ObjAccess ,
* no ClassNameRefDynamic , no VQualifier , ClassVar , DynamicClassVar ,
* etc .
* ( update : same in cst_php.ml now )
* - unified eval_var , some constructs were transformed into calls to
* " eval_var " builtin , e.g. no GlobalDollar , no VBrace , no Indirect / Deref .
*
* - a simpler ' name ' for identifiers , xhp names and regular names are merged ,
* the special keyword self / parent / static are merged ,
* so the complex I d ( XName [ QI ( Name " foo " ) ] ) becomes just I d [ " foo " ] .
* - ...
*
* todo :
* - put back types ! at least the basic one like f_return_type
* with no generics
* - less : factorize more ? string vs Guil ?
* as in cst_php.ml.
*
* This file contains a simplified PHP abstract syntax tree. The original
* PHP syntax tree (cst_php.ml) is good for code refactoring or
* code visualization; the types used matches exactly the source. However,
* for other algorithms, the nature of the AST makes the code a bit
* redundant. Hence the idea of a SimpleAST which is the
* original AST where certain constructions have been factorized
* or even removed.
*
* Here is a list of the simplications/factorizations:
* - no purely syntactical tokens in the AST like parenthesis, brackets,
* braces, angles, commas, semicolons, antislash, etc. No ParenExpr.
* No FinalDef. No NotParsedCorrectly. The only token information kept
* is for identifiers for error reporting. See wrap() below.
*
* - support for old syntax is removed. No IfColon, ColonStmt,
* CaseColonList.
* - support for extra tools is removed. No XdebugXxx
* update: but support for semgrep is restored (Ellipsis)
* - support for features we don't really use in our code is removed
* e.g. unset cast. No Use, UseDirect, UseParen. No CastUnset.
* Also no StaticObjCallVar.
* - some known directives like 'declare(ticks=1);' or 'declare(strict=1);'
* are skipped because they don't have a useful semantic for
* the abstract interpreter or the type inference engine. No Declare.
*
* - sugar is removed, no ArrayLong vs ArrayShort, no InlineHtml,
* no HereDoc, no EncapsXxx, no XhpSingleton (but kept Xhp), no
* implicit fields via constructor parameters.
* - some builtins, for instance 'echo', are transformed in "__builtin__echo".
* See builtin() below.
* - no include/require, they are transformed in call
* to __builtin__require (maybe not a good idea)
* - some special keywords, for instance 'self', are transformed in
* "__special__self". See special() below.
* The comment is still relevant but we should use a different example than self.
* - the different ways to define namespaces are merged, no
* NamespaceBracketDef.
*
* - a simpler stmt type; no extra toplevel and stmt_and_def types,
* no FuncDefNested, no ClassDefNested. No StmtList.
* - a simpler expr type; no lvalue vs expr vs static_scalar vs attribute
* (update: now static_scalar = expr = lvalue also in cst_php.ml).
* Also no scalar. No Sc, no C. No Lv. Pattern matching constants
* is simpler: | Sc (C (String ...)) -> ... becomes just | String -> ....
* Also no arg type. No Arg, ArgRef, ArgUnpack. Also no xhp_attr_value type.
* No XhpAttrString, XhpAttrExpr.
* - no EmptyStmt, it is transformed in an empty Block
* - a simpler If. 'elseif' are transformed in nested If, and empty 'else'
* in an empty Block.
* - a simpler Foreach, foreach_var_either and foreach_arrow are transformed
* into expressions with a new Arrow constructor (maybe not good idea)
* - some special constructs like AssignRef were transformed into
* composite calls to Assign and Ref. Same for AssignList, AssignNew.
* Same for arguments passed by reference, no Arg, ArgRef, ArgUnpack.
* Same for refs in arrays, no ArrayRef, ArrayArrowRef. Also no ListVar,
* ListList, ListEmpty. No ForeachVar, ForeachList.
* Array value are also decomposed in regular expr or Arrow, no
* ArrayArrowExpr, no ForeachArrow. More orthogonal.
* - a unified Call. No FunCallSimple, FunCallVar, MethodCallSimple,
* StaticMethodCallSimple, StaticMethodCallVar
* (update: same in cst_php.ml now)
* - a unified Array_get. No VArrayAccess, VArrayAccessXhp,
* VBraceAccess, OArrayAccess, OBraceAccess
* (update: same in cst_php.ml now)
* - unified Class_get and Obj_get instead of lots of duplication in
* many constructors, e.g. no ClassConstant in a separate scalar type,
* no retarded obj_prop_access/obj_dim types,
* no OName, CName, ObjProp, ObjPropVar, ObjAccessSimple vs ObjAccess,
* no ClassNameRefDynamic, no VQualifier, ClassVar, DynamicClassVar,
* etc.
* (update: same in cst_php.ml now)
* - unified eval_var, some constructs were transformed into calls to
* "eval_var" builtin, e.g. no GlobalDollar, no VBrace, no Indirect/Deref.
*
* - a simpler 'name' for identifiers, xhp names and regular names are merged,
* the special keyword self/parent/static are merged,
* so the complex Id (XName [QI (Name "foo")]) becomes just Id ["foo"].
* - ...
*
* todo:
* - put back types! at least the basic one like f_return_type
* with no generics
* - less: factorize more? string vs Guil?
*)
type tok = Parse_info.t [@@deriving show]
Expression
lvalue and expr have been mixed in this AST , but an lvalue should be
* an expr restricted to : Var $ var , Array_get , Obj_get , Class_get , or List .
* an expr restricted to: Var $var, Array_get, Obj_get, Class_get, or List.
*)
type expr =
| Bool of bool wrap
| Int of int option wrap
| Double of float option wrap
PHP has no first - class functions so entities are sometimes passed
* as strings so the string wrap below can actually correspond to a
* ' I d name ' sometimes . Some magic functions like param_post ( ) also
* introduce entities ( variables ) via strings .
* as strings so the string wrap below can actually correspond to a
* 'Id name' sometimes. Some magic functions like param_post() also
* introduce entities (variables) via strings.
*)
| IdSpecial of special wrap
Var used to be merged with But then we were doing lots of
* ' when Ast.is_variable name ' so maybe better to have I d and
* ( at the same time OCaml does not differentiate I d from Var ) .
* The string contains the ' $ ' .
* 'when Ast.is_variable name' so maybe better to have Id and Var
* (at the same time OCaml does not differentiate Id from Var).
* The string contains the '$'.
*)
| Var of var
| Array_get of expr * expr option bracket
Unified method / field access .
* ex : $ o->foo ( ) = = > Call(Obj_get(Var " $ o " , I d " foo " ) , [ ] )
* ex : A::foo ( ) = = > Call(Class_get(Id " A " , I d " foo " ) , [ ] )
* note that I d can be " self " , " parent " , " static " .
* ex: $o->foo() ==> Call(Obj_get(Var "$o", Id "foo"), [])
* ex: A::foo() ==> Call(Class_get(Id "A", Id "foo"), [])
* note that Id can be "self", "parent", "static".
*)
| Obj_get of expr * tok * expr
| Class_get of expr * tok * expr
| New of tok * expr * argument list
| NewAnonClass of tok * argument list * class_def
| InstanceOf of tok * expr * expr
| Assign of expr * tok * expr
| AssignOp of expr * binaryOp wrap * expr
| List of expr list bracket
| Arrow of expr * tok * expr
$ y = & $ x is transformed into an Assign(Var " $ y " , Ref ( Var " $ x " ) ) . In
* PHP refs are always used in an Assign context .
* PHP refs are always used in an Assign context.
*)
| Ref of tok * expr
| Unpack of expr
| Call of expr * argument list bracket
| Throw of tok * expr
| Infix of AST_generic.incr_decr wrap * expr
| Postfix of AST_generic.incr_decr wrap * expr
| Binop of expr * binaryOp wrap * expr
| Unop of unaryOp wrap * expr
| Guil of expr list bracket
| ConsArray of array_value list bracket
| CondExpr of expr * expr * expr
| Cast of cast_type wrap * expr
yeah ! PHP 5.3 is becoming a real language
| Lambda of func_def
| Match of tok * expr * match_ list
| Ellipsis of tok
| DeepEllipsis of expr bracket
and match_ = MCase of expr list * expr | MDefault of tok * expr
and cast_type =
| BoolTy
| IntTy
| StringTy
| ArrayTy
| ObjectTy
and special =
often transformed in Var " $ this " in the analysis
| This
| Self
| Parent
| FuncLike of funclike
and funclike = Empty | Eval | Exit | Isset | Unset
and binaryOp =
TODO : now available in AST_generic _ ?
| BinaryConcat
| CombinedComparison
| ArithOp of AST_generic.operator
and unaryOp = AST_generic.operator
and argument =
| Arg of expr
| ArgRef of tok * expr
| ArgUnpack of tok * expr
| ArgLabel of ident * tok * expr
only Var , List , or Arrow , and apparently also Array_get is ok , so
* basically any lvalue
* basically any lvalue
*)
and foreach_pattern = expr
often an Arrow
and array_value = expr
and string_const_expr = expr
and hint_type =
| HintArray of tok
| HintQuestion of tok * hint_type
| HintTuple of hint_type list bracket
| HintCallback of hint_type list * hint_type option
| HintVariadic of tok * hint_type option
and class_name = hint_type
and stmt =
| Expr of expr * tok
| Block of stmt list bracket
| If of tok * expr * stmt * stmt
| Switch of tok * expr * case list
| While of tok * expr * stmt
| Do of tok * stmt * expr
| For of tok * expr list * expr list * expr list * stmt
' foreach ( $ xs as $ k ) ' , ' ... ( $ xs as $ k = > $ v ) ' , ' ... ( $ xs as list($ ... ) ) '
| Foreach of tok * expr * tok * foreach_pattern * stmt
| Return of tok * expr option
| Break of tok * expr option
| Continue of tok * expr option
| Goto of tok * ident
| Try of tok * stmt * catch list * finally list
| ClassDef of class_def
| FuncDef of func_def
| ConstantDef of constant_def
| TypeDef of type_def
| NamespaceDef of tok * qualified_ident * stmt list bracket
Note that there is no LocalVars constructor . Variables in PHP are
* declared when they are first assigned .
* declared when they are first assigned. *)
| StaticVars of tok * (var * expr option) list
| Global of tok * expr list
and case = Case of tok * expr * stmt list | Default of tok * stmt list
catch(Exception $ exn ) { ... } = > ( " Exception " , " $ exn " , [ ... ] )
* TODO : can now be a list of hint_type , Exn1 | Exn2 like in Java .
* TODO: can now be a list of hint_type, Exn1 | Exn2 like in Java.
*)
and catch = tok * hint_type * var * stmt
and finally = tok * stmt
TODO : factorize xx_name in an entity type like in AST_generic.ml ,
* which also lead to a cleaner Lambda and NewAnonClass .
* TODO : factorize also the xx_modifiers and xx_attrs ?
* which also lead to a cleaner Lambda and NewAnonClass.
* TODO: factorize also the xx_modifiers and xx_attrs?
*)
and func_def = {
TODO : " _ lambda " when used for lambda , see also AnonLambda for f_kind below
f_name : ident;
f_kind : function_kind wrap;
TODO bracket
f_return_type : hint_type option;
f_ref : bool;
m_modifiers : modifier list;
only for AnonLambda ( could also abuse parameter ) , not for ShortLambda
f_attrs : attribute list;
f_body : stmt;
}
and function_kind =
| Function
| AnonLambda
| Method
and parameter =
| ParamClassic of parameter_classic
| ParamEllipsis of tok
and parameter_classic = {
p_type : hint_type option;
p_ref : tok option;
p_name : var;
p_default : expr option;
p_attrs : attribute list;
p_variadic : tok option;
}
and modifier = keyword_modifier wrap
and keyword_modifier =
| Public
| Private
| Protected
| Abstract
| Final
| Static
| Async
and attribute = expr
and constant_def = {
cst_tok : tok;
cst_name : ident;
cst_body : expr;
}
and enum_type = { e_base : hint_type; e_constraint : hint_type option }
and class_def = {
c_name : ident;
c_kind : class_kind wrap;
c_extends : class_name option;
c_implements : class_name list;
c_enum_type : enum_type option;
c_modifiers : modifier list;
c_attrs : attribute list;
c_constants : constant_def list;
c_variables : class_var list;
c_methods : method_def list;
c_braces : unit bracket;
}
and class_kind = Class | Interface | Trait | Enum
and xhp_field = class_var * bool
and class_var = {
cv_name : var;
cv_type : hint_type option;
cv_value : expr option;
cv_modifiers : modifier list;
}
and method_def = func_def
and type_def = { t_name : ident; t_kind : type_def_kind }
and type_def_kind = Alias of hint_type [@@deriving show { with_path = false }]
Program
type program = stmt list [@@deriving show { with_path = false }]
type partial = PartialIf of tok * expr [@@deriving show { with_path = false }]
type any =
| Program of program
| Stmt of stmt
| Expr2 of expr
| Param of parameter
| Partial of partial
[@@deriving show { with_path = false }]
let unwrap x = fst x
let wrap_fake s = (s, Parse_info.fake_info s)
TODO : replace builtin ( ) by IdSpecial like I do in AST_generic.ml
* builtin ( ) is used for :
* - ' eval ' , and implicitly generated eval / reflection like functions :
* " eval_var " ( e.g. for echo $ $ x , echo $ { " x" . " } ) ,
* - ' clone ' ,
* - ' exit ' , ' yield ' , ' yield_break ' TODO ' yield_from ? '
* - ' unset ' , ' isset ' , ' empty '
*
*
* - ' echo ' , ' print ' ,
* - ' @ ' , ' ` ' ,
* - ' include ' , ' require ' , ' include_once ' , ' require_once ' .
* - _ _ LINE__/__FILE/__DIR/__CLASS/__TRAIT/__FUNCTION/__METHOD/
*
* See also data / php_stdlib / pfff.php which declares those builtins .
* See also tests / php / semantic/ for example of uses of those builtins .
*
* coupling : if modify the string , git grep it because it 's probably
* used in patterns too .
* builtin() is used for:
* - 'eval', and implicitly generated eval/reflection like functions:
* "eval_var" (e.g. for echo $$x, echo ${"x"."y"}),
* - 'clone',
* - 'exit', 'yield', 'yield_break' TODO 'yield_from?'
* - 'unset', 'isset', 'empty'
*
*
* - 'echo', 'print',
* - '@', '`',
* - 'include', 'require', 'include_once', 'require_once'.
* - __LINE__/__FILE/__DIR/__CLASS/__TRAIT/__FUNCTION/__METHOD/
*
* See also data/php_stdlib/pfff.php which declares those builtins.
* See also tests/php/semantic/ for example of uses of those builtins.
*
* coupling: if modify the string, git grep it because it's probably
* used in patterns too.
*)
let builtin x = "__builtin__" ^ x
for ' self'/'parent ' , ' static ' , ' lambda ' , ' namespace ' , root namespace ' \ ' ,
* ' class ' as in C::class
* TODO : transform in IdSpecial !
* 'class' as in C::class
* TODO: transform in IdSpecial!
*)
let special x = "__special__" ^ x
AST helpers
let has_modifier cv = List.length cv.cv_modifiers > 0
let is_static modifiers = List.mem Static (List.map unwrap modifiers)
let is_private modifiers = List.mem Private (List.map unwrap modifiers)
let string_of_xhp_tag xs = ":" ^ Common.join ":" xs
let str_of_ident (s, _) = s
let tok_of_ident (_, x) = x
exception TodoNamespace of tok
let str_of_name = function
| [ id ] -> str_of_ident id
| [] -> raise Common.Impossible
| x :: _xs -> raise (TodoNamespace (tok_of_ident x))
let tok_of_name = function
| [ id ] -> tok_of_ident id
| [] -> raise Common.Impossible
pick first one
| x :: _xs -> tok_of_ident x
let remove_first_char s = String.sub s 1 (String.length s - 1)
let str_of_class_name x =
match x with
| Hint name -> str_of_name name
| _ -> raise Common.Impossible
let name_of_class_name x =
match x with
| Hint [ name ] -> name
| Hint [] -> raise Common.Impossible
| Hint name -> raise (TodoNamespace (tok_of_name name))
| _ -> raise Common.Impossible
|
7c767a0a935701e0abd2f928089d50a44b5ecaa8fcd293efc25f905db83be597 | juhp/fbrnch | Bump.hs | module Cmd.Bump
( bumpPkgs,
)
where
import Branches
import Common
import Common.System
import Git
import Koji
import Package
import System.IO.Extra
-- FIXME --force
FIXME --target
FIXME detect rpmautospec and add empty commit
bumpPkgs :: Bool -> Maybe CommitOpt -> (BranchesReq,[String]) -> IO ()
bumpPkgs local mopt =
withPackagesByBranches (boolHeader local) False (if local then cleanGit else cleanGitFetchActive)
AnyNumber bumpPkg
where
bumpPkg :: Package -> AnyBranch -> IO ()
bumpPkg pkg br = do
dead <- doesFileExist "dead.package"
if dead
then putStrLn "dead package"
else do
spec <- localBranchSpecFile pkg br
rbr <-
case br of
RelBranch rbr -> return rbr
OtherBranch _ -> systemBranch
newnvr <- pkgNameVerRel' rbr spec
moldnvr <-
if local
then do
withTempFile $ \tempfile -> do
git "show" ["origin:" ++ spec] >>= writeFile tempfile
pkgNameVerRel rbr tempfile
else
case br of
RelBranch rbr' ->
let tag = branchDestTag rbr' in
kojiLatestNVR tag $ unPackage pkg
FIXME fallback to local ?
_ -> return Nothing
if equivNVR newnvr (fromMaybe "" moldnvr)
then do
git_ "log" ["origin..HEAD", "--pretty=oneline"]
let clmsg =
case mopt of
Just (CommitMsg msg) -> msg
_ -> "rebuild"
FIXME check for rpmautospec first
cmd_ "rpmdev-bumpspec" ["-c", clmsg, spec]
let copts =
case mopt of
Nothing -> ["-m", "bump release"]
Just opt ->
case opt of
CommitMsg msg -> ["-m", msg]
FIXME reject amend if already pushed
CommitAmend -> ["--amend", "--no-edit"]
-- FIXME quiet commit?
git_ "commit" $ "-a" : copts
else putStrLn "already bumped"
| null | https://raw.githubusercontent.com/juhp/fbrnch/c724daa9e24a999328c3f7cad0213dafdf8183a8/src/Cmd/Bump.hs | haskell | FIXME --force
target
FIXME quiet commit? | module Cmd.Bump
( bumpPkgs,
)
where
import Branches
import Common
import Common.System
import Git
import Koji
import Package
import System.IO.Extra
FIXME detect rpmautospec and add empty commit
bumpPkgs :: Bool -> Maybe CommitOpt -> (BranchesReq,[String]) -> IO ()
bumpPkgs local mopt =
withPackagesByBranches (boolHeader local) False (if local then cleanGit else cleanGitFetchActive)
AnyNumber bumpPkg
where
bumpPkg :: Package -> AnyBranch -> IO ()
bumpPkg pkg br = do
dead <- doesFileExist "dead.package"
if dead
then putStrLn "dead package"
else do
spec <- localBranchSpecFile pkg br
rbr <-
case br of
RelBranch rbr -> return rbr
OtherBranch _ -> systemBranch
newnvr <- pkgNameVerRel' rbr spec
moldnvr <-
if local
then do
withTempFile $ \tempfile -> do
git "show" ["origin:" ++ spec] >>= writeFile tempfile
pkgNameVerRel rbr tempfile
else
case br of
RelBranch rbr' ->
let tag = branchDestTag rbr' in
kojiLatestNVR tag $ unPackage pkg
FIXME fallback to local ?
_ -> return Nothing
if equivNVR newnvr (fromMaybe "" moldnvr)
then do
git_ "log" ["origin..HEAD", "--pretty=oneline"]
let clmsg =
case mopt of
Just (CommitMsg msg) -> msg
_ -> "rebuild"
FIXME check for rpmautospec first
cmd_ "rpmdev-bumpspec" ["-c", clmsg, spec]
let copts =
case mopt of
Nothing -> ["-m", "bump release"]
Just opt ->
case opt of
CommitMsg msg -> ["-m", msg]
FIXME reject amend if already pushed
CommitAmend -> ["--amend", "--no-edit"]
git_ "commit" $ "-a" : copts
else putStrLn "already bumped"
|
5658eac1e5d0215e2d7da9bb890275d1db4273fca868b9fe64b331e348b7d35f | slindley/effect-handlers | ParameterisedState.hs | -- state using parameterised handlers
# LANGUAGE TypeFamilies , NoMonomorphismRestriction ,
FlexibleContexts , TypeOperators , ScopedTypeVariables #
FlexibleContexts, TypeOperators, ScopedTypeVariables #-}
import ParameterisedHandlers
data Get s = Get
instance Op (Get s) where
type Param (Get s) = ()
type Return (Get s) = s
get = applyOp Get
data Put s = Put
instance Op (Put s) where
type Param (Put s) = s
type Return (Put s) = ()
put = applyOp Put
-- handle state in the standard way
handleState :: Monad m => s -> Comp (Get s, (Put s, ())) a -> m a
handleState = handleStateWith Empty
-- The handleStateWith function generalises handleState to support
-- horizontal composition, either for throwing other effects or for
-- composing with compatible effects such as random choice.
handleStateWith :: (Get s `NotIn` e, Put s `NotIn` e, Monad m) =>
OpHandler e (m a) s -> s -> Comp (Get s, (Put s, e)) a -> m a
handleStateWith h s comp =
handle s comp
(Get |-> (\() k -> k s s) :<:
Put |-> (\s k -> k s ()) :<: h,
return)
data Mode = Handle | Forward
mcbrideState mode (s :: Int) comp =
handle mode comp
((Get |->
case mode of
Handle ->
(\() k -> mcbrideState Forward s (k Forward s))
Forward ->
(\p k -> App makeWitness Get p (k Forward))) :<:
(Put |->
case mode of
Handle ->
(\s k -> mcbrideState Forward s (k Forward ()))
Forward ->
(\p k -> App makeWitness Put p (k Forward))) :<: Empty,
return)
getInt :: In (Get Int) e => () -> Comp e Int
getInt = get
putInt :: In (Put Int) e => Int -> Comp e ()
putInt = put
count :: Comp (Get Int, (Put Int, ())) ()
count =
do {n <- get ();
if n == (0 :: Int) then return ()
else do {put (n-1); count}}
| null | https://raw.githubusercontent.com/slindley/effect-handlers/39d0d09582d198dd6210177a0896db55d92529f4/Examples/experimental/ParameterisedState.hs | haskell | state using parameterised handlers
handle state in the standard way
The handleStateWith function generalises handleState to support
horizontal composition, either for throwing other effects or for
composing with compatible effects such as random choice. |
# LANGUAGE TypeFamilies , NoMonomorphismRestriction ,
FlexibleContexts , TypeOperators , ScopedTypeVariables #
FlexibleContexts, TypeOperators, ScopedTypeVariables #-}
import ParameterisedHandlers
data Get s = Get
instance Op (Get s) where
type Param (Get s) = ()
type Return (Get s) = s
get = applyOp Get
data Put s = Put
instance Op (Put s) where
type Param (Put s) = s
type Return (Put s) = ()
put = applyOp Put
handleState :: Monad m => s -> Comp (Get s, (Put s, ())) a -> m a
handleState = handleStateWith Empty
handleStateWith :: (Get s `NotIn` e, Put s `NotIn` e, Monad m) =>
OpHandler e (m a) s -> s -> Comp (Get s, (Put s, e)) a -> m a
handleStateWith h s comp =
handle s comp
(Get |-> (\() k -> k s s) :<:
Put |-> (\s k -> k s ()) :<: h,
return)
data Mode = Handle | Forward
mcbrideState mode (s :: Int) comp =
handle mode comp
((Get |->
case mode of
Handle ->
(\() k -> mcbrideState Forward s (k Forward s))
Forward ->
(\p k -> App makeWitness Get p (k Forward))) :<:
(Put |->
case mode of
Handle ->
(\s k -> mcbrideState Forward s (k Forward ()))
Forward ->
(\p k -> App makeWitness Put p (k Forward))) :<: Empty,
return)
getInt :: In (Get Int) e => () -> Comp e Int
getInt = get
putInt :: In (Put Int) e => Int -> Comp e ()
putInt = put
count :: Comp (Get Int, (Put Int, ())) ()
count =
do {n <- get ();
if n == (0 :: Int) then return ()
else do {put (n-1); count}}
|
01b791c12e9e919d592ad75dfaec4416b712a56ae3db4aa0100697fc2c0e4826 | spechub/Hets | ParseAS.hs | # LANGUAGE TupleSections #
module OWL2.ParseAS where
import Prelude hiding (lookup)
import OWL2.AS as AS
import Common.AnnoParser (newlineOrEof, commentLine)
import Common.IRI hiding (parseIRI)
import Common.Parsec
import Common.Lexer (getNumber, value, nestCommentOut)
import qualified Common.GlobalAnnotations as GA (PrefixMap)
import Text.ParserCombinators.Parsec
import Data.Char
import Data.Map (union, fromList)
{- | @followedBy c p@ parses @p@ and then requires @c@ to match via
     lookahead, without consuming @c@'s input. Consumes no input on
     failure. -}
followedBy :: CharParser st b -> CharParser st a -> CharParser st a
followedBy cond p = try (p <* lookAhead cond)
-- | Tries the given alternatives purely as a lookahead: succeeds with
-- the first matching parser's result but consumes no input.
arbitraryLookaheadOption :: [CharParser st a] -> CharParser st a
arbitraryLookaheadOption = try . lookAhead . choice
-- | @manyN n p@ parses at least @n@ occurrences of @p@: exactly @n@
-- mandatory ones followed by any number of further ones.
manyN :: Int -> CharParser st a -> CharParser st [a]
manyN n p = count n p <++> many p
-- | A parser that always succeeds with @Nothing@ and consumes nothing.
never :: CharParser st (Maybe a)
never = pure Nothing
-- # Basic constructs
-- | Parses a line comment: a @#@ followed by everything up to the end
-- of the line (or end of input).
comment :: CharParser st String
comment = try $ char '#' >> manyTill anyChar newlineOrEof
-- | Runs a parser and then skips any trailing whitespace and comments.
skips :: CharParser st a -> CharParser st a
skips = (<< skips')

-- | Skips whitespace, @#@ line comments, annotation comment lines and
-- nested comments.
skips' :: CharParser st ()
skips' = skipMany $ choice
  [forget space, forget comment, forget commentLine, forget nestCommentOut]
-- | Parses @s@ as a keyword: the exact string, not followed by an
-- alphanumeric character, with trailing whitespace/comments skipped.
-- Consumes no input on failure.
keyword :: String -> CharParser st ()
keyword s = try . skips $ string s >> notFollowedBy alphaNum
-- | Parses a full iri
fullIri :: CharParser st IRI
fullIri = angles iriParser
ncNameStart :: Char -> Bool
ncNameStart c = isAlpha c || c == '_'
-- | rfc3987 plus '+' from scheme (scheme does not allow the dots)
ncNameChar :: Char -> Bool
ncNameChar c = isAlphaNum c || elem c ".+-_\183"
| Parses a prefix name ( PNAME_NS of )
prefix :: CharParser st String
prefix = skips $ option "" (satisfy ncNameStart <:> many (satisfy ncNameChar))
<< char ':'
-- | Parses an abbreviated or full iri
parseIRI :: GA.PrefixMap -> CharParser st IRI
parseIRI pm = skips (expandIRI pm <$> (fullIri <|> compoundIriCurie) <?> "IRI")
| @parseEnclosedWithKeyword k p@ parses the keyword @k@ followed @p@
enclosed in parentheses . Skips spaces and comments before and after @p@.
enclosed in parentheses. Skips spaces and comments before and after @p@. -}
parseEnclosedWithKeyword :: String -> CharParser st a -> CharParser st a
parseEnclosedWithKeyword s p = do
keyword s
skips $ char '('
r <- skips p
skips $ char ')'
return r
-- | Parses a prefix declaration such as @Prefix(ex:=<http://ex.org/>)@,
-- yielding the prefix name and its expansion IRI.
parsePrefixDeclaration :: CharParser st (String, IRI)
parsePrefixDeclaration = parseEnclosedWithKeyword "Prefix" $
  (,) <$> prefix <* skips (char '=') <*> fullIri
parseDirectlyImportsDocument :: GA.PrefixMap -> CharParser st IRI
parseDirectlyImportsDocument pm =
parseEnclosedWithKeyword "Import" (parseIRI pm) <?> "Import"
-- # Entities, Literals, and Individuals
-- ## Entities
parseEntity' :: GA.PrefixMap -> EntityType -> String -> CharParser st Entity
parseEntity' pm t k = parseEnclosedWithKeyword k $ do
iri <- parseIRI pm
return $ mkEntity t iri
parseEntity :: GA.PrefixMap -> CharParser st Entity
parseEntity pm =
parseEntity' pm Class "Class" <|>
parseEntity' pm Datatype "Datatype" <|>
parseEntity' pm ObjectProperty "ObjectProperty" <|>
parseEntity' pm DataProperty "DataProperty" <|>
parseEntity' pm AnnotationProperty "AnnotationProperty" <|>
parseEntity' pm NamedIndividual "NamedIndividual" <?>
"Entity"
# # Literals
-- | Parses a single literal character, decoding the two escape
-- sequences of the OWL functional syntax: @\\\"@ and @\\\\@.
charOrEscaped :: CharParser st Char
charOrEscaped =
  ('"' <$ try (string "\\\"")) <|>
  ('\\' <$ try (string "\\\\")) <|>
  anyChar
-- | Parses the datatype tag of a typed literal: @^^IRI@.
-- Uses 'tryString' so that failing after a single @^@ consumes no
-- input, keeping the language-tag/untyped alternative in
-- 'parseLiteral' reachable instead of aborting with a consumed-input
-- failure.
parseTypeSignature :: GA.PrefixMap -> CharParser st IRI
parseTypeSignature pm = tryString "^^" >> parseIRI pm
parseLanguageTag :: CharParser st String
parseLanguageTag = do
char '@'
many1 (letter <|> char '-')
parseLiteral :: GA.PrefixMap -> CharParser st Literal
parseLiteral pm = do
char '"'
s <- manyTill charOrEscaped (try $ char '"')
typ <- (Typed <$> parseTypeSignature pm) <|>
(Untyped <$> optionMaybe parseLanguageTag)
return $ Literal s typ
-- ## Individuals
parseAnonymousIndividual :: GA.PrefixMap -> CharParser st AnonymousIndividual
parseAnonymousIndividual pm = skips $ expandIRI pm <$> iriCurie
parseIndividual :: GA.PrefixMap -> CharParser st Individual
parseIndividual pm = parseIRI pm
<|> parseAnonymousIndividual pm
<?> "Individual"
-- # Annotations
parseAnnotationValue :: GA.PrefixMap -> CharParser st AnnotationValue
parseAnnotationValue pm =
(parseLiteral pm >>= return . AnnValLit) <|>
(parseIRI pm >>= return . AnnValue) <|>
(parseAnonymousIndividual pm >>= return . AnnAnInd) <?>
"AnnotationValue"
parseAnnotationSubject :: GA.PrefixMap -> CharParser st AnnotationSubject
parseAnnotationSubject pm =
(AnnSubAnInd <$> parseAnonymousIndividual pm) <|>
(AnnSubIri <$> parseIRI pm)
parseAnnotations :: GA.PrefixMap -> CharParser st [Annotation]
parseAnnotations pm = many $ parseAnnotation pm
-- | Parses an @Annotation(...)@ clause, including any annotations of
-- the annotation itself.
parseAnnotation :: GA.PrefixMap -> CharParser st Annotation
parseAnnotation pm = (<?> "Annotation") $
  parseEnclosedWithKeyword "Annotation" $
    Annotation
      <$> parseAnnotations pm
      <*> parseIRI pm
      <*> parseAnnotationValue pm
-- ## Data Range
parseDataJunction' ::
GA.PrefixMap -> String -> JunctionType -> CharParser st DataRange
parseDataJunction' pm k t = parseEnclosedWithKeyword k $
DataJunction t <$> manyN 2 (parseDataRange pm)
parseDataJunction :: GA.PrefixMap -> CharParser st DataRange
parseDataJunction pm =
parseDataJunction' pm "DataUnionOf" UnionOf <|>
parseDataJunction' pm "DataIntersectionOf" IntersectionOf
parseDataComplementOf :: GA.PrefixMap -> CharParser st DataRange
parseDataComplementOf pm = parseEnclosedWithKeyword "DataComplementOf" $
DataComplementOf <$> parseDataRange pm
parseDataOneOf :: GA.PrefixMap -> CharParser st DataRange
parseDataOneOf pm = parseEnclosedWithKeyword "DataOneOf" $
DataOneOf <$> many1 (parseLiteral pm)
parseDatatypeResComponent ::
GA.PrefixMap -> CharParser st (ConstrainingFacet, RestrictionValue)
parseDatatypeResComponent pm =
(,) <$>
(parseIRI pm) <*>
(parseLiteral pm)
parseDatatypeRestriction :: GA.PrefixMap -> CharParser st DataRange
parseDatatypeRestriction pm =
parseEnclosedWithKeyword "DatatypeRestriction" $ do
dataType <- (parseIRI pm)
restrictions <- many1 (parseDatatypeResComponent pm)
return $ DataType dataType restrictions
parseDataRange :: GA.PrefixMap -> CharParser st DataRange
parseDataRange pm =
(parseDataJunction pm) <|>
(parseDataComplementOf pm) <|>
(parseDataOneOf pm) <|>
(parseDatatypeRestriction pm) <|>
(DataType <$> (parseIRI pm) <*> return []) <?>
"DataRange"
-- # Axioms
# # Declaration
parseDeclaration :: GA.PrefixMap -> CharParser st Axiom
parseDeclaration pm = parseEnclosedWithKeyword "Declaration" $ do
annotations <- many (parseAnnotation pm)
entity <- (parseEntity pm)
return $ Declaration annotations entity
# # ClassExpressions
parseObjectIntersectionOf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectIntersectionOf pm = parseEnclosedWithKeyword "ObjectIntersectionOf" $
ObjectJunction IntersectionOf <$> manyN 2 (parseClassExpression pm)
parseObjectUnionOf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectUnionOf pm = parseEnclosedWithKeyword "ObjectUnionOf" $
ObjectJunction UnionOf <$> manyN 2 (parseClassExpression pm)
parseObjectComplementOf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectComplementOf pm = parseEnclosedWithKeyword "ObjectComplementOf" $
ObjectComplementOf <$> (parseClassExpression pm)
parseObjectOneOf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectOneOf pm = parseEnclosedWithKeyword "ObjectOneOf" $
ObjectOneOf <$> many1 (parseIndividual pm)
parseObjectProperty :: GA.PrefixMap -> CharParser st ObjectPropertyExpression
parseObjectProperty pm = ObjectProp <$> (parseIRI pm)
parseInverseObjectProperty ::
GA.PrefixMap -> CharParser st ObjectPropertyExpression
parseInverseObjectProperty pm = parseEnclosedWithKeyword "ObjectInverseOf" $
ObjectInverseOf <$> (parseObjectProperty pm)
parseObjectPropertyExpression ::
GA.PrefixMap -> CharParser st ObjectPropertyExpression
parseObjectPropertyExpression pm =
(parseInverseObjectProperty pm) <|>
(parseObjectProperty pm) <?>
"ObjectPropertyExpression"
parseObjectSomeValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectSomeValuesFrom pm =
parseEnclosedWithKeyword "ObjectSomeValuesFrom" $ do
objectPropertyExpr <- (parseObjectPropertyExpression pm)
classExpr <- (parseClassExpression pm)
return $ ObjectValuesFrom SomeValuesFrom objectPropertyExpr classExpr
parseObjectAllValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectAllValuesFrom pm =
parseEnclosedWithKeyword "ObjectAllValuesFrom" $ do
objectPropertyExpr <- (parseObjectPropertyExpression pm)
classExpr <- (parseClassExpression pm)
return $ ObjectValuesFrom AllValuesFrom objectPropertyExpr classExpr
parseObjectHasValue :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectHasValue pm = parseEnclosedWithKeyword "ObjectHasValue" $ do
objectPropertyExpr <- (parseObjectPropertyExpression pm)
val <- (parseIndividual pm)
return $ ObjectHasValue objectPropertyExpr val
parseObjectHasSelf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectHasSelf pm = parseEnclosedWithKeyword "ObjectHasSelf" $
ObjectHasSelf <$> (parseObjectPropertyExpression pm)
parseCardinality' :: CardinalityType
-> String
-> CharParser st a
-> CharParser st b
-> CharParser st (Cardinality a b)
parseCardinality' c k pa pb = parseEnclosedWithKeyword k $ do
n <- skips $ value 10 <$> getNumber
objectPropertyExpr <- pa
classExpr <- optionMaybe pb
return $ Cardinality c n objectPropertyExpr classExpr
parseObjectCardinality :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectCardinality pm = ObjectCardinality <$> (
cardinality "ObjectMinCardinality" MinCardinality <|>
cardinality "ObjectMaxCardinality" MaxCardinality <|>
cardinality "ObjectExactCardinality" ExactCardinality
)
where cardinality s t = parseCardinality' t s a b
a = (parseObjectPropertyExpression pm)
b = (parseClassExpression pm)
parseDataCardinality :: GA.PrefixMap -> CharParser st ClassExpression
parseDataCardinality pm = DataCardinality <$> (
cardinality "DataMinCardinality" MinCardinality <|>
cardinality "DataMaxCardinality" MaxCardinality <|>
cardinality "DataExactCardinality" ExactCardinality
)
where cardinality s t = parseCardinality' t s a b
a = (parseIRI pm)
b = (parseDataRange pm)
parseDataSomeValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression
parseDataSomeValuesFrom pm = parseEnclosedWithKeyword "DataSomeValuesFrom" $ do
exprs <- many1 (followedBy
((parseDataRange pm))
((parseIRI pm)))
range <- (parseDataRange pm)
return $ DataValuesFrom SomeValuesFrom exprs range
parseDataAllValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression
parseDataAllValuesFrom pm = parseEnclosedWithKeyword "DataAllValuesFrom" $ do
exprs <- many1 (followedBy (parseDataRange pm) ((parseIRI pm)))
range <- (parseDataRange pm)
return $ DataValuesFrom AllValuesFrom exprs range
parseDataHasValue :: GA.PrefixMap -> CharParser st ClassExpression
parseDataHasValue pm = parseEnclosedWithKeyword "DataHasValue" $
DataHasValue <$> (parseIRI pm) <*> (parseLiteral pm)
parseClassExpression :: GA.PrefixMap -> CharParser st ClassExpression
parseClassExpression pm =
(parseObjectIntersectionOf pm) <|>
(parseObjectUnionOf pm) <|>
(parseObjectComplementOf pm) <|>
(parseObjectOneOf pm) <|>
(parseObjectCardinality pm) <|>
(parseObjectSomeValuesFrom pm) <|>
(parseObjectAllValuesFrom pm) <|>
(parseObjectHasValue pm) <|>
(parseObjectHasSelf pm) <|>
(parseDataSomeValuesFrom pm) <|>
(parseDataAllValuesFrom pm) <|>
(parseDataHasValue pm) <|>
(parseDataCardinality pm) <|>
(Expression <$> (parseIRI pm)) <?>
"ClassExpression"
-- ## Class Axioms
parseSubClassOf :: GA.PrefixMap -> CharParser st ClassAxiom
parseSubClassOf pm = parseEnclosedWithKeyword "SubClassOf" $ do
annotations <- many (parseAnnotation pm)
subClassExpression <- (parseClassExpression pm)
superClassExpression <- (parseClassExpression pm)
return $ SubClassOf annotations subClassExpression superClassExpression
parseEquivalentClasses :: GA.PrefixMap -> CharParser st ClassAxiom
parseEquivalentClasses pm = parseEnclosedWithKeyword "EquivalentClasses" $
EquivalentClasses <$>
(parseAnnotations pm) <*>
manyN 2 (parseClassExpression pm)
parseDisjointClasses :: GA.PrefixMap -> CharParser st ClassAxiom
parseDisjointClasses pm = parseEnclosedWithKeyword "DisjointClasses" $
DisjointClasses <$>
(parseAnnotations pm) <*>
manyN 2 (parseClassExpression pm)
parseDisjointUnion :: GA.PrefixMap -> CharParser st ClassAxiom
parseDisjointUnion pm = parseEnclosedWithKeyword "DisjointUnion" $
DisjointUnion <$>
(parseAnnotations pm) <*>
(parseIRI pm) <*>
manyN 2 (parseClassExpression pm)
parseClassAxiom :: GA.PrefixMap -> CharParser st Axiom
parseClassAxiom pm = ClassAxiom <$> (
(parseSubClassOf pm) <|>
(parseEquivalentClasses pm) <|>
(parseDisjointClasses pm) <|>
(parseDisjointUnion pm) <?> "ClassAxiom"
)
# # Object Property Axioms
parseEquivalentObjectProperties ::
GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseEquivalentObjectProperties pm =
parseEnclosedWithKeyword "EquivalentObjectProperties" $
EquivalentObjectProperties <$>
(parseAnnotations pm) <*>
manyN 2 (parseObjectPropertyExpression pm)
parseDisjointObjectProperties ::
GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseDisjointObjectProperties pm =
parseEnclosedWithKeyword "DisjointObjectProperties" $
DisjointObjectProperties <$>
(parseAnnotations pm) <*>
manyN 2 (parseObjectPropertyExpression pm)
parseObjectPropertyDomain :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseObjectPropertyDomain pm =
parseEnclosedWithKeyword "ObjectPropertyDomain" $
ObjectPropertyDomain <$>
(parseAnnotations pm) <*>
(parseObjectPropertyExpression pm) <*>
(parseClassExpression pm)
parseObjectPropertyRange ::
GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseObjectPropertyRange pm =
parseEnclosedWithKeyword "ObjectPropertyRange" $
ObjectPropertyRange <$>
(parseAnnotations pm) <*>
(parseObjectPropertyExpression pm) <*>
(parseClassExpression pm)
parseInverseObjectProperties ::
GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseInverseObjectProperties pm =
parseEnclosedWithKeyword "InverseObjectProperties" $
InverseObjectProperties <$>
(parseAnnotations pm) <*>
(parseObjectPropertyExpression pm) <*>
(parseObjectPropertyExpression pm)
# # # SubObjectPropertyOf
parseObjectPropertyExpressionChain ::
GA.PrefixMap -> CharParser st PropertyExpressionChain
parseObjectPropertyExpressionChain pm =
parseEnclosedWithKeyword "ObjectPropertyChain" $
many1 (parseObjectPropertyExpression pm)
parseSubObjectPropertyExpression ::
GA.PrefixMap -> CharParser st SubObjectPropertyExpression
parseSubObjectPropertyExpression pm =
SubObjPropExpr_exprchain <$> (parseObjectPropertyExpressionChain pm) <|>
SubObjPropExpr_obj <$> (parseObjectPropertyExpression pm) <?>
"SubObjectPropertyExpression"
parseSubObjectPropertyOf ::
GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseSubObjectPropertyOf pm = parseEnclosedWithKeyword "SubObjectPropertyOf" $
SubObjectPropertyOf <$>
(parseAnnotations pm) <*>
(parseSubObjectPropertyExpression pm) <*>
(parseObjectPropertyExpression pm)
-- | Helper function for *C*ommon*O*bject*P*roperty*A*xioms
parseCOPA :: GA.PrefixMap -> (
AxiomAnnotations -> ObjectPropertyExpression -> ObjectPropertyAxiom
) -> String -> CharParser st ObjectPropertyAxiom
parseCOPA pm c s = parseEnclosedWithKeyword s $
c <$>
(parseAnnotations pm) <*>
(parseObjectPropertyExpression pm)
parseObjectPropertyAxiom :: GA.PrefixMap -> CharParser st Axiom
parseObjectPropertyAxiom pm = ObjectPropertyAxiom <$> (
(parseSubObjectPropertyOf pm) <|>
(parseEquivalentObjectProperties pm) <|>
(parseDisjointObjectProperties pm) <|>
(parseObjectPropertyDomain pm) <|>
(parseObjectPropertyRange pm) <|>
(parseInverseObjectProperties pm) <|>
parseCOPA pm FunctionalObjectProperty "FunctionalObjectProperty" <|>
parseCOPA pm InverseFunctionalObjectProperty
"InverseFunctionalObjectProperty" <|>
parseCOPA pm ReflexiveObjectProperty "ReflexiveObjectProperty" <|>
parseCOPA pm IrreflexiveObjectProperty "IrreflexiveObjectProperty" <|>
parseCOPA pm SymmetricObjectProperty "SymmetricObjectProperty" <|>
parseCOPA pm AsymmetricObjectProperty "AsymmetricObjectProperty" <|>
parseCOPA pm TransitiveObjectProperty "TransitiveObjectProperty" <?>
"ObjectPropertyAxiom"
)
# # DataPropertyAxioms
parseSubDataPropertyOf :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseSubDataPropertyOf pm = parseEnclosedWithKeyword "SubDataPropertyOf" $
SubDataPropertyOf <$>
parseAnnotations pm <*>
(parseIRI pm) <*>
(parseIRI pm)
parseEquivalentDataProperties :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseEquivalentDataProperties pm =
parseEnclosedWithKeyword "EquivalentDataProperties" $
EquivalentDataProperties <$>
(parseAnnotations pm) <*>
manyN 2 (parseIRI pm)
parseDisjointDataProperties :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseDisjointDataProperties pm =
parseEnclosedWithKeyword "DisjointDataProperties" $
DisjointDataProperties <$>
parseAnnotations pm <*>
manyN 2 (parseIRI pm)
parseDataPropertyDomain :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseDataPropertyDomain pm =
parseEnclosedWithKeyword "DataPropertyDomain" $
DataPropertyDomain <$>
parseAnnotations pm <*>
(parseIRI pm) <*>
parseClassExpression pm
parseDataPropertyRange :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseDataPropertyRange pm =
parseEnclosedWithKeyword "DataPropertyRange" $
DataPropertyRange <$>
parseAnnotations pm <*>
(parseIRI pm) <*>
parseDataRange pm
parseFunctionalDataProperty :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseFunctionalDataProperty pm =
parseEnclosedWithKeyword "FunctionalDataProperty" $
FunctionalDataProperty <$>
parseAnnotations pm <*>
(parseIRI pm)
parseDataPropertyAxiom :: GA.PrefixMap -> CharParser st Axiom
parseDataPropertyAxiom pm = DataPropertyAxiom <$> (
parseSubDataPropertyOf pm <|>
parseEquivalentDataProperties pm <|>
parseDisjointDataProperties pm <|>
parseDataPropertyDomain pm <|>
parseDataPropertyRange pm <|>
parseFunctionalDataProperty pm <?>
"DataPropertyAxiom"
)
-- ## Data Type Definition
parseDataTypeDefinition :: GA.PrefixMap -> CharParser st Axiom
parseDataTypeDefinition pm = parseEnclosedWithKeyword "DatatypeDefinition" $
DatatypeDefinition <$>
parseAnnotations pm <*>
(parseIRI pm) <*>
parseDataRange pm
skipChar :: Char -> CharParser st ()
skipChar = forget . skips . char
parensP :: CharParser st a -> CharParser st a
parensP = between (skipChar '(') (skipChar ')')
# # HasKey
parseHasKey :: GA.PrefixMap -> CharParser st Axiom
parseHasKey pm = parseEnclosedWithKeyword "HasKey" $ do
annotations <- (parseAnnotations pm)
classExpr <- (parseClassExpression pm)
objectPropertyExprs <- parensP $ many (parseObjectPropertyExpression pm)
dataPropertyExprs <- parensP $ many (parseIRI pm)
return $ HasKey annotations classExpr objectPropertyExprs dataPropertyExprs
# # Assertion
parseSameIndividual :: GA.PrefixMap -> CharParser st Assertion
parseSameIndividual pm = parseEnclosedWithKeyword "SameIndividual" $
SameIndividual <$>
(parseAnnotations pm) <*>
manyN 2 (parseIndividual pm)
parseDifferentIndividuals :: GA.PrefixMap -> CharParser st Assertion
parseDifferentIndividuals pm = parseEnclosedWithKeyword "DifferentIndividuals" $
DifferentIndividuals <$>
(parseAnnotations pm) <*>
manyN 2 (parseIndividual pm)
parseClassAssertion :: GA.PrefixMap -> CharParser st Assertion
parseClassAssertion pm = parseEnclosedWithKeyword "ClassAssertion" $
ClassAssertion <$>
(parseAnnotations pm) <*>
(parseClassExpression pm) <*>
(parseIndividual pm)
parseObjectPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion
parseObjectPropertyAssertion pm =
parseEnclosedWithKeyword "ObjectPropertyAssertion" $
ObjectPropertyAssertion <$>
(parseAnnotations pm) <*>
(parseObjectPropertyExpression pm) <*>
(parseIndividual pm) <*>
(parseIndividual pm)
parseNegativeObjectPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion
parseNegativeObjectPropertyAssertion pm =
parseEnclosedWithKeyword "NegativeObjectPropertyAssertion" $
NegativeObjectPropertyAssertion <$>
(parseAnnotations pm) <*>
(parseObjectPropertyExpression pm) <*>
(parseIndividual pm) <*>
(parseIndividual pm)
parseDataPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion
parseDataPropertyAssertion pm =
parseEnclosedWithKeyword "DataPropertyAssertion" $
DataPropertyAssertion <$>
(parseAnnotations pm) <*>
(parseIRI pm) <*>
(parseIndividual pm) <*>
(parseLiteral pm)
parseNegativeDataPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion
parseNegativeDataPropertyAssertion pm =
parseEnclosedWithKeyword "NegativeDataPropertyAssertion" $
NegativeDataPropertyAssertion <$>
(parseAnnotations pm) <*>
(parseIRI pm) <*>
(parseIndividual pm) <*>
(parseLiteral pm)
-- | Parses any assertion axiom. Adds the @\"Assertion\"@ error label
-- for consistency with the other axiom parsers (ClassAxiom,
-- ObjectPropertyAxiom, ...), which all carry a @<?>@ label.
parseAssertion :: GA.PrefixMap -> CharParser st Axiom
parseAssertion pm = Assertion <$> (
    parseSameIndividual pm <|>
    parseDifferentIndividuals pm <|>
    parseClassAssertion pm <|>
    parseObjectPropertyAssertion pm <|>
    parseNegativeObjectPropertyAssertion pm <|>
    parseDataPropertyAssertion pm <|>
    parseNegativeDataPropertyAssertion pm <?>
    "Assertion"
  )
parseAnnotationAssertion :: GA.PrefixMap -> CharParser st AnnotationAxiom
parseAnnotationAssertion pm = parseEnclosedWithKeyword "AnnotationAssertion" $
AnnotationAssertion <$>
(parseAnnotations pm) <*>
(parseIRI pm) <*>
(parseAnnotationSubject pm) <*>
(parseAnnotationValue pm)
parseSubAnnotationPropertyOf :: GA.PrefixMap -> CharParser st AnnotationAxiom
parseSubAnnotationPropertyOf pm =
parseEnclosedWithKeyword "SubAnnotationPropertyOf" $
SubAnnotationPropertyOf <$>
(parseAnnotations pm) <*>
(parseIRI pm) <*>
(parseIRI pm)
parseAnnotationPropertyDomain :: GA.PrefixMap -> CharParser st AnnotationAxiom
parseAnnotationPropertyDomain pm =
parseEnclosedWithKeyword "AnnotationPropertyDomain" $
AnnotationPropertyDomain <$>
(parseAnnotations pm) <*>
(parseIRI pm) <*>
(parseIRI pm)
parseAnnotationPropertyRange :: GA.PrefixMap -> CharParser st AnnotationAxiom
parseAnnotationPropertyRange pm =
parseEnclosedWithKeyword "AnnotationPropertyRange" $
AnnotationPropertyRange <$>
(parseAnnotations pm) <*>
(parseIRI pm) <*>
(parseIRI pm)
parseAnnotationAxiom :: GA.PrefixMap -> CharParser st Axiom
parseAnnotationAxiom pm = AnnotationAxiom <$> (
(parseAnnotationAssertion pm) <|>
(parseSubAnnotationPropertyOf pm) <|>
(parseAnnotationPropertyDomain pm) <|>
(parseAnnotationPropertyRange pm)
)
parseIndividualArg :: GA.PrefixMap -> CharParser st IndividualArg
parseIndividualArg pm =
-- Apparently the keyword is "Variable" instead of "IndividualVariable"
IVar <$> parseEnclosedWithKeyword "Variable" (parseIRI pm) <|>
IArg <$> parseAnonymousIndividual pm
parseDataArg :: GA.PrefixMap -> CharParser st DataArg
parseDataArg pm =
Apparently the keyword is " Literal " instead of " LiteralVariable "
DVar <$> parseEnclosedWithKeyword "Variable" (parseIRI pm) <|>
DArg <$> parseLiteral pm
parseClassAtom :: GA.PrefixMap -> CharParser st Atom
parseClassAtom pm = parseEnclosedWithKeyword "ClassAtom" $
ClassAtom <$> parseClassExpression pm <*> parseIndividualArg pm
parseDataRangeAtom :: GA.PrefixMap -> CharParser st Atom
parseDataRangeAtom pm = parseEnclosedWithKeyword "DataRangeAtom" $
DataRangeAtom <$> parseDataRange pm <*> parseDataArg pm
parseObjectPropertyAtom :: GA.PrefixMap -> CharParser st Atom
parseObjectPropertyAtom pm = parseEnclosedWithKeyword "ObjectPropertyAtom" $
ObjectPropertyAtom <$>
parseObjectPropertyExpression pm <*>
parseIndividualArg pm <*>
parseIndividualArg pm
parseDataPropertyAtom :: GA.PrefixMap -> CharParser st Atom
parseDataPropertyAtom pm = parseEnclosedWithKeyword "DataPropertyAtom" $
DataPropertyAtom <$>
parseIRI pm <*>
parseIndividualArg pm <*>
parseDataArg pm
parseBuiltInAtom :: GA.PrefixMap -> CharParser st Atom
parseBuiltInAtom pm = parseEnclosedWithKeyword "BuiltInAtom" $
BuiltInAtom <$> parseIRI pm <*> many1 (parseDataArg pm)
parseSameIndividualAtom :: GA.PrefixMap -> CharParser st Atom
parseSameIndividualAtom pm = parseEnclosedWithKeyword "SameIndividualAtom" $
SameIndividualAtom <$> parseIndividualArg pm <*> parseIndividualArg pm
parseDifferentIndividualsAtom :: GA.PrefixMap -> CharParser st Atom
parseDifferentIndividualsAtom pm = parseEnclosedWithKeyword "DifferentIndividualsAtom" $
DifferentIndividualsAtom <$> parseIndividualArg pm <*> parseIndividualArg pm
-- | Parses any SWRL atom.
parseAtom :: GA.PrefixMap -> CharParser st Atom
parseAtom pm = choice
  [ parseClassAtom pm
  , parseDataRangeAtom pm
  , parseObjectPropertyAtom pm
  , parseDataPropertyAtom pm
  , parseBuiltInAtom pm
  , parseSameIndividualAtom pm
  , parseDifferentIndividualsAtom pm
  ] <?> "Atom"
-- | Parses the body of a DL-safe rule: @Body(atom*)@.
parseBody :: GA.PrefixMap -> CharParser st Body
parseBody pm = parseEnclosedWithKeyword "Body" $ many (parseAtom pm)

-- | Parses the head of a DL-safe rule: @Head(atom*)@.
parseHead :: GA.PrefixMap -> CharParser st Body
parseHead pm = parseEnclosedWithKeyword "Head" $ many (parseAtom pm)
parseDLSafeRule :: GA.PrefixMap -> CharParser st Rule
parseDLSafeRule pm = parseEnclosedWithKeyword "DLSafeRule" $
DLSafeRule <$>
parseAnnotations pm <*>
parseBody pm <*>
parseHead pm
parseDGClassAtom :: GA.PrefixMap -> CharParser st DGAtom
parseDGClassAtom pm = parseEnclosedWithKeyword "ClassAtom" $
DGClassAtom <$> parseClassExpression pm <*> parseIndividualArg pm
parseDGObjectPropertyAtom :: GA.PrefixMap -> CharParser st DGAtom
parseDGObjectPropertyAtom pm = parseEnclosedWithKeyword "ObjectPropertyAtom" $
DGObjectPropertyAtom <$>
parseObjectPropertyExpression pm <*>
parseIndividualArg pm <*>
parseIndividualArg pm
parseDGAtom :: GA.PrefixMap -> CharParser st DGAtom
parseDGAtom pm = parseDGClassAtom pm <|> parseDGObjectPropertyAtom pm
parseDGBody :: GA.PrefixMap -> CharParser st DGBody
parseDGBody pm = parseEnclosedWithKeyword "Body" $ many (parseDGAtom pm)
parseDGHead :: GA.PrefixMap -> CharParser st DGHead
parseDGHead pm = parseEnclosedWithKeyword "Head" $ many (parseDGAtom pm)
parseDGRule :: GA.PrefixMap -> CharParser st Rule
parseDGRule pm = parseEnclosedWithKeyword "DescriptionGraphRule" $
DGRule <$> parseAnnotations pm <*> parseDGBody pm <*> parseDGHead pm
parseRule :: GA.PrefixMap -> CharParser st Axiom
parseRule pm = Rule <$> (parseDLSafeRule pm <|> parseDGRule pm)
parseDGNodeAssertion :: GA.PrefixMap -> CharParser st DGNodeAssertion
parseDGNodeAssertion pm = parseEnclosedWithKeyword "NodeAssertion" $
DGNodeAssertion <$> parseIRI pm <*> parseIRI pm
parseDGNodes :: GA.PrefixMap -> CharParser st DGNodes
parseDGNodes pm = parseEnclosedWithKeyword "Nodes" $
many1 (parseDGNodeAssertion pm)
parseDGEdgeAssertion :: GA.PrefixMap -> CharParser st DGEdgeAssertion
parseDGEdgeAssertion pm = parseEnclosedWithKeyword "EdgeAssertion" $
DGEdgeAssertion <$> parseIRI pm <*> parseIRI pm <*> parseIRI pm
parseDGEdes :: GA.PrefixMap -> CharParser st DGEdges
parseDGEdes pm = parseEnclosedWithKeyword "Edges" $
many1 (parseDGEdgeAssertion pm)
parseMainClasses :: GA.PrefixMap -> CharParser st MainClasses
parseMainClasses pm = parseEnclosedWithKeyword "MainClasses" $
many1 (parseIRI pm)
parseDGAxiom :: GA.PrefixMap -> CharParser st Axiom
parseDGAxiom pm = parseEnclosedWithKeyword "DescriptionGraph" $
DGAxiom <$>
parseAnnotations pm <*>
parseIRI pm <*>
parseDGNodes pm <*>
parseDGEdes pm <*>
parseMainClasses pm
parseAxiom :: GA.PrefixMap -> CharParser st Axiom
parseAxiom pm =
(parseDeclaration pm) <|>
(parseClassAxiom pm) <|>
(parseObjectPropertyAxiom pm) <|>
(parseDataPropertyAxiom pm) <|>
(parseDataTypeDefinition pm) <|>
(parseHasKey pm) <|>
(parseAssertion pm) <|>
(parseAnnotationAxiom pm) <|>
(parseRule pm) <|>
(parseDGAxiom pm) <?>
"Axiom"
parseOntology :: GA.PrefixMap -> CharParser st Ontology
parseOntology pm =
let parseIriIfNotImportOrAxiomOrAnnotation =
(arbitraryLookaheadOption [
forget (parseDirectlyImportsDocument pm),
forget (parseAnnotation pm),
forget (parseAxiom pm)
] >> never) <|>
optionMaybe (parseIRI pm)
in
parseEnclosedWithKeyword "Ontology" $ do
ontologyIri <- parseIriIfNotImportOrAxiomOrAnnotation
versionIri <- parseIriIfNotImportOrAxiomOrAnnotation
imports <- many (parseDirectlyImportsDocument pm)
annotations <- many (parseAnnotation pm)
axs <- many (parseAxiom pm)
return $ Ontology ontologyIri versionIri (imports) annotations axs
| Parses an OntologyDocument from Owl2 Functional Syntax
parseOntologyDocument :: GA.PrefixMap -> CharParser st OntologyDocument
parseOntologyDocument gapm = do
skips'
prefixes <- many parsePrefixDeclaration
let pm = union gapm (fromList prefixes)
onto <- parseOntology pm
return $ OntologyDocument (OntologyMetadata AS) pm onto
| null | https://raw.githubusercontent.com/spechub/Hets/c27bd92f22f3b92e792eff0adaa3baec9d61c2b1/OWL2/ParseAS.hs | haskell | | Performs an arbitrary lookahead over choices of parsers
# Basic constructs
| Parses a comment
| Skips trailing whitespaces and comments
| Parses plain string with skip
| Parses a full iri
| rfc3987 plus '+' from scheme (scheme does not allow the dots)
| Parses an abbreviated or full iri
# Entities, Literals, and Individuals
## Entities
## Individuals
# Annotations
## Data Range
# Axioms
## Class Axioms
| Helper function for *C*ommon*O*bject*P*roperty*A*xioms
## Data Type Definition
Apparently the keyword is "Variable" instead of "IndividualVariable" | # LANGUAGE TupleSections #
module OWL2.ParseAS where
import Prelude hiding (lookup)
import OWL2.AS as AS
import Common.AnnoParser (newlineOrEof, commentLine)
import Common.IRI hiding (parseIRI)
import Common.Parsec
import Common.Lexer (getNumber, value, nestCommentOut)
import qualified Common.GlobalAnnotations as GA (PrefixMap)
import Text.ParserCombinators.Parsec
import Data.Char
import Data.Map (union, fromList)
| @followedBy c p@ first parses @p@ then looks ahead for Does n't consume
any input on failure .
any input on failure. -}
followedBy :: CharParser st b -> CharParser st a -> CharParser st a
followedBy cond p = try $ do
r <- p
lookAhead cond
return r
arbitraryLookaheadOption :: [CharParser st a] -> CharParser st a
arbitraryLookaheadOption p = try $ lookAhead $ choice p
| @manyN n p@ parses @n@ or more occurences of @p@
manyN :: Int -> CharParser st a -> CharParser st [a]
manyN n p =
foldr (\ _ r -> p <:> r) (return []) [1 .. n] <++>
many p
| alias for @return Nothing@
never :: CharParser st (Maybe a)
never = return Nothing
comment :: CharParser st String
comment = try $ do
char '#'
manyTill anyChar newlineOrEof
skips :: CharParser st a -> CharParser st a
skips = (<< skips')
| and comments
skips' :: CharParser st ()
skips' = skipMany (forget space <|> forget comment <|> forget commentLine <|> forget nestCommentOut)
keyword :: String -> CharParser st ()
keyword s = try $ skips (string s >> notFollowedBy alphaNum)
fullIri :: CharParser st IRI
fullIri = angles iriParser
ncNameStart :: Char -> Bool
ncNameStart c = isAlpha c || c == '_'
ncNameChar :: Char -> Bool
ncNameChar c = isAlphaNum c || elem c ".+-_\183"
| Parses a prefix name ( PNAME_NS of )
prefix :: CharParser st String
prefix = skips $ option "" (satisfy ncNameStart <:> many (satisfy ncNameChar))
<< char ':'
parseIRI :: GA.PrefixMap -> CharParser st IRI
parseIRI pm = skips (expandIRI pm <$> (fullIri <|> compoundIriCurie) <?> "IRI")
| @parseEnclosedWithKeyword k p@ parses the keyword @k@ followed @p@
enclosed in parentheses . Skips spaces and comments before and after @p@.
enclosed in parentheses. Skips spaces and comments before and after @p@. -}
parseEnclosedWithKeyword :: String -> CharParser st a -> CharParser st a
parseEnclosedWithKeyword s p = do
keyword s
skips $ char '('
r <- skips p
skips $ char ')'
return r
parsePrefixDeclaration :: CharParser st (String, IRI)
parsePrefixDeclaration = parseEnclosedWithKeyword "Prefix" $ do
p <- prefix
skips $ char '='
iri <- fullIri
return $ (p, iri)
parseDirectlyImportsDocument :: GA.PrefixMap -> CharParser st IRI
parseDirectlyImportsDocument pm =
parseEnclosedWithKeyword "Import" (parseIRI pm) <?> "Import"
parseEntity' :: GA.PrefixMap -> EntityType -> String -> CharParser st Entity
parseEntity' pm t k = parseEnclosedWithKeyword k $ do
iri <- parseIRI pm
return $ mkEntity t iri
parseEntity :: GA.PrefixMap -> CharParser st Entity
parseEntity pm =
parseEntity' pm Class "Class" <|>
parseEntity' pm Datatype "Datatype" <|>
parseEntity' pm ObjectProperty "ObjectProperty" <|>
parseEntity' pm DataProperty "DataProperty" <|>
parseEntity' pm AnnotationProperty "AnnotationProperty" <|>
parseEntity' pm NamedIndividual "NamedIndividual" <?>
"Entity"
# # Literals
-- | One character of a quoted string, honouring the escapes \\\" and \\\\.
charOrEscaped :: CharParser st Char
charOrEscaped = (try $ string "\\\"" >> return '"')
    <|> (try $ string "\\\\" >> return '\\') <|> anyChar

-- | Parses the @^^\<datatype IRI\>@ suffix of a typed literal.
parseTypeSignature :: GA.PrefixMap -> CharParser st IRI
parseTypeSignature pm = do
    string "^^"
    parseIRI pm

-- | Parses a @\@lang@ tag (letters and hyphens only).
parseLanguageTag :: CharParser st String
parseLanguageTag = do
    char '@'
    many1 (letter <|> char '-')

-- | Parses a quoted literal, optionally followed by a datatype signature
-- (Typed) or an optional language tag (Untyped).
parseLiteral :: GA.PrefixMap -> CharParser st Literal
parseLiteral pm = do
    char '"'
    s <- manyTill charOrEscaped (try $ char '"')
    typ <- (Typed <$> parseTypeSignature pm) <|>
        (Untyped <$> optionMaybe parseLanguageTag)
    return $ Literal s typ
-- | An anonymous individual, written as a CURIE.
parseAnonymousIndividual :: GA.PrefixMap -> CharParser st AnonymousIndividual
parseAnonymousIndividual pm = skips $ expandIRI pm <$> iriCurie

-- | A named (IRI) or anonymous individual.
parseIndividual :: GA.PrefixMap -> CharParser st Individual
parseIndividual pm = parseIRI pm
    <|> parseAnonymousIndividual pm
    <?> "Individual"
-- | The value of an annotation: a literal, an IRI, or an anonymous
-- individual.  Literals are tried first so quoted strings are not
-- mistaken for IRIs.  (Rewritten from @p >>= return . f@ to @f \<$\> p@,
-- matching the style of 'parseAnnotationSubject'.)
parseAnnotationValue :: GA.PrefixMap -> CharParser st AnnotationValue
parseAnnotationValue pm =
    (AnnValLit <$> parseLiteral pm) <|>
    (AnnValue <$> parseIRI pm) <|>
    (AnnAnInd <$> parseAnonymousIndividual pm) <?>
    "AnnotationValue"
-- | The subject of an annotation: an anonymous individual or an IRI.
parseAnnotationSubject :: GA.PrefixMap -> CharParser st AnnotationSubject
parseAnnotationSubject pm =
    (AnnSubAnInd <$> parseAnonymousIndividual pm) <|>
    (AnnSubIri <$> parseIRI pm)
-- | Zero or more annotations.
parseAnnotations :: GA.PrefixMap -> CharParser st [Annotation]
parseAnnotations pm = many $ parseAnnotation pm

-- | Annotation(annotations property value).  Annotations may themselves
-- be annotated, hence the mutual recursion with 'parseAnnotations'.
-- (Rewritten: @flip (\<?\>)@ replaced by a plain infix @\<?\>@, and the
-- inline @many (parseAnnotation pm)@ replaced by 'parseAnnotations'.)
parseAnnotation :: GA.PrefixMap -> CharParser st Annotation
parseAnnotation pm = (parseEnclosedWithKeyword "Annotation" $ do
    an <- parseAnnotations pm
    property <- parseIRI pm
    v <- parseAnnotationValue pm
    return $ Annotation an property v) <?> "Annotation"
-- | Shared parser for n-ary data junctions: keyword @k@, junction type
-- @t@, at least two member ranges.
parseDataJunction' ::
    GA.PrefixMap -> String -> JunctionType -> CharParser st DataRange
parseDataJunction' pm k t = parseEnclosedWithKeyword k $
    DataJunction t <$> manyN 2 (parseDataRange pm)

-- | DataUnionOf / DataIntersectionOf.
parseDataJunction :: GA.PrefixMap -> CharParser st DataRange
parseDataJunction pm =
    parseDataJunction' pm "DataUnionOf" UnionOf <|>
    parseDataJunction' pm "DataIntersectionOf" IntersectionOf

-- | DataComplementOf(range).
parseDataComplementOf :: GA.PrefixMap -> CharParser st DataRange
parseDataComplementOf pm = parseEnclosedWithKeyword "DataComplementOf" $
    DataComplementOf <$> parseDataRange pm

-- | DataOneOf(literal ...): an enumerated data range.
parseDataOneOf :: GA.PrefixMap -> CharParser st DataRange
parseDataOneOf pm = parseEnclosedWithKeyword "DataOneOf" $
    DataOneOf <$> many1 (parseLiteral pm)

-- | One facet/value pair of a datatype restriction.
parseDatatypeResComponent ::
    GA.PrefixMap -> CharParser st (ConstrainingFacet, RestrictionValue)
parseDatatypeResComponent pm =
    (,) <$>
    (parseIRI pm) <*>
    (parseLiteral pm)

-- | DatatypeRestriction(datatype facet value ...).
parseDatatypeRestriction :: GA.PrefixMap -> CharParser st DataRange
parseDatatypeRestriction pm =
    parseEnclosedWithKeyword "DatatypeRestriction" $ do
        dataType <- (parseIRI pm)
        restrictions <- many1 (parseDatatypeResComponent pm)
        return $ DataType dataType restrictions

-- | Any data range; a bare IRI is read as a datatype with no facets.
parseDataRange :: GA.PrefixMap -> CharParser st DataRange
parseDataRange pm =
    (parseDataJunction pm) <|>
    (parseDataComplementOf pm) <|>
    (parseDataOneOf pm) <|>
    (parseDatatypeRestriction pm) <|>
    (DataType <$> (parseIRI pm) <*> return []) <?>
    "DataRange"
-- ## Declaration
-- | Declaration(annotations entity).  Uses 'parseAnnotations' for
-- consistency with the other axiom parsers in this file.
parseDeclaration :: GA.PrefixMap -> CharParser st Axiom
parseDeclaration pm = parseEnclosedWithKeyword "Declaration" $ do
    annotations <- parseAnnotations pm
    entity <- parseEntity pm
    return $ Declaration annotations entity
-- ## ClassExpressions
-- | ObjectIntersectionOf with at least two operands.
parseObjectIntersectionOf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectIntersectionOf pm = parseEnclosedWithKeyword "ObjectIntersectionOf" $
    ObjectJunction IntersectionOf <$> manyN 2 (parseClassExpression pm)

-- | ObjectUnionOf with at least two operands.
parseObjectUnionOf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectUnionOf pm = parseEnclosedWithKeyword "ObjectUnionOf" $
    ObjectJunction UnionOf <$> manyN 2 (parseClassExpression pm)

-- | ObjectComplementOf(class expression).
parseObjectComplementOf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectComplementOf pm = parseEnclosedWithKeyword "ObjectComplementOf" $
    ObjectComplementOf <$> (parseClassExpression pm)

-- | ObjectOneOf(individual ...): an enumerated class.
parseObjectOneOf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectOneOf pm = parseEnclosedWithKeyword "ObjectOneOf" $
    ObjectOneOf <$> many1 (parseIndividual pm)

-- | A named object property (bare IRI).
parseObjectProperty :: GA.PrefixMap -> CharParser st ObjectPropertyExpression
parseObjectProperty pm = ObjectProp <$> (parseIRI pm)

-- | ObjectInverseOf(property).
parseInverseObjectProperty ::
    GA.PrefixMap -> CharParser st ObjectPropertyExpression
parseInverseObjectProperty pm = parseEnclosedWithKeyword "ObjectInverseOf" $
    ObjectInverseOf <$> (parseObjectProperty pm)

-- | Object property expression: the keyword-introduced inverse form is
-- tried before a plain property IRI.
parseObjectPropertyExpression ::
    GA.PrefixMap -> CharParser st ObjectPropertyExpression
parseObjectPropertyExpression pm =
    (parseInverseObjectProperty pm) <|>
    (parseObjectProperty pm) <?>
    "ObjectPropertyExpression"
-- | ObjectSomeValuesFrom(property class): existential restriction.
parseObjectSomeValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectSomeValuesFrom pm =
    parseEnclosedWithKeyword "ObjectSomeValuesFrom" $
        ObjectValuesFrom SomeValuesFrom
            <$> parseObjectPropertyExpression pm
            <*> parseClassExpression pm

-- | ObjectAllValuesFrom(property class): universal restriction.
parseObjectAllValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectAllValuesFrom pm =
    parseEnclosedWithKeyword "ObjectAllValuesFrom" $
        ObjectValuesFrom AllValuesFrom
            <$> parseObjectPropertyExpression pm
            <*> parseClassExpression pm
-- | ObjectHasValue(property individual).
parseObjectHasValue :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectHasValue pm = parseEnclosedWithKeyword "ObjectHasValue" $ do
    objectPropertyExpr <- (parseObjectPropertyExpression pm)
    val <- (parseIndividual pm)
    return $ ObjectHasValue objectPropertyExpr val

-- | ObjectHasSelf(property): local reflexivity.
parseObjectHasSelf :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectHasSelf pm = parseEnclosedWithKeyword "ObjectHasSelf" $
    ObjectHasSelf <$> (parseObjectPropertyExpression pm)

-- | Shared scaffold for the six cardinality keywords: a decimal number,
-- a property parser @pa@ and an optional filler parser @pb@.
parseCardinality' :: CardinalityType
    -> String
    -> CharParser st a
    -> CharParser st b
    -> CharParser st (Cardinality a b)
parseCardinality' c k pa pb = parseEnclosedWithKeyword k $ do
    n <- skips $ value 10 <$> getNumber   -- the cardinality, base 10
    objectPropertyExpr <- pa
    classExpr <- optionMaybe pb           -- the filler may be omitted
    return $ Cardinality c n objectPropertyExpr classExpr
-- | ObjectMin/Max/ExactCardinality over an object property, with an
-- optional class-expression filler.
parseObjectCardinality :: GA.PrefixMap -> CharParser st ClassExpression
parseObjectCardinality pm = ObjectCardinality <$> (
    cardinality "ObjectMinCardinality" MinCardinality <|>
    cardinality "ObjectMaxCardinality" MaxCardinality <|>
    cardinality "ObjectExactCardinality" ExactCardinality
    )
    where cardinality s t = parseCardinality' t s a b
          a = (parseObjectPropertyExpression pm)
          b = (parseClassExpression pm)

-- | DataMin/Max/ExactCardinality over a data property, with an optional
-- data-range filler.
parseDataCardinality :: GA.PrefixMap -> CharParser st ClassExpression
parseDataCardinality pm = DataCardinality <$> (
    cardinality "DataMinCardinality" MinCardinality <|>
    cardinality "DataMaxCardinality" MaxCardinality <|>
    cardinality "DataExactCardinality" ExactCardinality
    )
    where cardinality s t = parseCardinality' t s a b
          a = (parseIRI pm)
          b = (parseDataRange pm)
-- | DataSomeValuesFrom(dp ... range).  The leading data-property IRIs are
-- read via 'followedBy' so the trailing data range is not consumed early
-- (NOTE(review): presumably lookahead-based -- confirm against the
-- definition of 'followedBy').
parseDataSomeValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression
parseDataSomeValuesFrom pm = parseEnclosedWithKeyword "DataSomeValuesFrom" $ do
    exprs <- many1 (followedBy
        ((parseDataRange pm))
        ((parseIRI pm)))
    range <- (parseDataRange pm)
    return $ DataValuesFrom SomeValuesFrom exprs range

-- | DataAllValuesFrom(dp ... range); same shape as the existential case.
parseDataAllValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression
parseDataAllValuesFrom pm = parseEnclosedWithKeyword "DataAllValuesFrom" $ do
    exprs <- many1 (followedBy (parseDataRange pm) ((parseIRI pm)))
    range <- (parseDataRange pm)
    return $ DataValuesFrom AllValuesFrom exprs range

-- | DataHasValue(dp literal).
parseDataHasValue :: GA.PrefixMap -> CharParser st ClassExpression
parseDataHasValue pm = parseEnclosedWithKeyword "DataHasValue" $
    DataHasValue <$> (parseIRI pm) <*> (parseLiteral pm)
-- | Any class expression.  Keyword-introduced forms are tried first; a
-- bare IRI (a named class) is the fallback.
parseClassExpression :: GA.PrefixMap -> CharParser st ClassExpression
parseClassExpression pm =
    (parseObjectIntersectionOf pm) <|>
    (parseObjectUnionOf pm) <|>
    (parseObjectComplementOf pm) <|>
    (parseObjectOneOf pm) <|>
    (parseObjectCardinality pm) <|>
    (parseObjectSomeValuesFrom pm) <|>
    (parseObjectAllValuesFrom pm) <|>
    (parseObjectHasValue pm) <|>
    (parseObjectHasSelf pm) <|>
    (parseDataSomeValuesFrom pm) <|>
    (parseDataAllValuesFrom pm) <|>
    (parseDataHasValue pm) <|>
    (parseDataCardinality pm) <|>
    (Expression <$> (parseIRI pm)) <?>
    "ClassExpression"
-- | SubClassOf(annotations sub super).  Uses 'parseAnnotations' for
-- consistency with the sibling class-axiom parsers.
parseSubClassOf :: GA.PrefixMap -> CharParser st ClassAxiom
parseSubClassOf pm = parseEnclosedWithKeyword "SubClassOf" $ do
    annotations <- parseAnnotations pm
    subClassExpression <- parseClassExpression pm
    superClassExpression <- parseClassExpression pm
    return $ SubClassOf annotations subClassExpression superClassExpression
-- | EquivalentClasses(annotations ce ce ...), at least two expressions.
parseEquivalentClasses :: GA.PrefixMap -> CharParser st ClassAxiom
parseEquivalentClasses pm = parseEnclosedWithKeyword "EquivalentClasses" $
    EquivalentClasses <$>
    (parseAnnotations pm) <*>
    manyN 2 (parseClassExpression pm)

-- | DisjointClasses(annotations ce ce ...), at least two expressions.
parseDisjointClasses :: GA.PrefixMap -> CharParser st ClassAxiom
parseDisjointClasses pm = parseEnclosedWithKeyword "DisjointClasses" $
    DisjointClasses <$>
    (parseAnnotations pm) <*>
    manyN 2 (parseClassExpression pm)

-- | DisjointUnion(annotations class ce ce ...).
parseDisjointUnion :: GA.PrefixMap -> CharParser st ClassAxiom
parseDisjointUnion pm = parseEnclosedWithKeyword "DisjointUnion" $
    DisjointUnion <$>
    (parseAnnotations pm) <*>
    (parseIRI pm) <*>
    manyN 2 (parseClassExpression pm)

-- | Any of the four class axioms, wrapped in the 'ClassAxiom' constructor.
parseClassAxiom :: GA.PrefixMap -> CharParser st Axiom
parseClassAxiom pm = ClassAxiom <$> (
    (parseSubClassOf pm) <|>
    (parseEquivalentClasses pm) <|>
    (parseDisjointClasses pm) <|>
    (parseDisjointUnion pm) <?> "ClassAxiom"
    )
-- ## Object Property Axioms
-- | EquivalentObjectProperties(annotations ope ope ...).
parseEquivalentObjectProperties ::
    GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseEquivalentObjectProperties pm =
    parseEnclosedWithKeyword "EquivalentObjectProperties" $
        EquivalentObjectProperties <$>
        (parseAnnotations pm) <*>
        manyN 2 (parseObjectPropertyExpression pm)

-- | DisjointObjectProperties(annotations ope ope ...).
parseDisjointObjectProperties ::
    GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseDisjointObjectProperties pm =
    parseEnclosedWithKeyword "DisjointObjectProperties" $
        DisjointObjectProperties <$>
        (parseAnnotations pm) <*>
        manyN 2 (parseObjectPropertyExpression pm)

-- | ObjectPropertyDomain(annotations ope class).
parseObjectPropertyDomain :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseObjectPropertyDomain pm =
    parseEnclosedWithKeyword "ObjectPropertyDomain" $
        ObjectPropertyDomain <$>
        (parseAnnotations pm) <*>
        (parseObjectPropertyExpression pm) <*>
        (parseClassExpression pm)

-- | ObjectPropertyRange(annotations ope class).
parseObjectPropertyRange ::
    GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseObjectPropertyRange pm =
    parseEnclosedWithKeyword "ObjectPropertyRange" $
        ObjectPropertyRange <$>
        (parseAnnotations pm) <*>
        (parseObjectPropertyExpression pm) <*>
        (parseClassExpression pm)

-- | InverseObjectProperties(annotations ope ope).
parseInverseObjectProperties ::
    GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseInverseObjectProperties pm =
    parseEnclosedWithKeyword "InverseObjectProperties" $
        InverseObjectProperties <$>
        (parseAnnotations pm) <*>
        (parseObjectPropertyExpression pm) <*>
        (parseObjectPropertyExpression pm)
-- ### SubObjectPropertyOf
-- | ObjectPropertyChain(ope ope ...): a property-composition chain.
parseObjectPropertyExpressionChain ::
    GA.PrefixMap -> CharParser st PropertyExpressionChain
parseObjectPropertyExpressionChain pm =
    parseEnclosedWithKeyword "ObjectPropertyChain" $
        many1 (parseObjectPropertyExpression pm)

-- | The sub side of SubObjectPropertyOf: a chain (keyword form, tried
-- first) or a single property expression.
parseSubObjectPropertyExpression ::
    GA.PrefixMap -> CharParser st SubObjectPropertyExpression
parseSubObjectPropertyExpression pm =
    SubObjPropExpr_exprchain <$> (parseObjectPropertyExpressionChain pm) <|>
    SubObjPropExpr_obj <$> (parseObjectPropertyExpression pm) <?>
    "SubObjectPropertyExpression"

-- | SubObjectPropertyOf(annotations sub super).
parseSubObjectPropertyOf ::
    GA.PrefixMap -> CharParser st ObjectPropertyAxiom
parseSubObjectPropertyOf pm = parseEnclosedWithKeyword "SubObjectPropertyOf" $
    SubObjectPropertyOf <$>
    (parseAnnotations pm) <*>
    (parseSubObjectPropertyExpression pm) <*>
    (parseObjectPropertyExpression pm)

-- | Shared parser for the unary "characteristic" object-property axioms
-- (Functional, Reflexive, Transitive, ...): constructor @c@, keyword @s@.
parseCOPA :: GA.PrefixMap -> (
    AxiomAnnotations -> ObjectPropertyExpression -> ObjectPropertyAxiom
    ) -> String -> CharParser st ObjectPropertyAxiom
parseCOPA pm c s = parseEnclosedWithKeyword s $
    c <$>
    (parseAnnotations pm) <*>
    (parseObjectPropertyExpression pm)
-- | Any object property axiom, wrapped in 'ObjectPropertyAxiom'.
parseObjectPropertyAxiom :: GA.PrefixMap -> CharParser st Axiom
parseObjectPropertyAxiom pm = ObjectPropertyAxiom <$> (
    (parseSubObjectPropertyOf pm) <|>
    (parseEquivalentObjectProperties pm) <|>
    (parseDisjointObjectProperties pm) <|>
    (parseObjectPropertyDomain pm) <|>
    (parseObjectPropertyRange pm) <|>
    (parseInverseObjectProperties pm) <|>
    parseCOPA pm FunctionalObjectProperty "FunctionalObjectProperty" <|>
    parseCOPA pm InverseFunctionalObjectProperty
        "InverseFunctionalObjectProperty" <|>
    parseCOPA pm ReflexiveObjectProperty "ReflexiveObjectProperty" <|>
    parseCOPA pm IrreflexiveObjectProperty "IrreflexiveObjectProperty" <|>
    parseCOPA pm SymmetricObjectProperty "SymmetricObjectProperty" <|>
    parseCOPA pm AsymmetricObjectProperty "AsymmetricObjectProperty" <|>
    parseCOPA pm TransitiveObjectProperty "TransitiveObjectProperty" <?>
    "ObjectPropertyAxiom"
    )
-- ## DataPropertyAxioms
-- | SubDataPropertyOf(annotations sub super); data properties are IRIs.
parseSubDataPropertyOf :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseSubDataPropertyOf pm = parseEnclosedWithKeyword "SubDataPropertyOf" $
    SubDataPropertyOf <$>
    parseAnnotations pm <*>
    (parseIRI pm) <*>
    (parseIRI pm)

-- | EquivalentDataProperties(annotations dp dp ...).
parseEquivalentDataProperties :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseEquivalentDataProperties pm =
    parseEnclosedWithKeyword "EquivalentDataProperties" $
        EquivalentDataProperties <$>
        (parseAnnotations pm) <*>
        manyN 2 (parseIRI pm)

-- | DisjointDataProperties(annotations dp dp ...).
parseDisjointDataProperties :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseDisjointDataProperties pm =
    parseEnclosedWithKeyword "DisjointDataProperties" $
        DisjointDataProperties <$>
        parseAnnotations pm <*>
        manyN 2 (parseIRI pm)

-- | DataPropertyDomain(annotations dp class).
parseDataPropertyDomain :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseDataPropertyDomain pm =
    parseEnclosedWithKeyword "DataPropertyDomain" $
        DataPropertyDomain <$>
        parseAnnotations pm <*>
        (parseIRI pm) <*>
        parseClassExpression pm

-- | DataPropertyRange(annotations dp range).
parseDataPropertyRange :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseDataPropertyRange pm =
    parseEnclosedWithKeyword "DataPropertyRange" $
        DataPropertyRange <$>
        parseAnnotations pm <*>
        (parseIRI pm) <*>
        parseDataRange pm

-- | FunctionalDataProperty(annotations dp).
parseFunctionalDataProperty :: GA.PrefixMap -> CharParser st DataPropertyAxiom
parseFunctionalDataProperty pm =
    parseEnclosedWithKeyword "FunctionalDataProperty" $
        FunctionalDataProperty <$>
        parseAnnotations pm <*>
        (parseIRI pm)

-- | Any data property axiom, wrapped in 'DataPropertyAxiom'.
parseDataPropertyAxiom :: GA.PrefixMap -> CharParser st Axiom
parseDataPropertyAxiom pm = DataPropertyAxiom <$> (
    parseSubDataPropertyOf pm <|>
    parseEquivalentDataProperties pm <|>
    parseDisjointDataProperties pm <|>
    parseDataPropertyDomain pm <|>
    parseDataPropertyRange pm <|>
    parseFunctionalDataProperty pm <?>
    "DataPropertyAxiom"
    )
-- | DatatypeDefinition(annotations datatype range).
parseDataTypeDefinition :: GA.PrefixMap -> CharParser st Axiom
parseDataTypeDefinition pm = parseEnclosedWithKeyword "DatatypeDefinition" $
    DatatypeDefinition <$>
    parseAnnotations pm <*>
    (parseIRI pm) <*>
    parseDataRange pm

-- | Skips one character and any following whitespace/comments.
skipChar :: Char -> CharParser st ()
skipChar = forget . skips . char

-- | Runs a parser between skipped parentheses.
parensP :: CharParser st a -> CharParser st a
parensP = between (skipChar '(') (skipChar ')')
-- ## HasKey
-- | HasKey(annotations class (ope ...) (dp ...)): the two parenthesised
-- groups hold the object- and data-property parts of the key.
parseHasKey :: GA.PrefixMap -> CharParser st Axiom
parseHasKey pm = parseEnclosedWithKeyword "HasKey" $ do
    annotations <- (parseAnnotations pm)
    classExpr <- (parseClassExpression pm)
    objectPropertyExprs <- parensP $ many (parseObjectPropertyExpression pm)
    dataPropertyExprs <- parensP $ many (parseIRI pm)
    return $ HasKey annotations classExpr objectPropertyExprs dataPropertyExprs
-- ## Assertion
-- | SameIndividual(annotations ind ind ...), at least two individuals.
parseSameIndividual :: GA.PrefixMap -> CharParser st Assertion
parseSameIndividual pm = parseEnclosedWithKeyword "SameIndividual" $
    SameIndividual <$>
    (parseAnnotations pm) <*>
    manyN 2 (parseIndividual pm)

-- | DifferentIndividuals(annotations ind ind ...).
parseDifferentIndividuals :: GA.PrefixMap -> CharParser st Assertion
parseDifferentIndividuals pm = parseEnclosedWithKeyword "DifferentIndividuals" $
    DifferentIndividuals <$>
    (parseAnnotations pm) <*>
    manyN 2 (parseIndividual pm)

-- | ClassAssertion(annotations class individual).
parseClassAssertion :: GA.PrefixMap -> CharParser st Assertion
parseClassAssertion pm = parseEnclosedWithKeyword "ClassAssertion" $
    ClassAssertion <$>
    (parseAnnotations pm) <*>
    (parseClassExpression pm) <*>
    (parseIndividual pm)

-- | ObjectPropertyAssertion(annotations ope source target).
parseObjectPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion
parseObjectPropertyAssertion pm =
    parseEnclosedWithKeyword "ObjectPropertyAssertion" $
        ObjectPropertyAssertion <$>
        (parseAnnotations pm) <*>
        (parseObjectPropertyExpression pm) <*>
        (parseIndividual pm) <*>
        (parseIndividual pm)
-- | NegativeObjectPropertyAssertion(annotations ope source target).
parseNegativeObjectPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion
parseNegativeObjectPropertyAssertion pm =
    parseEnclosedWithKeyword "NegativeObjectPropertyAssertion" $
        NegativeObjectPropertyAssertion <$>
        (parseAnnotations pm) <*>
        (parseObjectPropertyExpression pm) <*>
        (parseIndividual pm) <*>
        (parseIndividual pm)

-- | DataPropertyAssertion(annotations dp individual literal).
parseDataPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion
parseDataPropertyAssertion pm =
    parseEnclosedWithKeyword "DataPropertyAssertion" $
        DataPropertyAssertion <$>
        (parseAnnotations pm) <*>
        (parseIRI pm) <*>
        (parseIndividual pm) <*>
        (parseLiteral pm)

-- | NegativeDataPropertyAssertion(annotations dp individual literal).
parseNegativeDataPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion
parseNegativeDataPropertyAssertion pm =
    parseEnclosedWithKeyword "NegativeDataPropertyAssertion" $
        NegativeDataPropertyAssertion <$>
        (parseAnnotations pm) <*>
        (parseIRI pm) <*>
        (parseIndividual pm) <*>
        (parseLiteral pm)

-- | Any assertion axiom, wrapped in 'Assertion'.
parseAssertion :: GA.PrefixMap -> CharParser st Axiom
parseAssertion pm = Assertion <$> (
    (parseSameIndividual pm) <|>
    (parseDifferentIndividuals pm) <|>
    (parseClassAssertion pm) <|>
    (parseObjectPropertyAssertion pm) <|>
    (parseNegativeObjectPropertyAssertion pm) <|>
    (parseDataPropertyAssertion pm) <|>
    (parseNegativeDataPropertyAssertion pm)
    )
-- | AnnotationAssertion(annotations property subject value).
parseAnnotationAssertion :: GA.PrefixMap -> CharParser st AnnotationAxiom
parseAnnotationAssertion pm = parseEnclosedWithKeyword "AnnotationAssertion" $
    AnnotationAssertion <$>
    (parseAnnotations pm) <*>
    (parseIRI pm) <*>
    (parseAnnotationSubject pm) <*>
    (parseAnnotationValue pm)

-- | SubAnnotationPropertyOf(annotations sub super).
parseSubAnnotationPropertyOf :: GA.PrefixMap -> CharParser st AnnotationAxiom
parseSubAnnotationPropertyOf pm =
    parseEnclosedWithKeyword "SubAnnotationPropertyOf" $
        SubAnnotationPropertyOf <$>
        (parseAnnotations pm) <*>
        (parseIRI pm) <*>
        (parseIRI pm)

-- | AnnotationPropertyDomain(annotations property iri).
parseAnnotationPropertyDomain :: GA.PrefixMap -> CharParser st AnnotationAxiom
parseAnnotationPropertyDomain pm =
    parseEnclosedWithKeyword "AnnotationPropertyDomain" $
        AnnotationPropertyDomain <$>
        (parseAnnotations pm) <*>
        (parseIRI pm) <*>
        (parseIRI pm)

-- | AnnotationPropertyRange(annotations property iri).
parseAnnotationPropertyRange :: GA.PrefixMap -> CharParser st AnnotationAxiom
parseAnnotationPropertyRange pm =
    parseEnclosedWithKeyword "AnnotationPropertyRange" $
        AnnotationPropertyRange <$>
        (parseAnnotations pm) <*>
        (parseIRI pm) <*>
        (parseIRI pm)

-- | Any annotation axiom, wrapped in 'AnnotationAxiom'.
parseAnnotationAxiom :: GA.PrefixMap -> CharParser st Axiom
parseAnnotationAxiom pm = AnnotationAxiom <$> (
    (parseAnnotationAssertion pm) <|>
    (parseSubAnnotationPropertyOf pm) <|>
    (parseAnnotationPropertyDomain pm) <|>
    (parseAnnotationPropertyRange pm)
    )
-- | An individual argument of a SWRL atom: a variable (keyword
-- "Variable" around an IRI) or an anonymous individual.
parseIndividualArg :: GA.PrefixMap -> CharParser st IndividualArg
parseIndividualArg pm =
    IVar <$> parseEnclosedWithKeyword "Variable" (parseIRI pm) <|>
    IArg <$> parseAnonymousIndividual pm
-- | A data argument of a SWRL atom: a variable (keyword "Variable"
-- around an IRI) or a plain literal.
parseDataArg :: GA.PrefixMap -> CharParser st DataArg
parseDataArg pm =
    -- Apparently the keyword is "Variable" instead of "LiteralVariable"
    -- (restored "--" marker: this remark had lost its comment prefix,
    -- which made the line a syntax error).
    DVar <$> parseEnclosedWithKeyword "Variable" (parseIRI pm) <|>
    DArg <$> parseLiteral pm
-- | ClassAtom(class iarg).
parseClassAtom :: GA.PrefixMap -> CharParser st Atom
parseClassAtom pm = parseEnclosedWithKeyword "ClassAtom" $
    ClassAtom <$> parseClassExpression pm <*> parseIndividualArg pm

-- | DataRangeAtom(range darg).
parseDataRangeAtom :: GA.PrefixMap -> CharParser st Atom
parseDataRangeAtom pm = parseEnclosedWithKeyword "DataRangeAtom" $
    DataRangeAtom <$> parseDataRange pm <*> parseDataArg pm

-- | ObjectPropertyAtom(ope iarg iarg).
parseObjectPropertyAtom :: GA.PrefixMap -> CharParser st Atom
parseObjectPropertyAtom pm = parseEnclosedWithKeyword "ObjectPropertyAtom" $
    ObjectPropertyAtom <$>
    parseObjectPropertyExpression pm <*>
    parseIndividualArg pm <*>
    parseDataArg pm

-- | DataPropertyAtom(dp iarg darg).
parseDataPropertyAtom :: GA.PrefixMap -> CharParser st Atom
parseDataPropertyAtom pm = parseEnclosedWithKeyword "DataPropertyAtom" $
    DataPropertyAtom <$>
    parseIRI pm <*>
    parseIndividualArg pm <*>
    parseDataArg pm

-- | BuiltInAtom(iri darg ...).
parseBuiltInAtom :: GA.PrefixMap -> CharParser st Atom
parseBuiltInAtom pm = parseEnclosedWithKeyword "BuiltInAtom" $
    BuiltInAtom <$> parseIRI pm <*> many1 (parseDataArg pm)

-- | SameIndividualAtom(iarg iarg).
parseSameIndividualAtom :: GA.PrefixMap -> CharParser st Atom
parseSameIndividualAtom pm = parseEnclosedWithKeyword "SameIndividualAtom" $
    SameIndividualAtom <$> parseIndividualArg pm <*> parseIndividualArg pm

-- | DifferentIndividualsAtom(iarg iarg).
parseDifferentIndividualsAtom :: GA.PrefixMap -> CharParser st Atom
parseDifferentIndividualsAtom pm = parseEnclosedWithKeyword "DifferentIndividualsAtom" $
    DifferentIndividualsAtom <$> parseIndividualArg pm <*> parseIndividualArg pm

-- | Any SWRL atom.
parseAtom :: GA.PrefixMap -> CharParser st Atom
parseAtom pm =
    parseClassAtom pm <|>
    parseDataRangeAtom pm <|>
    parseObjectPropertyAtom pm <|>
    parseDataPropertyAtom pm <|>
    parseBuiltInAtom pm <|>
    parseSameIndividualAtom pm <|>
    parseDifferentIndividualsAtom pm <?>
    "Atom"
-- | Body(atom ...) of a DL-safe rule.  (Removed a redundant single-
-- expression @do@ wrapper.)
parseBody :: GA.PrefixMap -> CharParser st Body
parseBody pm = parseEnclosedWithKeyword "Body" $ many (parseAtom pm)

-- | Head(atom ...) of a DL-safe rule.  NOTE(review): the result type is
-- the 'Body' alias, as in the original signature -- presumably head and
-- body share one list-of-atoms type; confirm in the AS module.
parseHead :: GA.PrefixMap -> CharParser st Body
parseHead pm = parseEnclosedWithKeyword "Head" $ many (parseAtom pm)
-- | DLSafeRule(annotations body head).
parseDLSafeRule :: GA.PrefixMap -> CharParser st Rule
parseDLSafeRule pm = parseEnclosedWithKeyword "DLSafeRule" $
    DLSafeRule <$>
    parseAnnotations pm <*>
    parseBody pm <*>
    parseHead pm

-- | ClassAtom of a description-graph rule.
parseDGClassAtom :: GA.PrefixMap -> CharParser st DGAtom
parseDGClassAtom pm = parseEnclosedWithKeyword "ClassAtom" $
    DGClassAtom <$> parseClassExpression pm <*> parseIndividualArg pm

-- | ObjectPropertyAtom of a description-graph rule.
parseDGObjectPropertyAtom :: GA.PrefixMap -> CharParser st DGAtom
parseDGObjectPropertyAtom pm = parseEnclosedWithKeyword "ObjectPropertyAtom" $
    DGObjectPropertyAtom <$>
    parseObjectPropertyExpression pm <*>
    parseIndividualArg pm <*>
    parseIndividualArg pm

-- | Either kind of description-graph atom.
parseDGAtom :: GA.PrefixMap -> CharParser st DGAtom
parseDGAtom pm = parseDGClassAtom pm <|> parseDGObjectPropertyAtom pm

-- | Body(atom ...) of a description-graph rule.
parseDGBody :: GA.PrefixMap -> CharParser st DGBody
parseDGBody pm = parseEnclosedWithKeyword "Body" $ many (parseDGAtom pm)

-- | Head(atom ...) of a description-graph rule.
parseDGHead :: GA.PrefixMap -> CharParser st DGHead
parseDGHead pm = parseEnclosedWithKeyword "Head" $ many (parseDGAtom pm)

-- | DescriptionGraphRule(annotations body head).
parseDGRule :: GA.PrefixMap -> CharParser st Rule
parseDGRule pm = parseEnclosedWithKeyword "DescriptionGraphRule" $
    DGRule <$> parseAnnotations pm <*> parseDGBody pm <*> parseDGHead pm

-- | A rule axiom: DL-safe rule or description-graph rule.
parseRule :: GA.PrefixMap -> CharParser st Axiom
parseRule pm = Rule <$> (parseDLSafeRule pm <|> parseDGRule pm)
-- | NodeAssertion(class node) of a description graph.
parseDGNodeAssertion :: GA.PrefixMap -> CharParser st DGNodeAssertion
parseDGNodeAssertion pm = parseEnclosedWithKeyword "NodeAssertion" $
    DGNodeAssertion <$> parseIRI pm <*> parseIRI pm

-- | Nodes(nodeAssertion ...), non-empty.
parseDGNodes :: GA.PrefixMap -> CharParser st DGNodes
parseDGNodes pm = parseEnclosedWithKeyword "Nodes" $
    many1 (parseDGNodeAssertion pm)

-- | EdgeAssertion(property node node).
parseDGEdgeAssertion :: GA.PrefixMap -> CharParser st DGEdgeAssertion
parseDGEdgeAssertion pm = parseEnclosedWithKeyword "EdgeAssertion" $
    DGEdgeAssertion <$> parseIRI pm <*> parseIRI pm <*> parseIRI pm

-- | Edges(edgeAssertion ...), non-empty.
-- NOTE(review): name is presumably a typo for parseDGEdges; kept as-is
-- because it is a top-level binding other modules may reference.
parseDGEdes :: GA.PrefixMap -> CharParser st DGEdges
parseDGEdes pm = parseEnclosedWithKeyword "Edges" $
    many1 (parseDGEdgeAssertion pm)

-- | MainClasses(iri ...), non-empty.
parseMainClasses :: GA.PrefixMap -> CharParser st MainClasses
parseMainClasses pm = parseEnclosedWithKeyword "MainClasses" $
    many1 (parseIRI pm)

-- | DescriptionGraph(annotations name nodes edges mainClasses).
parseDGAxiom :: GA.PrefixMap -> CharParser st Axiom
parseDGAxiom pm = parseEnclosedWithKeyword "DescriptionGraph" $
    DGAxiom <$>
    parseAnnotations pm <*>
    parseIRI pm <*>
    parseDGNodes pm <*>
    parseDGEdes pm <*>
    parseMainClasses pm
-- | Any OWL2 axiom in functional syntax.
parseAxiom :: GA.PrefixMap -> CharParser st Axiom
parseAxiom pm =
    (parseDeclaration pm) <|>
    (parseClassAxiom pm) <|>
    (parseObjectPropertyAxiom pm) <|>
    (parseDataPropertyAxiom pm) <|>
    (parseDataTypeDefinition pm) <|>
    (parseHasKey pm) <|>
    (parseAssertion pm) <|>
    (parseAnnotationAxiom pm) <|>
    (parseRule pm) <|>
    (parseDGAxiom pm) <?>
    "Axiom"
-- | Ontology(ontologyIRI? versionIRI? import* annotation* axiom*).
-- The two leading IRIs are optional: the local helper first looks ahead
-- for an Import/Annotation/Axiom start and yields Nothing in that case
-- (via the always-failing continuation after the lookahead), so such a
-- clause is not misread as the ontology or version IRI.
parseOntology :: GA.PrefixMap -> CharParser st Ontology
parseOntology pm =
    let parseIriIfNotImportOrAxiomOrAnnotation =
            (arbitraryLookaheadOption [
                forget (parseDirectlyImportsDocument pm),
                forget (parseAnnotation pm),
                forget (parseAxiom pm)
            ] >> never) <|>
            optionMaybe (parseIRI pm)
    in
    parseEnclosedWithKeyword "Ontology" $ do
        ontologyIri <- parseIriIfNotImportOrAxiomOrAnnotation
        versionIri <- parseIriIfNotImportOrAxiomOrAnnotation
        imports <- many (parseDirectlyImportsDocument pm)
        annotations <- many (parseAnnotation pm)
        axs <- many (parseAxiom pm)
        return $ Ontology ontologyIri versionIri (imports) annotations axs
-- | Parses an OntologyDocument in OWL 2 Functional-Style Syntax.
parseOntologyDocument :: GA.PrefixMap -> CharParser st OntologyDocument
parseOntologyDocument gapm = do
    skips'
    prefixes <- many parsePrefixDeclaration
    -- NOTE(review): if this is Data.Map.union it is left-biased, so the
    -- caller-supplied prefixes in gapm shadow the document's own
    -- declarations on collision -- confirm that this is intended.
    let pm = union gapm (fromList prefixes)
    onto <- parseOntology pm
    return $ OntologyDocument (OntologyMetadata AS) pm onto
|
cb8cea1d77d4a354285424ae00b7c005b39106930dcd9adb400fa2a4ac0f7136 | yzh44yzh/practical_erlang | mylib_worker.erl | -module(mylib_worker).
-behavior(gen_server).
-export([start_link/0, get_version/0, get_modules/0, get_min_val/0, get_connection_timeout/0, all_apps/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
| null | https://raw.githubusercontent.com/yzh44yzh/practical_erlang/c9eec8cf44e152bf50d9bc6d5cb87fee4764f609/13_application/exercise/src/mylib_worker.erl | erlang | -module(mylib_worker).
-behavior(gen_server).
-export([start_link/0, get_version/0, get_modules/0, get_min_val/0, get_connection_timeout/0, all_apps/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
|
|
a9507e4dbe1ec744425b8d15120235a056048efe1cd0ec836bbca10d1c819ac6 | karimarttila/clojure | config.clj | (ns simpleserver.util.config
(:require
[aero.core :as aero]
[clojure.java.io :as io])) ;; clj-kondo requires this?
(defn create-config
  "Reads resources/config.edn with aero/read-config and returns the
  resulting configuration map."
  []
  (aero/read-config (io/resource "config.edn")))
;; Commented out for clj-kondo
;; Testing locally.
#_(comment
(def config (create-config))
(def table-name "session")
(def my-env :dev)
(def my-table-prefix "ss")
)
| null | https://raw.githubusercontent.com/karimarttila/clojure/ee1261b9a8e6be92cb47aeb325f82a278f2c1ed3/webstore-demo/re-frame-demo/src/clj/simpleserver/util/config.clj | clojure | clj-kondo requires this?
Commented out for clj-kondo
Testing locally. | (ns simpleserver.util.config
(:require
[aero.core :as aero]
(defn create-config
[]
(aero/read-config (io/resource "config.edn")))
#_(comment
(def config (create-config))
(def table-name "session")
(def my-env :dev)
(def my-table-prefix "ss")
)
|
9e9c89c914ee51670a65eea8f30b98d54497e410f137deb24baf2f55c1085cec | juxtin/clj-bob | lang.clj | (ns clj-bob.lang
(:refer-clojure :exclude [atom cons + < num if])
(:require [clojure.string :as str]))
;; J-Bob booleans are the symbols 't and 'nil.  `if-nil` bridges them to
;; Clojure: it treats Clojure nil and the symbol 'nil as false and forces
;; the matching thunk.
(defn if-nil [q a e]
  (if (or (nil? q)
          (= 'nil q))
    (e)
    (a)))

;; A function named `if` (clojure.core's is excluded in the ns form).
;; NOTE(review): in operator position `(if ...)` still resolves to the
;; special form, so this binding is mainly usable as a value -- confirm
;; the intended usage.
(defn if
  [Q A E]
  (if-nil Q
          (fn [] A)
          (fn [] E)))
;; Dotted pair: `car` holds the head, `cdr` the tail.
(defrecord Pair [car cdr])

;; Print pairs in Lisp dotted notation, e.g. (1 . 2).
(defmethod print-method Pair [p writer]
  (.write writer (format "(%s . %s)" (:car p) (:cdr p))))

;; car that works on both Pair records and Clojure sequences.
(defn s-car [x]
  (if (instance? Pair x)
    (:car x)
    (first x)))

;; cdr that works on both Pair records and Clojure sequences.
(defn s-cdr [x]
  (if (instance? Pair x)
    (:cdr x)
    (rest x)))
;; Keep handles on the real numeric ops; + and < are redefined below.
(def s-+ clojure.core/+)
(def s-< clojure.core/<)

;; cons that builds a proper list when the tail is sequential and a
;; dotted Pair otherwise.
(defn cons [h t]
  (if (sequential? t)
    (apply list (concat [h] t))
    (Pair. h t)))
;; NOTE(review): shadowed by a second `equal` defined later in this file,
;; so this case-insensitive string comparison is effectively dead code.
(defn equal
  "HAHAHAHA equality in Scheme is very weak."
  [x y]
  (= (str/lower-case x)
     (str/lower-case y)))

;; True for dotted Pairs and for non-empty lists.
(defn pair? [x]
  (or (instance? Pair x)
      (and (list? x)
           (seq x))))
;; this is a bit different
(defn num [x]
  ;; Coerce x to a number: numbers pass through, symbols/strings made of
  ;; digits are parsed, and anything else becomes 0.
  (let [num-sym? #(re-find #"^\d+$" (str %))]
    (cond
      (number? x) x
      (num-sym? x) (Integer/parseInt (str x))
      :else 0)))
;; J-Bob primitives below return the symbols 't / 'nil.  Both symbols are
;; truthy in Clojure, so results must be tested with (= 't ...), never
;; directly with `if`.
(defn atom [x]
  (if (pair? x)
    'nil
    't))

;; car of a pair; the empty list for atoms.
(defn car [x]
  (if (pair? x)
    (s-car x)
    ()))

;; cdr of a pair; the empty list for atoms.
(defn cdr [x]
  (if (pair? x)
    (s-cdr x)
    ()))
;; J-Bob equality: 't when values are Clojure-equal, otherwise 'nil.
;; (Replaces the earlier string-based `equal` above.)
(defn equal [x y]
  (if (= x y)
    't
    'nil))

;; J-Bob numeric less-than over `num`-coerced operands.
(defn < [x y]
  (if (s-< (num x) (num y))
    't
    'nil))
(defn nat?
  "J-Bob natp: 't for a non-negative integer, 'nil otherwise.
  The previous version tested (< 0 x) with this file's `<`, which returns
  the symbol 'nil -- truthy in Clojure -- so every integer (including
  negatives) came out as 't.  Use the real comparison, and include 0 to
  match J-Bob's natp."
  [x]
  (if (and (integer? x)
           (clojure.core/<= 0 x))
    't
    'nil))
;; Alias used by the book's surface syntax.
(def natp nat?)

;; Numeric + over `num`-coerced operands.
(defn + [x y]
  (s-+ (num x) (num y)))

;; defun/dethm mirror the book's definition forms; both expand to defn.
(defmacro defun
  [name args & body]
  `(defn ~name ~(vec args) ~@body))

(defmacro dethm
  [name args & body]
  `(defn ~name ~(vec args) ~@body))
(defn size
  "Number of cons cells in x: 0 for atoms, 1 + size of both halves for
  pairs.  The previous version tested (atom x) directly, but `atom`
  returns the symbols 't / 'nil and both are truthy in Clojure, so every
  input counted as an atom and the result was always 0.  Compare against
  't explicitly instead."
  [x]
  (if (= 't (atom x))
    0
    (+ 1 (+ (size (car x)) (size (cdr x))))))
| null | https://raw.githubusercontent.com/juxtin/clj-bob/daec6cae0582e9bb4a72153e97f01fab5872ed1f/src/clj_bob/lang.clj | clojure | this is a bit different | (ns clj-bob.lang
(:refer-clojure :exclude [atom cons + < num if])
(:require [clojure.string :as str]))
(defn if-nil [q a e]
(if (or (nil? q)
(= 'nil q))
(e)
(a)))
(defn if
[Q A E]
(if-nil Q
(fn [] A)
(fn [] E)))
(defrecord Pair [car cdr])
(defmethod print-method Pair [p writer]
(.write writer (format "(%s . %s)" (:car p) (:cdr p))))
(defn s-car [x]
(if (instance? Pair x)
(:car x)
(first x)))
(defn s-cdr [x]
(if (instance? Pair x)
(:cdr x)
(rest x)))
(def s-+ clojure.core/+)
(def s-< clojure.core/<)
(defn cons [h t]
(if (sequential? t)
(apply list (concat [h] t))
(Pair. h t)))
(defn equal
"HAHAHAHA equality in Scheme is very weak."
[x y]
(= (str/lower-case x)
(str/lower-case y)))
(defn pair? [x]
(or (instance? Pair x)
(and (list? x)
(seq x))))
(defn num [x]
(let [num-sym? #(re-find #"^\d+$" (str %))]
(cond
(number? x) x
(num-sym? x) (Integer/parseInt (str x))
:else 0)))
(defn atom [x]
(if (pair? x)
'nil
't))
(defn car [x]
(if (pair? x)
(s-car x)
()))
(defn cdr [x]
(if (pair? x)
(s-cdr x)
()))
(defn equal [x y]
(if (= x y)
't
'nil))
(defn < [x y]
(if (s-< (num x) (num y))
't
'nil))
(defn nat? [x]
(if (and (integer? x)
(< 0 x))
't
'nil))
(def natp nat?)
(defn + [x y]
(s-+ (num x) (num y)))
(defmacro defun
[name args & body]
`(defn ~name ~(vec args) ~@body))
(defmacro dethm
[name args & body]
`(defn ~name ~(vec args) ~@body))
(defn size [x]
(if (atom x)
0
(+ 1 (+ (size (car x)) (size (cdr x))))))
|
0682241e1480796801caa572d6bc9cc343975ae48d530719843bba1b23f5fa46 | kitnil/dotfiles | dotfiles.scm | (use-modules (packages containers)
(packages kubernetes)
(packages networking))
;; Build a Guix manifest from the container/Kubernetes/networking
;; packages pulled in by the use-modules form above.
(packages->manifest
 (list cisco
       plumber
       k3d
       k9s
       kompose
       kubectl
       kubernetes-helm
       nerdctl
       virtctl))
| null | https://raw.githubusercontent.com/kitnil/dotfiles/68c67af0cbaa7f56f3e53f660f7e3b46e6d3fb4e/dotfiles/manifests/dotfiles.scm | scheme | (use-modules (packages containers)
(packages kubernetes)
(packages networking))
(packages->manifest
(list cisco
plumber
k3d
k9s
kompose
kubectl
kubernetes-helm
nerdctl
virtctl))
|
|
41f6f42a4460c026a280dbde9405f87aa4a0dacd837d688b40b0cdb34f035f40 | avsm/mirage-duniverse | tcptimer.mli |
(*
* Copyright (c) 2012 Balraj Singh <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
(** Handle on a TCP timer (abstract). *)
type t

(** Time values; presumably nanoseconds, as suggested by [period_ns] below
    -- confirm against the implementation. *)
type time = int64

(** Verdict returned by the expiry callback. *)
type tr =
  | Stoptimer                                  (* stop the timer *)
  | Continue of Sequence.t                     (* re-arm for this sequence *)
  | ContinueSetPeriod of (time * Sequence.t)   (* re-arm with a new period *)

module Make(T:Mirage_time_lwt.S) : sig
  (** [t ~period_ns ~expire] builds a timer whose [expire] callback decides,
      per sequence number, whether to stop or continue. *)
  val t : period_ns: time -> expire: (Sequence.t -> tr Lwt.t) -> t

  (** [start tmr ?p seq] arms the timer for [seq]; [?p] presumably overrides
      the period for this run -- confirm in the implementation. *)
  val start : t -> ?p:time -> Sequence.t -> unit Lwt.t
end
| null | https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/tcpip/src/tcp/tcptimer.mli | ocaml |
* Copyright ( c ) 2012 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2012 Balraj Singh <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
type t
type time = int64
type tr =
| Stoptimer
| Continue of Sequence.t
| ContinueSetPeriod of (time * Sequence.t)
module Make(T:Mirage_time_lwt.S) : sig
val t : period_ns: time -> expire: (Sequence.t -> tr Lwt.t) -> t
val start : t -> ?p:time -> Sequence.t -> unit Lwt.t
end
|
|
115cd7e38aa11596c46d2ebaf3c18d2b1729a1e68810fe3da6a76c689e54e1e4 | debasishg/hask | Service.hs | -- | All service-related functions.
module Lib.Service
( module Service
) where
import Lib.Service.AccountService as Service (AccountService (..))
| null | https://raw.githubusercontent.com/debasishg/hask/1745ed50c8175cd035e8070c9cb988f4f5063653/h3layer/src/Lib/Service.hs | haskell | | All service-related functions. |
module Lib.Service
( module Service
) where
import Lib.Service.AccountService as Service (AccountService (..))
|
db416a063be971c5827c9d52838b7c0b18972dca0eda900ef6b9434fb2ac762f | alesaccoia/festival_flinger | ogi_aec_diphone.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;<--OHSU-->;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
Oregon Health & Science University ; ;
Division of Biomedical Computer Science ; ;
Center for Spoken Language Understanding ; ;
Portland , OR USA ; ;
Copyright ( c ) 2000 ; ;
;; ;;
This module is not part of the CSTR / University of Edinburgh ; ;
;; release of the Festival TTS system. ;;
;; ;;
;; In addition to any conditions disclaimers below, please see the file ;;
" NE Copyright Materials License.txt " distributed with this software ; ;
;; for information on usage and redistribution, and for a DISCLAIMER OF ;;
;; ALL WARRANTIES. ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;<--OHSU-->;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Set up diphone voice
OGI AEC diphones : male American English collected May 1997
;; load unit selection routine
(set! ogi_aec_diphone_dir (cdr (assoc 'ogi_aec_diphone voice-locations)))
(set! load-path (cons (path-append ogi_aec_diphone_dir "festvox") load-path))
(set! load-path (cons (path-append libdir "ogi") load-path))
(require 'ogi_configure_voice)
;; select voice
;; this defines all the modules to use when synthesizing text using the current voice
(define (voice_ogi_aec_diphone)
"(voice_ogi_aec_diphone)
Set up the current voice to be an American male AEC using the aec diphone set."
;; set target average pitch and variation
(set! ogi_aec_diphone_f0_mean 105)
(set! ogi_aec_diphone_f0_std 19)
;; set unit selection method
(set! OGI_unitsel OGI_diphone_unitsel)
;; use the grouped file by default
(set! ogi_grouped_or_ungrouped 'grouped)
use or sinLPC signal processing
(set! ogi_resORsinLPC 'resLPC)
;; configure voice using defaults
(ogi_configure_voice 'ogi_aec_diphone)
;; overwrite defaults here:
;; diphone unit selection fallbacks
(set! ogi_di_alt_L '((m= (m)) (n= (n)) (l= (l)) (h (pau))
(j (i:)) (dx (t d)) (& (^))
(k>9r (k)) (k>w (k)) (k>l (k))
(p>9r (p)) (p>w (p)) (p>l (p))
(t>9r (t)) (t>w (t)) (t>l (t)) (t>9r<s (t>9r t<s t))
(p>9r<s (p>9r p)) (t<s (t))))
(set! ogi_di_alt_R '((m= (m)) (n= (n)) (l= (l)) (h (pau))
(j (i:)) (dx (t d)) (& (^))
(k>9r (k)) (k>w (k)) (k>l (k))
(p>9r (p)) (p>w (p)) (p>l (p))
(t>9r (t)) (t>w (t)) (t>l (t))
(t>9r<s (t>9r t<s t)) (p>9r<s (p>9r p))
(t<s (t))))
(set! ogi_di_default "pau-h")
;; reslpc ungrouped analysis
;; define analysis parameters for OGIresLPC module
(set! voicename 'ogi_aec_diphone)
;; define analysis parameters for OGIresLPC module
(if (string-equal ogi_grouped_or_ungrouped "grouped")
(set! ogi_resLPC_analysis_params (list
(list 'dbname voice_dirname)
(list 'groupfile (path-append ogi_diphone_dir "group" (string-append voicename "_resLPC.group")))
'(data_type "resLPC")
'(access_mode "ondemand")
))
;; else if ungrouped
(set! ogi_resLPC_analysis_params (list
(list 'dbname voice_dirname)
(list 'unitdic_file (path-append ogi_diphone_dir "ungrouped" "unittable.ms"))
(list 'gain_file (path-append ogi_diphone_dir "festvox" "gain.dat"))
'(phoneset "worldbet")
(list 'base_dir (path-append ogi_diphone_dir "ungrouped/"))
'(lpc_dir "lpc/")
'(lpc_ext ".lsf")
'(exc_dir "lpc/")
'(exc_ext ".res")
'(pm_dir "pm/")
'(pm_ext ".pmv")
'(data_type "resLPC")
'(access_mode "ondemand")
'(samp_freq 16000)
'(sig_band 0.010)
'(isCompressed "Y") ;; if "Y", compress when saving group file
'(preemph 0.96)
))
)
;; initialize signal processing module
(initialize_OGIsynth)
)
;; proclaim voice
(proclaim_voice
'ogi_aec_diphone
'((language english)
(gender male)
(dialect american)
(description
"This voice provides an American English male voice using a
residual excited or sinusoidal LPC diphone synthesis module created at
OGI. It uses a lexicon compiled from MOBY and CMU lexicons, and
other trained modules used by CSTR voices.")
(samplerate 16000)))
comment this out if you want changes in this file to take effect without restarting Festival
(provide 'ogi_aec_diphone)
| null | https://raw.githubusercontent.com/alesaccoia/festival_flinger/87345aad3a3230751a8ff479f74ba1676217accd/lib/voices/english/ogi_aec_diphone/festvox/ogi_aec_diphone.scm | scheme | <--OHSU-->;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;
;
;
;
;
;;
;
release of the Festival TTS system. ;;
;;
In addition to any conditions disclaimers below, please see the file ;;
;
for information on usage and redistribution, and for a DISCLAIMER OF ;;
ALL WARRANTIES. ;;
;;
<--OHSU-->;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Set up diphone voice
load unit selection routine
select voice
this defines all the modules to use when synthesizing text using the current voice
set target average pitch and variation
set unit selection method
use the grouped file by default
configure voice using defaults
overwrite defaults here:
diphone unit selection fallbacks
reslpc ungrouped analysis
define analysis parameters for OGIresLPC module
define analysis parameters for OGIresLPC module
else if ungrouped
if "Y", compress when saving group file
initialize signal processing module
proclaim voice |
OGI AEC diphones : male American English collected May 1997
(set! ogi_aec_diphone_dir (cdr (assoc 'ogi_aec_diphone voice-locations)))
(set! load-path (cons (path-append ogi_aec_diphone_dir "festvox") load-path))
(set! load-path (cons (path-append libdir "ogi") load-path))
(require 'ogi_configure_voice)
(define (voice_ogi_aec_diphone)
"(voice_ogi_aec_diphone)
Set up the current voice to be an American male AEC using the aec diphone set."
(set! ogi_aec_diphone_f0_mean 105)
(set! ogi_aec_diphone_f0_std 19)
(set! OGI_unitsel OGI_diphone_unitsel)
(set! ogi_grouped_or_ungrouped 'grouped)
use or sinLPC signal processing
(set! ogi_resORsinLPC 'resLPC)
(ogi_configure_voice 'ogi_aec_diphone)
(set! ogi_di_alt_L '((m= (m)) (n= (n)) (l= (l)) (h (pau))
(j (i:)) (dx (t d)) (& (^))
(k>9r (k)) (k>w (k)) (k>l (k))
(p>9r (p)) (p>w (p)) (p>l (p))
(t>9r (t)) (t>w (t)) (t>l (t)) (t>9r<s (t>9r t<s t))
(p>9r<s (p>9r p)) (t<s (t))))
(set! ogi_di_alt_R '((m= (m)) (n= (n)) (l= (l)) (h (pau))
(j (i:)) (dx (t d)) (& (^))
(k>9r (k)) (k>w (k)) (k>l (k))
(p>9r (p)) (p>w (p)) (p>l (p))
(t>9r (t)) (t>w (t)) (t>l (t))
(t>9r<s (t>9r t<s t)) (p>9r<s (p>9r p))
(t<s (t))))
(set! ogi_di_default "pau-h")
(set! voicename 'ogi_aec_diphone)
(if (string-equal ogi_grouped_or_ungrouped "grouped")
(set! ogi_resLPC_analysis_params (list
(list 'dbname voice_dirname)
(list 'groupfile (path-append ogi_diphone_dir "group" (string-append voicename "_resLPC.group")))
'(data_type "resLPC")
'(access_mode "ondemand")
))
(set! ogi_resLPC_analysis_params (list
(list 'dbname voice_dirname)
(list 'unitdic_file (path-append ogi_diphone_dir "ungrouped" "unittable.ms"))
(list 'gain_file (path-append ogi_diphone_dir "festvox" "gain.dat"))
'(phoneset "worldbet")
(list 'base_dir (path-append ogi_diphone_dir "ungrouped/"))
'(lpc_dir "lpc/")
'(lpc_ext ".lsf")
'(exc_dir "lpc/")
'(exc_ext ".res")
'(pm_dir "pm/")
'(pm_ext ".pmv")
'(data_type "resLPC")
'(access_mode "ondemand")
'(samp_freq 16000)
'(sig_band 0.010)
'(preemph 0.96)
))
)
(initialize_OGIsynth)
)
(proclaim_voice
'ogi_aec_diphone
'((language english)
(gender male)
(dialect american)
(description
"This voice provides an American English male voice using a
residual excited or sinusoidal LPC diphone synthesis module created at
OGI. It uses a lexicon compiled from MOBY and CMU lexicons, and
other trained modules used by CSTR voices.")
(samplerate 16000)))
comment this out if you want changes in this file to take effect without restarting Festival
(provide 'ogi_aec_diphone)
|
d2aef96194b2e5a861a7f58525e6fe3718ad328b8d907a9367c287dc7f0310e1 | ayato-p/mokuhan | renderer_test.cljc | (ns org.panchromatic.mokuhan.renderer-test
(:require [clojure.test :as t]
[org.panchromatic.mokuhan.renderer :as sut]
[org.panchromatic.mokuhan.ast :as ast]))
(def ^:private delimiters
{:open "{{" :close "}}"})
(t/deftest render-escaped-variable-test
(t/testing "Single path"
(let [v (ast/new-escaped-variable ["x"] delimiters)]
(t/testing "String"
(t/is (= "Hi" (sut/render v {:x "Hi"}))))
(t/testing "Integer"
(t/is (= "42" (sut/render v {:x 42}))))
(t/testing "Boolean"
(t/is (= "true" (sut/render v {:x true})))
(t/is (= "false" (sut/render v {:x false}))))
(t/testing "HTML string"
(t/is (= "&<>'"" (sut/render v {:x "&<>'\""}))))
(t/testing "Map"
(t/is (= "{:foo 1}" (sut/render v {:x {:foo 1}}))))
(t/testing "Vector"
(t/is (= "[1 2]" (sut/render v {:x [1 2]}))))
(t/testing "Object"
(t/is (= "object!" (sut/render v {:x (reify Object (toString [this] "object!"))}))))
(t/testing "nil"
(t/is (= "" (sut/render v {:x nil}))))
(t/testing "missing"
(t/is (= "" (sut/render v {}))))))
(t/testing "Dotted path"
(let [v (ast/new-escaped-variable ["x" "y"] delimiters)]
(t/testing "String"
(t/is (= "Hi" (sut/render v {:x {:y "Hi"}}))))
(t/testing "Integer"
(t/is (= "42" (sut/render v {:x {:y 42}}))))
(t/testing "Boolean"
(t/is (= "true" (sut/render v {:x {:y true}})))
(t/is (= "false" (sut/render v {:x {:y false}}))))
(t/testing "HTML string"
(t/is (= "&<>'"" (sut/render v {:x {:y "&<>'\""}}))))
(t/testing "Map"
(t/is (= "{:foo 1}" (sut/render v {:x {:y {:foo 1}}}))))
(t/testing "Vector"
(t/is (= "[1 2]" (sut/render v {:x {:y [1 2]}}))))
(t/testing "nil"
(t/is (= "" (sut/render v {:x {:y nil}}))))
(t/testing "missing"
(t/is (= "" (sut/render v {:x {}}))))))
(t/testing "Include index of list"
(let [v (ast/new-escaped-variable ["x" 1 "y"] delimiters)]
(t/is (= "42" (sut/render v {:x [{:y 41} {:y 42}]})))
(t/is (= "" (sut/render v {:x [{:y 41}]})))))
(t/testing "Dot"
(let [v (ast/new-escaped-variable ["."] delimiters)]
(t/is (= "{:x 42}" (sut/render v {:x 42}))))))
(t/deftest render-standard-section-test
(t/testing "single path section"
(let [v (-> (ast/new-standard-section ["x"] delimiters)
(update :contents conj (ast/new-text "!!")))]
(t/is (= "!!"
(sut/render v {:x true})
(sut/render v {:x {}})
(sut/render v {:x 42})
(sut/render v {:x "Hello"})))
(t/is (= ""
(sut/render v {:x false})
(sut/render v {:x []})
(sut/render v {:x nil})
(sut/render v {})
(sut/render v nil)))
(t/is (= "!!!!" (sut/render v {:x [1 1]})))
(t/is (= "Hello!!" (sut/render v {:x #(str "Hello" %)})))))
(t/testing "dotted path section"
(let [v (-> (ast/new-standard-section ["x" "y"] delimiters)
(update :contents conj (ast/new-text "!!")))]
(t/is (= "!!"
(sut/render v {:x {:y true}})
(sut/render v {:x {:y {}}})
(sut/render v {:x {:y 42}})
(sut/render v {:x {:y "Hello"}})))
(t/is (= ""
(sut/render v {:x {:y false}})
(sut/render v {:x {:y []}})
(sut/render v {:x {:y nil}})
(sut/render v {:x {}})
(sut/render v {:x nil})))
(t/is (= "!!!!" (sut/render v {:x {:y [1 1]}})))
(t/is (= "Hello!!" (sut/render v {:x {:y #(str "Hello" %)}})))))
(t/testing "nested section"
(let [v (-> (ast/new-standard-section ["x"] delimiters)
(update :contents conj (-> (ast/new-standard-section ["y"] delimiters)
(update :contents conj (ast/new-text "!!")))))]
(t/is (= "!!" (sut/render v {:x {:y true}})))
(t/is (= "!!!!" (sut/render v {:x {:y [1 1]}})))
(t/is (= "!!!!!!!!" (sut/render v {:x [{:y [1 1]} {:y [1 1]}]})))
(t/is (= "!!!!"
(sut/render v {:x [{:y [1 1]} {:y []}]})
(sut/render v {:x [{:y true} {:y false} {:y true}]})))))
(t/testing "nested and don't use outer key"
(let [v (-> [(-> (ast/new-standard-section ["x"] delimiters)
(update :contents conj (-> (ast/new-standard-section ["y"] delimiters)
(update :contents conj (ast/new-text "Hello")))))]
ast/new-mustache)]
(t/is (= "" (sut/render v {:x [{:y false}]
:y true}))))))
| null | https://raw.githubusercontent.com/ayato-p/mokuhan/8f6de17b5c4a3712aa83ba4f37234de86f3c630b/test/org/panchromatic/mokuhan/renderer_test.cljc | clojure | (ns org.panchromatic.mokuhan.renderer-test
(:require [clojure.test :as t]
[org.panchromatic.mokuhan.renderer :as sut]
[org.panchromatic.mokuhan.ast :as ast]))
(def ^:private delimiters
{:open "{{" :close "}}"})
(t/deftest render-escaped-variable-test
(t/testing "Single path"
(let [v (ast/new-escaped-variable ["x"] delimiters)]
(t/testing "String"
(t/is (= "Hi" (sut/render v {:x "Hi"}))))
(t/testing "Integer"
(t/is (= "42" (sut/render v {:x 42}))))
(t/testing "Boolean"
(t/is (= "true" (sut/render v {:x true})))
(t/is (= "false" (sut/render v {:x false}))))
(t/testing "HTML string"
(t/is (= "&<>'"" (sut/render v {:x "&<>'\""}))))
(t/testing "Map"
(t/is (= "{:foo 1}" (sut/render v {:x {:foo 1}}))))
(t/testing "Vector"
(t/is (= "[1 2]" (sut/render v {:x [1 2]}))))
(t/testing "Object"
(t/is (= "object!" (sut/render v {:x (reify Object (toString [this] "object!"))}))))
(t/testing "nil"
(t/is (= "" (sut/render v {:x nil}))))
(t/testing "missing"
(t/is (= "" (sut/render v {}))))))
(t/testing "Dotted path"
(let [v (ast/new-escaped-variable ["x" "y"] delimiters)]
(t/testing "String"
(t/is (= "Hi" (sut/render v {:x {:y "Hi"}}))))
(t/testing "Integer"
(t/is (= "42" (sut/render v {:x {:y 42}}))))
(t/testing "Boolean"
(t/is (= "true" (sut/render v {:x {:y true}})))
(t/is (= "false" (sut/render v {:x {:y false}}))))
(t/testing "HTML string"
(t/is (= "&<>'"" (sut/render v {:x {:y "&<>'\""}}))))
(t/testing "Map"
(t/is (= "{:foo 1}" (sut/render v {:x {:y {:foo 1}}}))))
(t/testing "Vector"
(t/is (= "[1 2]" (sut/render v {:x {:y [1 2]}}))))
(t/testing "nil"
(t/is (= "" (sut/render v {:x {:y nil}}))))
(t/testing "missing"
(t/is (= "" (sut/render v {:x {}}))))))
(t/testing "Include index of list"
(let [v (ast/new-escaped-variable ["x" 1 "y"] delimiters)]
(t/is (= "42" (sut/render v {:x [{:y 41} {:y 42}]})))
(t/is (= "" (sut/render v {:x [{:y 41}]})))))
(t/testing "Dot"
(let [v (ast/new-escaped-variable ["."] delimiters)]
(t/is (= "{:x 42}" (sut/render v {:x 42}))))))
(t/deftest render-standard-section-test
(t/testing "single path section"
(let [v (-> (ast/new-standard-section ["x"] delimiters)
(update :contents conj (ast/new-text "!!")))]
(t/is (= "!!"
(sut/render v {:x true})
(sut/render v {:x {}})
(sut/render v {:x 42})
(sut/render v {:x "Hello"})))
(t/is (= ""
(sut/render v {:x false})
(sut/render v {:x []})
(sut/render v {:x nil})
(sut/render v {})
(sut/render v nil)))
(t/is (= "!!!!" (sut/render v {:x [1 1]})))
(t/is (= "Hello!!" (sut/render v {:x #(str "Hello" %)})))))
(t/testing "dotted path section"
(let [v (-> (ast/new-standard-section ["x" "y"] delimiters)
(update :contents conj (ast/new-text "!!")))]
(t/is (= "!!"
(sut/render v {:x {:y true}})
(sut/render v {:x {:y {}}})
(sut/render v {:x {:y 42}})
(sut/render v {:x {:y "Hello"}})))
(t/is (= ""
(sut/render v {:x {:y false}})
(sut/render v {:x {:y []}})
(sut/render v {:x {:y nil}})
(sut/render v {:x {}})
(sut/render v {:x nil})))
(t/is (= "!!!!" (sut/render v {:x {:y [1 1]}})))
(t/is (= "Hello!!" (sut/render v {:x {:y #(str "Hello" %)}})))))
(t/testing "nested section"
(let [v (-> (ast/new-standard-section ["x"] delimiters)
(update :contents conj (-> (ast/new-standard-section ["y"] delimiters)
(update :contents conj (ast/new-text "!!")))))]
(t/is (= "!!" (sut/render v {:x {:y true}})))
(t/is (= "!!!!" (sut/render v {:x {:y [1 1]}})))
(t/is (= "!!!!!!!!" (sut/render v {:x [{:y [1 1]} {:y [1 1]}]})))
(t/is (= "!!!!"
(sut/render v {:x [{:y [1 1]} {:y []}]})
(sut/render v {:x [{:y true} {:y false} {:y true}]})))))
(t/testing "nested and don't use outer key"
(let [v (-> [(-> (ast/new-standard-section ["x"] delimiters)
(update :contents conj (-> (ast/new-standard-section ["y"] delimiters)
(update :contents conj (ast/new-text "Hello")))))]
ast/new-mustache)]
(t/is (= "" (sut/render v {:x [{:y false}]
:y true}))))))
|
|
6f4947175e9394e316d0b7679bf7a33f69138a679b5146778f63f7a99cfa0bf6 | monadbobo/ocaml-core | test.ml | type t = {foo:int ; bar : int ; baz : int} with compare
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/compare/sample_output/test.ml | ocaml | type t = {foo:int ; bar : int ; baz : int} with compare
|
|
957195459c8ca38faaab9927d168140725b48dd661f7e529ea9bcc5c468f3a9f | nikomatsakis/a-mir-formality | wf--outlives.rkt | #lang racket
(require redex/reduction-semantics
"../../util.rkt"
"../grammar.rkt"
"../prove.rkt"
"../libcore.rkt"
)
(module+ test
(redex-let*
formality-rust
[(Rust/Program (term ([(crate C { (struct Ref[(type T) (lifetime a)]
where [(T : a)]
{ })
(struct NoRef[(type T) (lifetime a)]
where []
{ })
})] C)))
]
(traced '()
(test-term-true
(rust:can-prove-where-clause-in-program
Rust/Program
(∀ [(type A)]
where []
; key point here:
;
; requires proving `A : 'b`, but that's implied by
Ref < A , ' b > being WF
(for[(lifetime b)] ((Ref < A b >) : b))
)
)
))
(traced '()
(test-term-false
(rust:can-prove-where-clause-in-program
Rust/Program
(∀ [(type A)]
where []
in contrast to previous test , the ` NoRef ` struct does not
; imply a connection between `A` and `b`
(for[(lifetime b)] ((NoRef < A b >) : b))
)
)
))
)
)
| null | https://raw.githubusercontent.com/nikomatsakis/a-mir-formality/71be4d5c4bd5e91d326277eaedd19a7abe3ac76a/racket-src/rust/test/wf--outlives.rkt | racket | key point here:
requires proving `A : 'b`, but that's implied by
imply a connection between `A` and `b` | #lang racket
(require redex/reduction-semantics
"../../util.rkt"
"../grammar.rkt"
"../prove.rkt"
"../libcore.rkt"
)
(module+ test
(redex-let*
formality-rust
[(Rust/Program (term ([(crate C { (struct Ref[(type T) (lifetime a)]
where [(T : a)]
{ })
(struct NoRef[(type T) (lifetime a)]
where []
{ })
})] C)))
]
(traced '()
(test-term-true
(rust:can-prove-where-clause-in-program
Rust/Program
(∀ [(type A)]
where []
Ref < A , ' b > being WF
(for[(lifetime b)] ((Ref < A b >) : b))
)
)
))
(traced '()
(test-term-false
(rust:can-prove-where-clause-in-program
Rust/Program
(∀ [(type A)]
where []
in contrast to previous test , the ` NoRef ` struct does not
(for[(lifetime b)] ((NoRef < A b >) : b))
)
)
))
)
)
|
fee5dd349e94e0dde694da8c30a7cf772f3e9ce5ed542303f2d7e7c307f81b2c | sKabYY/palestra | p68.scm | (load "stream.scm")
; Don't run this file!!!
(define (wrong-pairs s t)
(interleave
(stream-map-n (lambda (x) (list (stream-car s) x)) t)
(wrong-pairs (stream-cdr s) (stream-cdr t))))
;(define hehe (wrong-pairs integers integers))
;(stream-for-n println hehe 10)
(display (stream-car
(wrong-pairs integers integers)
))(newline)
| null | https://raw.githubusercontent.com/sKabYY/palestra/0906cc3a1fb786093a388d5ae7d59120f5aae16c/old1/sicp/3/p68.scm | scheme | Don't run this file!!!
(define hehe (wrong-pairs integers integers))
(stream-for-n println hehe 10) | (load "stream.scm")
(define (wrong-pairs s t)
(interleave
(stream-map-n (lambda (x) (list (stream-car s) x)) t)
(wrong-pairs (stream-cdr s) (stream-cdr t))))
(display (stream-car
(wrong-pairs integers integers)
))(newline)
|
e002d82651e9e1a5af85311d7c0681f286cb3e21d431075194beea685fc86366 | sealchain-project/sealchain | Swagger.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
module Cardano.Wallet.API.V1.Swagger where
import Universum hiding (get, put)
import Cardano.Wallet.API.Indices (ParamNames)
import Cardano.Wallet.API.Request.Filter
import Cardano.Wallet.API.Request.Pagination
import Cardano.Wallet.API.Request.Sort
import Cardano.Wallet.API.Response
import Cardano.Wallet.API.V1.Generic (gconsName)
import Cardano.Wallet.API.V1.Parameters
import Cardano.Wallet.API.V1.Swagger.Example
import Cardano.Wallet.API.V1.Types
import Cardano.Wallet.TypeLits (KnownSymbols (..))
import Pos.Chain.Update (SoftwareVersion (svNumber))
import Pos.Core.NetworkMagic (NetworkMagic (..))
import Pos.Util.CompileInfo (CompileTimeInfo, ctiGitRevision)
import Pos.Util.Servant (LoggingApi)
import Control.Lens (At, Index, IxValue, at, (?~))
import Data.Aeson (encode)
import Data.Aeson.Encode.Pretty
import Data.Map (Map)
import Data.Swagger hiding (Example)
import Data.Typeable
import Formatting (build, sformat)
import NeatInterpolation
import Servant (Handler, ServantErr (..), Server, StdMethod (..))
import Servant.API.Sub
import Servant.Swagger
import Servant.Swagger.UI (SwaggerSchemaUI')
import Servant.Swagger.UI.Core (swaggerSchemaUIServerImpl)
import Servant.Swagger.UI.ReDoc (redocFiles)
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Pos.Core as Core
import qualified Pos.Core.Attributes as Core
import qualified Pos.Crypto.Hashing as Crypto
--
-- Helper functions
--
-- | Surround a Text with another
surroundedBy :: Text -> Text -> Text
surroundedBy wrap context = wrap <> context <> wrap
-- | Display a multi-line code-block inline (e.g. in tables)
inlineCodeBlock :: Text -> Text
inlineCodeBlock txt = "<pre>" <> replaceNewLines (replaceWhiteSpaces txt) <> "</pre>"
where
replaceNewLines = T.replace "\n" "<br/>"
replaceWhiteSpaces = T.replace " " " "
-- | Drill in the 'Swagger' file in an unsafe way to modify a specific operation
-- identified by a tuple (verb, path). The function looks a bit scary to use
-- but is actually rather simple (see example below).
--
-- Note that if the identified path doesn't exist, the function will throw
-- at runtime when trying to read the underlying swagger structure!
--
-- Example:
--
-- swagger
& paths % ~ ( POST , " /api / v1 / wallets " ) ` alterOperation ` ( description ? ~ " foo " )
& paths % ~ ( GET , " /api / v1 / wallets/{walletId } " ) ` alterOperation ` ( description ? ~ " bar " )
--
alterOperation ::
( IxValue m ~ item
, Index m ~ FilePath
, At m
, HasGet item (Maybe Operation)
, HasPut item (Maybe Operation)
, HasPatch item (Maybe Operation)
, HasPost item (Maybe Operation)
, HasDelete item (Maybe Operation)
)
=> (StdMethod, FilePath)
-> (Operation -> Operation)
-> m
-> m
alterOperation (verb, path) alter =
at path %~ (Just . unsafeAlterItem)
where
errUnreachableEndpoint :: Text
errUnreachableEndpoint =
"Unreachable endpoint: " <> show verb <> " " <> show path
errUnsupportedVerb :: Text
errUnsupportedVerb =
"Used unsupported verb to identify an endpoint: " <> show verb
unsafeAlterItem ::
( HasGet item (Maybe Operation)
, HasPut item (Maybe Operation)
, HasPatch item (Maybe Operation)
, HasPost item (Maybe Operation)
, HasDelete item (Maybe Operation)
)
=> Maybe item
-> item
unsafeAlterItem = maybe
(error errUnreachableEndpoint)
(unsafeLensFor verb %~ (Just . unsafeAlterOperation))
unsafeAlterOperation :: Maybe Operation -> Operation
unsafeAlterOperation = maybe
(error errUnreachableEndpoint)
alter
unsafeLensFor ::
( Functor f
, HasGet item (Maybe Operation)
, HasPut item (Maybe Operation)
, HasPatch item (Maybe Operation)
, HasPost item (Maybe Operation)
, HasDelete item (Maybe Operation)
)
=> StdMethod
-> (Maybe Operation -> f (Maybe Operation))
-> item
-> f item
unsafeLensFor = \case
GET -> get
PUT -> put
PATCH -> patch
POST -> post
DELETE -> delete
_ -> error errUnsupportedVerb
-- | A combinator to modify the description of an operation, using
-- 'alterOperation' under the hood.
--
--
-- Example:
--
-- swagger
& paths % ~ ( POST , " /api / v1 / wallets " ) ` setDescription ` " foo "
& paths % ~ ( GET , " /api / v1 / wallets/{walletId } " ) ` setDescription ` " bar "
setDescription
:: (IxValue m ~ PathItem, Index m ~ FilePath, At m)
=> (StdMethod, FilePath)
-> Text
-> m
-> m
setDescription endpoint str =
endpoint `alterOperation` (description ?~ str)
--
-- Instances
--
instance HasSwagger a => HasSwagger (LoggingApi config a) where
toSwagger _ = toSwagger (Proxy @a)
instance
( Typeable res
, KnownSymbols syms
, HasSwagger subApi
, syms ~ ParamNames res params
) => HasSwagger (FilterBy params res :> subApi) where
toSwagger _ =
let swgr = toSwagger (Proxy @subApi)
allOps = map toText $ symbolVals (Proxy @syms)
in swgr & over (operationsOf swgr . parameters) (addFilterOperations allOps)
where
addFilterOperations :: [Text] -> [Referenced Param] -> [Referenced Param]
addFilterOperations ops xs = map (Inline . newParam) ops <> xs
newParam :: Text -> Param
newParam opName =
let typeOfRes = fromString $ show $ typeRep (Proxy @ res)
in Param {
_paramName = opName
, _paramRequired = Nothing
, _paramDescription = Just $ filterDescription typeOfRes
, _paramSchema = ParamOther ParamOtherSchema {
_paramOtherSchemaIn = ParamQuery
, _paramOtherSchemaAllowEmptyValue = Nothing
, _paramOtherSchemaParamSchema = mempty
}
}
filterDescription :: Text -> Text
filterDescription typeOfRes = mconcat
[ "A **FILTER** operation on a " <> typeOfRes <> ". "
, "Filters support a variety of queries on the resource. "
, "These are: \n\n"
, "- `EQ[value]` : only allow values equal to `value`\n"
, "- `LT[value]` : allow resource with attribute less than the `value`\n"
, "- `GT[value]` : allow objects with an attribute greater than the `value`\n"
, "- `GTE[value]` : allow objects with an attribute at least the `value`\n"
, "- `LTE[value]` : allow objects with an attribute at most the `value`\n"
, "- `RANGE[lo,hi]` : allow objects with the attribute in the range between `lo` and `hi`\n"
, "- `IN[a,b,c,d]` : allow objects with the attribute belonging to one provided.\n\n"
]
instance
( Typeable res
, KnownSymbols syms
, syms ~ ParamNames res params
, HasSwagger subApi
) => HasSwagger (SortBy params res :> subApi) where
toSwagger _ =
let swgr = toSwagger (Proxy @subApi)
in swgr & over (operationsOf swgr . parameters) addSortOperation
where
addSortOperation :: [Referenced Param] -> [Referenced Param]
addSortOperation xs = Inline newParam : xs
newParam :: Param
newParam =
let typeOfRes = fromString $ show $ typeRep (Proxy @ res)
allowedKeys = T.intercalate "," (map toText $ symbolVals (Proxy @syms))
in Param {
_paramName = "sort_by"
, _paramRequired = Just False
, _paramDescription = Just (sortDescription typeOfRes allowedKeys)
, _paramSchema = ParamOther ParamOtherSchema {
_paramOtherSchemaIn = ParamQuery
, _paramOtherSchemaAllowEmptyValue = Just True
, _paramOtherSchemaParamSchema = mempty
}
}
-- | Swagger rendering for 'WalletRequestParams': expands to the underlying
-- pagination parameters and attaches the canned descriptions from
-- 'requestParameterToDescription' to every parameter whose name is known.
instance (HasSwagger subApi) => HasSwagger (WalletRequestParams :> subApi) where
toSwagger _ =
let swgr = toSwagger (Proxy @(WithWalletRequestParams subApi))
in swgr & over (operationsOf swgr . parameters) (map toDescription)
where
-- Only inline parameters get a description; referenced ones are
-- left untouched (their definition lives elsewhere in the spec).
toDescription :: Referenced Param -> Referenced Param
toDescription (Inline p@(_paramName -> pName)) =
case M.lookup pName requestParameterToDescription of
Nothing -> Inline p
Just d -> Inline (p & description .~ Just d)
toDescription x = x
-- Schema instances for wallet identifiers and addresses.
instance ToParamSchema WalletId
-- An address schema is derived directly from its param schema below.
instance ToSchema Core.Address where
declareNamedSchema = pure . paramSchemaToNamedSchema defaultSchemaOptions
-- Addresses are serialised as plain strings in the API.
instance ToParamSchema Core.Address where
toParamSchema _ = mempty
& type_ .~ SwaggerString
-- The V1 wrapper shares the underlying address schema.
instance ToParamSchema (V1 Core.Address) where
toParamSchema _ = toParamSchema (Proxy @Core.Address)
--
-- Descriptions
--
-- | Documentation for custom boolean query flags, keyed by flag name.
customQueryFlagToDescription :: Map T.Text T.Text
customQueryFlagToDescription =
    M.singleton "force_ntp_check" forceNtpCheckDescription
-- | Documentation for the standard pagination query parameters, keyed by
-- parameter name.
requestParameterToDescription :: Map T.Text T.Text
requestParameterToDescription = M.fromList
    [ ("page", pageDescription)
    , ("per_page", perPageDescription maxEntries defaultEntries)
    ]
  where
    -- Bounds rendered as text for interpolation into the description.
    maxEntries = fromString (show maxPerPageEntries)
    defaultEntries = fromString (show defaultPerPageEntries)
-- | Description attached to the @force_ntp_check@ query flag.
forceNtpCheckDescription :: T.Text
forceNtpCheckDescription = [text|
In some cases, API Clients need to force a new NTP check as a previous result gets cached. A typical use-case is after asking a user to fix its system clock. If this flag is set, request will block until NTP server responds or it will timeout if NTP server is not available within a short delay.
|]
-- | Description attached to the @page@ pagination parameter.
pageDescription :: T.Text
pageDescription = [text|
The page number to fetch for this request. The minimum is **1**. If nothing is specified, **this value defaults to 1** and always shows the first entries in the requested collection.
|]
-- | Description attached to the @per_page@ pagination parameter; the two
-- arguments are the maximum and default number of entries, already rendered
-- as text.
perPageDescription :: T.Text -> T.Text -> T.Text
perPageDescription maxValue defaultValue = [text|
The number of entries to display for each page. The minimum is **1**, whereas the maximum is **$maxValue**. If nothing is specified, **this value defaults to $defaultValue**.
|]
-- | Description attached to the @sort_by@ parameter, given the resource
-- name and the comma-separated list of allowed sort keys.
sortDescription :: Text -> Text -> Text
sortDescription resource allowedKeys = [text|
A **SORT** operation on this $resource. Allowed keys: `$allowedKeys`.
|]
-- | Markdown table documenting every error the API can return, with its
-- HTTP status code and a pretty-printed JSON example rendered from a real
-- error value.
errorsDescription :: Text
errorsDescription = [text|
Error Name / Description | HTTP Error code | Example
-------------------------|-----------------|---------
$errors
|] where
errors = T.intercalate "\n" rows
-- One row per known error constructor, grouped by error family.
rows =
-- 'WalletError'
[ mkRow fmtErr $ NotEnoughMoney (ErrAvailableBalanceIsInsufficient 1400)
, mkRow fmtErr $ OutputIsRedeem sampleAddress
, mkRow fmtErr $ UnknownError "Unexpected internal error."
, mkRow fmtErr $ InvalidAddressFormat "Provided address format is not valid."
, mkRow fmtErr WalletNotFound
, mkRow fmtErr $ WalletAlreadyExists exampleWalletId
, mkRow fmtErr AddressNotFound
, mkRow fmtErr $ InvalidPublicKey "Extended public key (for external wallet) is invalid."
, mkRow fmtErr UnsignedTxCreationError
, mkRow fmtErr $ SignedTxSubmitError "Unable to submit externally-signed transaction."
, mkRow fmtErr TooBigTransaction
, mkRow fmtErr TxFailedToStabilize
, mkRow fmtErr TxRedemptionDepleted
, mkRow fmtErr $ TxSafeSignerNotFound sampleAddress
, mkRow fmtErr $ MissingRequiredParams (("wallet_id", "walletId") :| [])
, mkRow fmtErr $ WalletIsNotReadyToProcessPayments genExample
, mkRow fmtErr $ NodeIsStillSyncing genExample
, mkRow fmtErr $ CannotCreateAddress "Cannot create derivation path for new address in external wallet."
, mkRow fmtErr $ RequestThrottled 42
-- 'JSONValidationError'
, mkRow fmtErr $ JSONValidationFailed "Expected String, found Null."
-- 'UnsupportedMimeTypeError'
, mkRow fmtErr $ UnsupportedMimeTypePresent "Expected Content-Type's main MIME-type to be 'application/json'."
, mkRow fmtErr $ UtxoNotEnoughFragmented (ErrUtxoNotEnoughFragmented 1 msgUtxoNotEnoughFragmented)
-- TODO 'MnemonicError' ?
]
-- Join the formatted cells into a Markdown table row.
mkRow fmt err = T.intercalate "|" (fmt err)
-- Cells: error name + human-readable message, HTTP code, JSON example.
fmtErr err =
[ surroundedBy "`" (gconsName err) <> "<br/>" <> toText (sformat build err)
, show $ errHTTPCode $ toServantError err
, inlineCodeBlock (T.decodeUtf8 $ BL.toStrict $ encodePretty err)
]
-- Deterministic dummy address used by the example errors above.
sampleAddress = V1 Core.Address
{ Core.addrRoot =
Crypto.unsafeAbstractHash ("asdfasdf" :: String)
, Core.addrAttributes =
Core.mkAttributes $ Core.AddrAttributes Nothing Core.BootstrapEraDistr NetworkMainOrStage
, Core.addrType =
Core.ATPubKey
}
-- | Shorter version of the doc below, only for Dev & V0 documentations
highLevelShortDescription :: DescriptionEnvironment -> T.Text
highLevelShortDescription DescriptionEnvironment{..} = [text|
This is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain).
Protocol Version | Git Revision
-------------------|-------------------
$deSoftwareVersion | $deGitRevision
|]
-- | Provide additional insights on V1 documentation
highLevelDescription :: DescriptionEnvironment -> T.Text
highLevelDescription DescriptionEnvironment{..} = [text|
This is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain).
Protocol Version | Git Revision
-------------------|-------------------
$deSoftwareVersion | $deGitRevision
Getting Started
===============
In the following examples, we will use *curl* to illustrate request to an API running on the default port **8090**.
Please note that wallet web API uses TLS for secure communication. Requests to the API need to
send a client CA certificate that was used when launching the node and identifies the client as
being permitted to invoke the server API.
Creating a New Wallet
---------------------
You can create your first wallet using the [`POST /api/v1/wallets`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets%2Fpost) endpoint as follow:
```
curl -X POST :8090/api/v1/wallets \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
--cert ./scripts/tls-files/client.pem \
--cacert ./scripts/tls-files/ca.crt \
-d '{
"operation": "create",
"backupPhrase": $deMnemonicExample,
"assuranceLevel": "normal",
"name": "MyFirstWallet",
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
> **Warning**: Those 12 mnemonic words given for the backup phrase act as an example. **Do
> not** use them on a production system. See the section below about mnemonic codes for more
> information.
The `spendingPassword` is optional but highly recommended. It is a string of 32
characters, encoded in base 16, yielding a hexadecimal sequence of 64 bytes.
This passphrase is required for sensitive operations on the wallet and adds
an extra security layer to it.
To generate a valid `spendingPassword`, please follow the following steps:
- Pick a long sentence using a wide variety of characters (uppercase, lowercase,
whitespace, punctuation, etc). Using a computer to randomly generate
a passphrase is best, as humans aren't a good source of randomness.
- Compute an appropriate hash of this passphrase. You'll need to use an
algorithm that yields a 32-byte long string (e.g. *SHA256* or *BLAKE2b*).
- Hex-encode the 32-byte hash into a 64-byte sequence of bytes.
As a response, the API provides you with a unique wallet `id` to be used in subsequent
requests. Make sure to store it / write it down. Note that every API response is
[jsend-compliant](); Sealchain also augments responses with
meta-data specific to pagination. More details in the section below about [Pagination](#section/Pagination)
```json
$createWallet
```
You have just created your first wallet. Information about this wallet can be retrieved using the [`GET /api/v1/wallets/{walletId}`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets~1{walletId}%2Fget)
endpoint as follows:
```
curl -X GET :8090/api/v1/wallets/{{walletId}} \
-H "Accept: application/json; charset=utf-8" \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
Receiving SEAL (or GD)
-------------
To receive _SEAL_ (or GD) from other users you should provide your address. This address can be obtained
from an account. Each wallet contains at least one account. An account is like a pocket inside
of your wallet. View all existing accounts of a wallet by using the [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget)
endpoint:
```
curl -X GET :8090/api/v1/wallets/{{walletId}}/accounts?page=1&per_page=10 \
-H "Accept: application/json; charset=utf-8" \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
Since you have, for now, only a single wallet, you'll see something like this:
```json
$readAccounts
```
All the wallet's accounts are listed under the `addresses` field. You can communicate one of
these addresses to receive _SEAL_(or GD) on the associated account.
Sending SEAL(or GD)
-----------
In order to send _SEAL_(or GD) from one of your accounts to another address, you must create a new
payment transaction using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1payment%2Fpost)
endpoint as follows:
```
curl -X POST :8090/api/v1/transactions/payment \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem \
-d '{
"destinations": [{
"amount": {
"coins": 100000000,
"gds": 100
}
"address": "A7k5bz1QR2...Tx561NNmfF"
}],
"source": {
"accountIndex": 0,
"walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
},
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
Note that, in order to perform a transaction, you need to have enough existing _SEAL_(or GD) on the
source account! The Sealchain API is designed to accommodate multiple-recipient payments
out-of-the-box; notice how `destinations` is a list of addresses (and corresponding amounts).
When the transaction succeeds, funds are no longer available in the sources addresses, and are
soon made available to the destinations within a short delay. Note that, you can at any time see
the status of your wallets by using the [`GET /api/v1/transactions/payment`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget)
endpoint as follows:
```
curl -X GET :8090/api/v1/transactions?wallet_id=Ae2tdPwUPE...8V3AVTnqGZ\
-H "Accept: application/json; charset=utf-8" \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
Here we constrained the request to a specific account. After our previous transaction the output
should look roughly similar to this:
```json
$readTransactions
```
In addition, and because it is not possible to _preview_ a transaction, one can lookup a
transaction's fees using the [`POST /api/v1/transactions/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost)
endpoint to get an estimation of those fees.
See [Estimating Transaction Fees](#section/Common-Use-Cases/Estimating-Transaction-Fees) for more details.
Issue GD
-----------
To increase or decrease GD total supply, The issuer (the GD operator) can create a new
payment transaction using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1issurance%2Fpost)
endpoint as follows:
```
curl -X POST :8090/api/v1/transactions/issurance \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem \
-d '{
"info": {
"increment": 10000000,
"proof": "692068617665206120746f6e206f6620676f6c647320696e204a50204d6f7267616e2e" -- proof in hex
},
"source": {
"accountIndex": 0,
"walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
},
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
Pagination
==========
**All GET requests of the API are paginated by default**. Whilst this can be a source of
surprise, it is the best way of ensuring the performance of GET requests is not affected by the
size of the data storage.
Version `V1` introduced a different way of requesting information to the API. In particular,
GET requests which returns a _collection_ (i.e. typically a JSON array of resources) lists
extra parameters which can be used to modify the shape of the response. In particular, those
are:
* `page`: (Default value: **1**).
* `per_page`: (Default value: **$deDefaultPerPage**)
For a more accurate description, see the section `Parameters` of each GET request, but as a
brief overview the first two control how many results and which results to access in a
paginated request.
Filtering and Sorting
=====================
`GET` endpoints which list collection of resources supports filters & sort operations, which
are clearly marked in the swagger docs with the `FILTER` or `SORT` labels. The query format is
quite simple, and it goes this way:
Filter Operators
----------------
| Operator | Description | Example |
|----------|-------------|---------|
| - | If **no operator** is passed, this is equivalent to `EQ` (see below). | `balance=10` |
| `EQ` | Retrieves the resources with index _equal_ to the one provided. | `balance=EQ[10]` |
| `LT` | Retrieves the resources with index _less than_ the one provided. | `balance=LT[10]` |
| `LTE` | Retrieves the resources with index _less than equal_ the one provided. | `balance=LTE[10]` |
| `GT` | Retrieves the resources with index _greater than_ the one provided. | `balance=GT[10]` |
| `GTE` | Retrieves the resources with index _greater than equal_ the one provided. | `balance=GTE[10]` |
| `RANGE` | Retrieves the resources with index _within the inclusive range_ [k,k]. | `balance=RANGE[10,20]` |
Sort Operators
--------------
| Operator | Description | Example |
|----------|-------------|---------|
| `ASC` | Sorts the resources with the given index in _ascending_ order. | `sort_by=ASC[balance]` |
| `DES` | Sorts the resources with the given index in _descending_ order. | `sort_by=DES[balance]` |
| - | If **no operator** is passed, this is equivalent to `DES` (see above). | `sort_by=balance` |
Errors
======
In case a request cannot be served by the API, a non-2xx HTTP response will be issued, together
with a [JSend-compliant]() JSON Object describing the error
in detail together with a numeric error code which can be used by API consumers to implement
proper error handling in their application. For example, here's a typical error which might be
issued:
``` json
$deErrorExample
```
Existing Wallet Errors
----------------------
$deWalletErrorTable
Monetary Denomination & Units
=============================
Sealchain's platform currency is called _SEAL_. _SEAL_ has up to **8** decimal places; hence the
smallest monetary unit that can be represented in the Sealchain blockchain is: 0.00000001.
Sealchain originally includes a stablecoin called GD (GoldDollar); GD has up to **2** decimal places.
> **Warning**: All amounts manipulated in the API are given and expected in smallest monetary unit.
Mnemonic Codes
==============
The full list of accepted mnemonic codes to secure a wallet is defined by the [BIP-39
specifications](-0039.mediawiki). Note that
picking up 12 random words from the list **is not enough** and leads to poor security. Make
sure to carefully follow the steps described in the protocol when you generate words for a new
wallet.
Versioning & Legacy
===================
The API is **versioned**, meaning that is possible to access different versions of the API by adding the _version number_ in the URL.
**For the sake of backward compatibility, we expose the legacy version of the API, available simply as unversioned endpoints.**
This means that _omitting_ the version number would call the old version of the API. Deprecated
endpoints are currently grouped under an appropriate section; they will be removed in upcoming
releases. If you're starting a new integration with Sealchain, please ignore these.
Note that Compatibility between major versions is not _guaranteed_, i.e. the request & response formats might differ.
Disable TLS (Not Recommended)
-----------------------------
If needed, you can disable TLS by providing the `--no-tls` flag to the wallet or by running a wallet in debug mode with `--wallet-debug` turned on.
Common Use-Cases
================
Sending Money to Multiple Recipients
------------------------------------
As seen in [Sending SEAL](#section/Getting-Started/Sending-SEAL), you can send _SEAL_ to
another party using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fpost) endpoint.
Important to notice is the type of the field `destinations`: it's a list, enabling you to provide more
than one destination. Each destination is composed of:
- An address
- A corresponding amount
The overall transaction corresponds to the sum of each outputs. For instance, to send money to
two parties simultaneously:
```
curl -X POST :8090/api/v1/transactions \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem \
-d '{
"destinations": [
{
"amount": 14,
"address": "A7k5bz1QR2...Tx561NNmfF"
},
{
"amount": 42,
"address": "B56n78WKE8...jXAa34NUFz"
}
],
"source": {
"accountIndex": 0,
"walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
},
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
About UTXO Fragmentation
------------------------
As described in [Sending Money to Multiple Recipients](#section/Common-Use-Cases/Sending-Money-to-Multiple-Recipients), it is possible to send SEAL to more than one destination. Sealchain only allows a given UTXO to cover at most one single transaction output. As a result,
when the number of transaction outputs is greater than the number of available UTXOs, the API returns a `UtxoNotEnoughFragmented` error which
looks like the following
```
{
"status": "error",
"diagnostic": {
"details": {
"help": "Utxo is not enough fragmented to handle the number of outputs of this transaction. Query /api/v1/wallets/{walletId}/statistics/utxos endpoint for more information",
"missingUtxos": 1
}
},
"message": "UtxoNotEnoughFragmented"
}
```
To make sure the source account has a sufficient level of UTXO fragmentation (i.e. number of UTXOs),
please monitor the state of the UTXOs as described in [Getting UTXO Statistics](#section/Common-Use-Cases/Getting-Utxo-Statistics). The
number of wallet UTXOs should be no less than the transaction outputs, and the sum of all UTXOs should be enough to cover the total
transaction amount, including fees.
Contrary to a classic accounting model, there's no such thing as spending _part of a UTXO_, and one has to wait for a transaction to be included in a
block before spending the remaining change. This is very similar to using bank notes: one can't spend a USD 20 bill at two different shops at the same time,
even if it is enough to cover both purchases — one has to wait for change from the first transaction before making the second one.
There's no "ideal" level of fragmentation; it depends on one's needs. However, the more UTXOs that are available, the higher the concurrency capacity
of one's wallet, allowing multiple transactions to be made at the same time.
Similarly, there's no practical maximum number of UTXOs, but there is nevertheless a maximum transaction size. By having many small UTXOs,
one is taking the risk of hitting that restriction, should too many inputs be selected to fill a transaction. The only way to
work around this is to make multiple smaller transactions.
Estimating Transaction Fees
---------------------------
When you submit a transaction to the network, some fees apply depending on, but not only, the
selected grouping policy and the available inputs on the source wallet. There's actually a
trade-off between fees, cryptographic security, throughput and privacy. The more inputs are
selected, the bigger is the payload, the bigger are the fees.
The API lets you estimate fees for a given transaction via the [`POST /api/v1/transaction/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost)
endpoint. The request payload is identical to the one you would make to create a transaction:
```
curl -X POST :8090/api/v1/transactions/fees \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem \
-d '{
"destinations": [{
"amount": 14,
"address": "A7k5bz1QR2...Tx561NNmfF"
}],
"source": {
"accountIndex": 0,
"walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
}
}'
```
The API resolves with an estimated amount in _SEAL_. This estimation highly depends on the
current state of the ledger and diverges with time.
```json
$readFees
```
Managing Accounts
-----------------
A wallet isn't limited to one account. It can actually be useful to have more than one account
in order to separate business activities. With the API, you can retrieve a specific account,
create new ones, list all existing accounts of a wallet or edit a few things on an existing
account. By default, your wallet comes with a provided account. Let's see how to create a fresh
new account on a wallet using [`POST /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fpost):
```
curl -X POST \
:8090/api/v1/Ae2tdPwUPE...8V3AVTnqGZ/accounts \
-H 'Content-Type: application/json;charset=utf-8' \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem \
-d '{
"name": "MyOtherAccount",
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
Note that the `spendingPassword` here should match the one provided earlier in [Creating a
New Wallet](#section/Getting-Started/Creating-a-New-Wallet).
```json
$createAccount
```
You can always retrieve this account description later if needed via [`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts~1{accountId}%2Fget).
For example:
```
curl -X GET \
:8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts/2902829384 \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
For a broader view, the full list of accounts of a given wallet can be retrieved using [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget)
```
curl -X GET \
:8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
```json
$readAccounts
```
Partial Representations
-----------------------
The previous endpoint gives you a list of full representations. However, in some cases, it might be interesting to retrieve only a partial representation of an account (e.g. only the balance). There are two extra endpoints one could use to either fetch a given account's balance, and another to retrieve the list of addresses associated to a specific account.
[`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}/addresses`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1%7BwalletId%7D~1accounts~1%7BaccountId%7D~1addresses%2Fget)
```json
$readAccountAddresses
```
Note that this endpoint is paginated and allow basic filtering and sorting on
addresses. Similarly, you can retrieve only the account balance with:
[`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}/amount`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1%7BwalletId%7D~1accounts~1%7BaccountId%7D~1amount%2Fget)
```json
$readAccountBalance
```
Managing Addresses
------------------
By default, wallets you create are provided with an account which has one default address. It
is possible (and recommended) for an account to manage multiple addresses. Address reuse
actually reduces privacy as it ties more transactions to a small set of addresses.
When paying, the wallet makes many of these choices for you. Addresses are
selected from a wallet's account based on several different strategies and
policies.
To create a new address, use the [`POST /api/v1/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1addresses%2Fpost)
endpoint:
```
curl -X POST \
:8090/api/v1/addresses \
-H 'Content-Type: application/json;charset=utf-8' \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem \
-d '{
"walletId": "Ae2tdPwUPE...V3AVTnqGZ4",
"accountIndex": 2147483648
}'
```
```json
$createAddress
```
If your wallet is protected with a password, this password is also required in order to create
new addresses for that wallet. In such case, the field `spendingPassword` should match the one
defined earlier to protect your wallet.
Addresses generated as just described are always valid. When the API encounters
an invalid address however (e.g. when provided by another party), it will fail with a
client error.
You can always view all your available addresses across all your wallets by using
[`GET /api/v1/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1addresses%2Fget):
```
curl -X GET :8090/api/v1/addresses \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
```json
$readAddresses
```
Checking Synchronization Progress
---------------------------------
You can control the synchronization progress of the underlying node hosting the wallet's server
via [`GET /api/v1/node-info`](#tag/Info%2Fpaths%2F~1api~1v1~1node-info%2Fget). The output is
rather verbose and gives real-time progress updates about the current node.
```
curl -X GET :8090/api/v1/node-info \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
```json
$readNodeInfo
```
Retrieving Transaction History
------------------------------
If needed, applications may regularly poll the wallet's backend to retrieve the history of
transactions of a given wallet. Using the [`GET /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget)
endpoint, you can view the status of all transactions that ever sent or took money from the
wallet.
The following table sums up the available filters (also detailed in the endpoint documentation details):
Filter On | Corresponding Query Parameter(s)
----------------------------| ------------------------------
Wallet | `wallet_id`
Wallet's account | `account_index` + `wallet_id`
Address | `address`
Transaction's creation time | `created_at`
Transaction's id | `id`
For example, in order to retrieve the last 50 transactions of a particular account,
ordered by descending date:
```
curl -X GET :8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&account_index=2902829384&sort_by=DES\[created_at\]&per_page=50' \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
For example, in order to retrieve the last 50 transactions, ordered by descending date:
```
curl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ &sort_by=DES\[created_at\]&per_page=50' \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
Another example, if you were to look for all transactions made since the 1st of January 2018:
```
curl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&created_at=GT\[2018-01-01T00:00:00.00000\]' \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
Getting Utxo statistics
---------------------------------
You can get Utxo statistics of a given wallet using
[`GET /api/v1/wallets/{{walletId}}/statistics/utxos`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1statistics~1utxos%2Fget)
```
curl -X GET \
:8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/statistics/utxos \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem
```
```json
$readUtxoStatistics
```
Make sure to carefully read the section about [Pagination](#section/Pagination) to fully
leverage the API capabilities.
Importing (Unused) Addresses From a Previous Node (or Version)
--------------------------------------------------------------
When restoring a wallet, only the information available on the blockchain can
be retrieved. Some pieces of information aren't stored on
the blockchain and are only defined as _Metadata_ of the wallet backend. This
includes:
- The wallet's name
- The wallet's assurance level
- The wallet's spending password
- The wallet's unused addresses
Unused addresses are not recorded on the blockchain and, in the case of random
derivation, it is unlikely that the same addresses will be generated on two
different node instances. However, some API users may wish to preserve unused
addresses between different instances of the wallet backend.
To enable this, the wallet backend provides an endpoint ([`POST /api/v1/wallets/{{walletId}}/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1addresses%2Fpost))
to import a list of addresses into a given account. Note that this endpoint is
quite lenient when it comes to errors: it tries to import all provided addresses
one by one, and ignores any that can't be imported for whatever reason. The
server will respond with the total number of successes and, if any, a list of
addresses that failed to be imported. Trying to import an address that is already
present will behave as a no-op.
For example:
```
curl -X POST \
:8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/addresses \
-H 'Accept: application/json;charset=utf-8' \
--cacert ./scripts/tls-files/ca.crt \
--cert ./scripts/tls-files/client.pem \
-d '[
"Ae2tdPwUPE...8V3AVTnqGZ",
"Ae2odDwvbA...b6V104CTV8"
]'
```
> **IMPORTANT**: This feature is experimental and performance is
> not guaranteed. Users are advised to import small batches only.
|]
where
createAccount = decodeUtf8 $ encodePretty $ genExample @(APIResponse Account)
createAddress = decodeUtf8 $ encodePretty $ genExample @(APIResponse WalletAddress)
createWallet = decodeUtf8 $ encodePretty $ genExample @(APIResponse Wallet)
readAccounts = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Account])
readAccountBalance = decodeUtf8 $ encodePretty $ genExample @(APIResponse AccountBalance)
readAccountAddresses = decodeUtf8 $ encodePretty $ genExample @(APIResponse AccountAddresses)
readAddresses = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Address])
readFees = decodeUtf8 $ encodePretty $ genExample @(APIResponse EstimatedFees)
readNodeInfo = decodeUtf8 $ encodePretty $ genExample @(APIResponse NodeInfo)
readTransactions = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Transaction])
readUtxoStatistics = decodeUtf8 $ encodePretty $ genExample @(APIResponse UtxoStatistics)
-- | Provide an alternative UI (ReDoc) for rendering Swagger documentation.
swaggerSchemaUIServer
:: (Server api ~ Handler Swagger)
=> Swagger -> Server (SwaggerSchemaUI' dir api)
swaggerSchemaUIServer =
swaggerSchemaUIServerImpl redocIndexTemplate redocFiles
where
-- Minimal HTML page embedding the ReDoc viewer.
-- NOTE(review): SERVANT_SWAGGER_UI_SCHEMA appears to be the
-- servant-swagger-ui placeholder for the schema URL — confirm against
-- the library's docs before changing the template.
redocIndexTemplate :: Text
redocIndexTemplate = [text|
<!doctype html>
<html lang="en">
<head>
<title>ReDoc</title>
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body { margin: 0; padding: 0; }
</style>
<script>
// Force Strict-URL Routing for assets relative paths
(function onload() {
if (!window.location.pathname.endsWith("/")) {
window.location.pathname += "/";
}
}());
</script>
</head>
<body>
<redoc spec-url="../SERVANT_SWAGGER_UI_SCHEMA"></redoc>
<script src="redoc.min.js"> </script>
</body>
</html>|]
-- | Markdown description attached to @POST /api/internal/apply-update@
-- (see 'api').
applyUpdateDescription :: Text
applyUpdateDescription = [text|
Apply the next available update proposal from the blockchain. Note that this
will immediately shutdown the node and makes it unavailable for a short while.
|]
-- | Markdown description attached to @POST /api/internal/postpone-update@
-- (see 'api').
postponeUpdateDescription :: Text
postponeUpdateDescription = [text|
Discard the next available update from the node's local state. Yet, this doesn't
reject the update which will still be applied as soon as the node is restarted.
|]
-- | Markdown description attached to @DELETE /api/internal/reset-wallet-state@
-- (see 'api').
resetWalletStateDescription :: Text
resetWalletStateDescription = [text|
Wipe-out the node's local state entirely. The only intended use-case for this
endpoint is during API integration testing. Note also that this will fail by
default unless the node is running in debug mode.
|]
-- | Markdown description attached to @POST /api/v1/transactions/fees@
-- (see 'api').
estimateFeesDescription :: Text
estimateFeesDescription = [text|
Estimate the fees which would incur from the input payment. This endpoint
**does not** require a _spending password_ to be supplied as it generates
under the hood an unsigned transaction.
|]
-- | Markdown description attached to @GET /api/v1/addresses/{address}@
-- (see 'api').
getAddressDescription :: Text
getAddressDescription = [text|
The previous version of this endpoint failed with an HTTP error when the given
address was unknown to the wallet.
This was misleading since an address that is unknown to the wallet may still
belong to the wallet (since it could be part of a pending transaction in
another instance of the same wallet).
To reflect this, the V1 endpoint does not fail when an address is not recognised
and returns a new field which indicates the address' ownership status, from the
node point of view.
|]
--
-- The API
--
-- | Values interpolated into the Markdown description templates of the API
-- (see 'highLevelShortDescription' and the full description passed to 'api').
data DescriptionEnvironment = DescriptionEnvironment
  { deErrorExample     :: !T.Text
    -- ^ Pretty-printed JSON example of an API error response
  , deDefaultPerPage   :: !T.Text
    -- ^ Rendered default number of entries per page for paginated endpoints
  , deWalletErrorTable :: !T.Text
    -- ^ Markdown table of the known wallet errors ('errorsDescription')
  , deGitRevision      :: !T.Text
    -- ^ Git revision the node was compiled from
  , deSoftwareVersion  :: !T.Text
    -- ^ Rendered protocol (software) version number
  , deMnemonicExample  :: !T.Text
    -- ^ JSON example of a backup phrase
  }
-- | Assemble the top-level 'Swagger' spec for the wallet API: fill in the
-- metadata (title, version, host, license), render the high-level Markdown
-- description, and patch hand-written descriptions onto a few specific
-- operations via 'setDescription'.
api :: HasSwagger a
    => (CompileTimeInfo, SoftwareVersion)
    -- ^ Compile-time info and current software version of the node
    -> Proxy a
    -- ^ The servant API to document
    -> (DescriptionEnvironment -> T.Text)
    -- ^ Renders the top-level Markdown description from the environment
    -> Swagger
api (compileInfo, curSoftwareVersion) walletAPI mkDescription = toSwagger walletAPI
  & info.title   .~ "Sealchain Wallet API"
  & info.version .~ fromString (show curSoftwareVersion)
  & host ?~ "127.0.0.1:8090"
  & info.description ?~ mkDescription DescriptionEnvironment
      { deErrorExample = decodeUtf8 $ encodePretty WalletNotFound
      , deMnemonicExample = decodeUtf8 $ encode (genExample @BackupPhrase)
      , deDefaultPerPage = fromString (show defaultPerPageEntries)
      , deWalletErrorTable = errorsDescription
      , deGitRevision = ctiGitRevision compileInfo
      , deSoftwareVersion = fromString $ show (svNumber curSoftwareVersion)
      }
  -- NOTE(review): the LICENSE URL below looks truncated (it starts with
  -- "-project") — confirm the full URL against the repository.
  & info.license ?~ ("MIT" & url ?~ URL "-project/sealchain/develop/LICENSE")
  & paths %~ (POST, "/api/internal/apply-update") `setDescription` applyUpdateDescription
  & paths %~ (POST, "/api/internal/postpone-update") `setDescription` postponeUpdateDescription
  & paths %~ (DELETE, "/api/internal/reset-wallet-state") `setDescription` resetWalletStateDescription
  & paths %~ (POST, "/api/v1/transactions/fees") `setDescription` estimateFeesDescription
  & paths %~ (GET, "/api/v1/addresses/{address}") `setDescription` getAddressDescription
| null | https://raw.githubusercontent.com/sealchain-project/sealchain/e97b4bac865fb147979cb14723a12c716a62e51e/wallet/src/Cardano/Wallet/API/V1/Swagger.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE QuasiQuotes #
# LANGUAGE RankNTypes #
# LANGUAGE TypeFamilies #
Helper functions
| Surround a Text with another
| Display a multi-line code-block inline (e.g. in tables)
| Drill in the 'Swagger' file in an unsafe way to modify a specific operation
identified by a tuple (verb, path). The function looks a bit scary to use
but is actually rather simple (see example below).
Note that if the identified path doesn't exist, the function will throw
at runtime when trying to read the underlying swagger structure!
Example:
swagger
| A combinator to modify the description of an operation, using
'alterOperation' under the hood.
Example:
swagger
Instances
-----------------------|-----------------|---------
'WalletError'
'JSONValidationError'
'UnsupportedMimeTypeError'
TODO 'MnemonicError' ?
-----------------|-------------------
| Provide additional insights on V1 documentation
-----------------|-------------------
-------------------
cert ./scripts/tls-files/client.pem \
cacert ./scripts/tls-files/ca.crt \
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
-----------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
---------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem \
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
---------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem \
proof in hex
--------------
------------
--------------------
---------------------------
no-tls` flag to the wallet or by running a wallet in debug mode with `--wallet-debug` turned on.
----------------------------------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem \
----------------------
-------------------------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem \
---------------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem \
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
---------------------
----------------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem \
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
-------------------------------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
----------------------------
--------------------------| ------------------------------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
-------------------------------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem
------------------------------------------------------------
cacert ./scripts/tls-files/ca.crt \
cert ./scripts/tls-files/client.pem \
| Provide an alternative UI (ReDoc) for rendering Swagger documentation.
The API
| # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
module Cardano.Wallet.API.V1.Swagger where
import Universum hiding (get, put)
import Cardano.Wallet.API.Indices (ParamNames)
import Cardano.Wallet.API.Request.Filter
import Cardano.Wallet.API.Request.Pagination
import Cardano.Wallet.API.Request.Sort
import Cardano.Wallet.API.Response
import Cardano.Wallet.API.V1.Generic (gconsName)
import Cardano.Wallet.API.V1.Parameters
import Cardano.Wallet.API.V1.Swagger.Example
import Cardano.Wallet.API.V1.Types
import Cardano.Wallet.TypeLits (KnownSymbols (..))
import Pos.Chain.Update (SoftwareVersion (svNumber))
import Pos.Core.NetworkMagic (NetworkMagic (..))
import Pos.Util.CompileInfo (CompileTimeInfo, ctiGitRevision)
import Pos.Util.Servant (LoggingApi)
import Control.Lens (At, Index, IxValue, at, (?~))
import Data.Aeson (encode)
import Data.Aeson.Encode.Pretty
import Data.Map (Map)
import Data.Swagger hiding (Example)
import Data.Typeable
import Formatting (build, sformat)
import NeatInterpolation
import Servant (Handler, ServantErr (..), Server, StdMethod (..))
import Servant.API.Sub
import Servant.Swagger
import Servant.Swagger.UI (SwaggerSchemaUI')
import Servant.Swagger.UI.Core (swaggerSchemaUIServerImpl)
import Servant.Swagger.UI.ReDoc (redocFiles)
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Pos.Core as Core
import qualified Pos.Core.Attributes as Core
import qualified Pos.Crypto.Hashing as Crypto
-- | Surround a piece of text with the same delimiter on both sides.
surroundedBy :: Text -> Text -> Text
surroundedBy delim body = mconcat [delim, body, delim]
-- | Display a multi-line code-block inline (e.g. in tables): wrap the text in
-- @\<pre\>@ tags, turning newlines into HTML line breaks and spaces into
-- non-breaking spaces so the layout survives Markdown/HTML rendering.
--
-- Fix: the space replacement had degraded into the no-op
-- @T.replace " " " "@; restore the intended @"&nbsp;"@ target, consistent
-- with the @"\n" -> "\<br/\>"@ replacement alongside it.
inlineCodeBlock :: Text -> Text
inlineCodeBlock txt = "<pre>" <> replaceNewLines (replaceWhiteSpaces txt) <> "</pre>"
  where
    replaceNewLines    = T.replace "\n" "<br/>"
    replaceWhiteSpaces = T.replace " " "&nbsp;"
& paths % ~ ( POST , " /api / v1 / wallets " ) ` alterOperation ` ( description ? ~ " foo " )
& paths % ~ ( GET , " /api / v1 / wallets/{walletId } " ) ` alterOperation ` ( description ? ~ " bar " )
alterOperation ::
( IxValue m ~ item
, Index m ~ FilePath
, At m
, HasGet item (Maybe Operation)
, HasPut item (Maybe Operation)
, HasPatch item (Maybe Operation)
, HasPost item (Maybe Operation)
, HasDelete item (Maybe Operation)
)
=> (StdMethod, FilePath)
-> (Operation -> Operation)
-> m
-> m
alterOperation (verb, path) alter =
at path %~ (Just . unsafeAlterItem)
where
errUnreachableEndpoint :: Text
errUnreachableEndpoint =
"Unreachable endpoint: " <> show verb <> " " <> show path
errUnsupportedVerb :: Text
errUnsupportedVerb =
"Used unsupported verb to identify an endpoint: " <> show verb
unsafeAlterItem ::
( HasGet item (Maybe Operation)
, HasPut item (Maybe Operation)
, HasPatch item (Maybe Operation)
, HasPost item (Maybe Operation)
, HasDelete item (Maybe Operation)
)
=> Maybe item
-> item
unsafeAlterItem = maybe
(error errUnreachableEndpoint)
(unsafeLensFor verb %~ (Just . unsafeAlterOperation))
unsafeAlterOperation :: Maybe Operation -> Operation
unsafeAlterOperation = maybe
(error errUnreachableEndpoint)
alter
unsafeLensFor ::
( Functor f
, HasGet item (Maybe Operation)
, HasPut item (Maybe Operation)
, HasPatch item (Maybe Operation)
, HasPost item (Maybe Operation)
, HasDelete item (Maybe Operation)
)
=> StdMethod
-> (Maybe Operation -> f (Maybe Operation))
-> item
-> f item
unsafeLensFor = \case
GET -> get
PUT -> put
PATCH -> patch
POST -> post
DELETE -> delete
_ -> error errUnsupportedVerb
& paths % ~ ( POST , " /api / v1 / wallets " ) ` setDescription ` " foo "
& paths % ~ ( GET , " /api / v1 / wallets/{walletId } " ) ` setDescription ` " bar "
setDescription
:: (IxValue m ~ PathItem, Index m ~ FilePath, At m)
=> (StdMethod, FilePath)
-> Text
-> m
-> m
setDescription endpoint str =
endpoint `alterOperation` (description ?~ str)
-- | Logging does not change the shape of the API: delegate to the wrapped API.
instance HasSwagger a => HasSwagger (LoggingApi config a) where
    toSwagger _ = toSwagger (Proxy @a)
-- | Document the query parameters introduced by a 'FilterBy' combinator:
-- one optional query parameter is added per filterable index of the resource,
-- each carrying a description of the supported filter operators.
instance
    ( Typeable res
    , KnownSymbols syms
    , HasSwagger subApi
    , syms ~ ParamNames res params
    ) => HasSwagger (FilterBy params res :> subApi) where
    toSwagger _ =
        let swgr   = toSwagger (Proxy @subApi)
            allOps = map toText $ symbolVals (Proxy @syms)
        in swgr & over (operationsOf swgr . parameters) (addFilterOperations allOps)
      where
        -- Prepend one query parameter per filterable attribute name.
        addFilterOperations :: [Text] -> [Referenced Param] -> [Referenced Param]
        addFilterOperations ops xs = map (Inline . newParam) ops <> xs

        -- Build the swagger 'Param' for one filterable attribute.
        newParam :: Text -> Param
        newParam opName =
            let typeOfRes = fromString $ show $ typeRep (Proxy @ res)
            in Param {
                  _paramName = opName
                , _paramRequired = Nothing
                , _paramDescription = Just $ filterDescription typeOfRes
                , _paramSchema = ParamOther ParamOtherSchema {
                      _paramOtherSchemaIn = ParamQuery
                    , _paramOtherSchemaAllowEmptyValue = Nothing
                    , _paramOtherSchemaParamSchema = mempty
                    }
                }

        -- Markdown help text listing the supported filter operators.
        filterDescription :: Text -> Text
        filterDescription typeOfRes = mconcat
            [ "A **FILTER** operation on a " <> typeOfRes <> ". "
            , "Filters support a variety of queries on the resource. "
            , "These are: \n\n"
            , "- `EQ[value]` : only allow values equal to `value`\n"
            , "- `LT[value]` : allow resource with attribute less than the `value`\n"
            , "- `GT[value]` : allow objects with an attribute greater than the `value`\n"
            , "- `GTE[value]` : allow objects with an attribute at least the `value`\n"
            , "- `LTE[value]` : allow objects with an attribute at most the `value`\n"
            , "- `RANGE[lo,hi]` : allow objects with the attribute in the range between `lo` and `hi`\n"
            , "- `IN[a,b,c,d]` : allow objects with the attribute belonging to one provided.\n\n"
            ]
-- | Document the query parameter introduced by a 'SortBy' combinator: a
-- single optional @sort_by@ query parameter listing the sortable keys.
instance
    ( Typeable res
    , KnownSymbols syms
    , syms ~ ParamNames res params
    , HasSwagger subApi
    ) => HasSwagger (SortBy params res :> subApi) where
    toSwagger _ =
        let swgr = toSwagger (Proxy @subApi)
        in swgr & over (operationsOf swgr . parameters) addSortOperation
      where
        -- Prepend the single @sort_by@ parameter to every operation.
        addSortOperation :: [Referenced Param] -> [Referenced Param]
        addSortOperation xs = Inline newParam : xs

        -- Build the swagger 'Param' for @sort_by@, enumerating allowed keys.
        newParam :: Param
        newParam =
            let typeOfRes   = fromString $ show $ typeRep (Proxy @ res)
                allowedKeys = T.intercalate "," (map toText $ symbolVals (Proxy @syms))
            in Param {
                  _paramName = "sort_by"
                , _paramRequired = Just False
                , _paramDescription = Just (sortDescription typeOfRes allowedKeys)
                , _paramSchema = ParamOther ParamOtherSchema {
                      _paramOtherSchemaIn = ParamQuery
                    , _paramOtherSchemaAllowEmptyValue = Just True
                    , _paramOtherSchemaParamSchema = mempty
                    }
                }
-- | Expand 'WalletRequestParams' into its concrete pagination parameters and
-- attach the descriptions from 'requestParameterToDescription' to each one.
instance (HasSwagger subApi) => HasSwagger (WalletRequestParams :> subApi) where
    toSwagger _ =
        let swgr = toSwagger (Proxy @(WithWalletRequestParams subApi))
        in swgr & over (operationsOf swgr . parameters) (map toDescription)
      where
        -- Attach a description when the parameter name has one registered;
        -- leave referenced (non-inline) parameters untouched.
        toDescription :: Referenced Param -> Referenced Param
        toDescription (Inline p@(_paramName -> pName)) =
            case M.lookup pName requestParameterToDescription of
                Nothing -> Inline p
                Just d  -> Inline (p & description .~ Just d)
        toDescription x = x
-- | 'WalletId' uses the default parameter schema.
instance ToParamSchema WalletId

-- | Derive the named schema for addresses from their parameter schema.
instance ToSchema Core.Address where
    declareNamedSchema = pure . paramSchemaToNamedSchema defaultSchemaOptions

-- | Addresses are represented as plain swagger strings.
instance ToParamSchema Core.Address where
    toParamSchema _ = mempty
        & type_ .~ SwaggerString

-- | The 'V1' wrapper keeps the underlying address representation.
instance ToParamSchema (V1 Core.Address) where
    toParamSchema _ = toParamSchema (Proxy @Core.Address)
--
-- Descriptions
--

-- | Descriptions for custom query flags, keyed by flag name.
customQueryFlagToDescription :: Map T.Text T.Text
customQueryFlagToDescription = M.fromList [
    ("force_ntp_check", forceNtpCheckDescription)
  ]
-- | Descriptions for the pagination request parameters, keyed by parameter
-- name; consumed by the 'WalletRequestParams' 'HasSwagger' instance.
requestParameterToDescription :: Map T.Text T.Text
requestParameterToDescription = M.fromList [
    ("page", pageDescription)
  , ("per_page", perPageDescription (fromString $ show maxPerPageEntries) (fromString $ show defaultPerPageEntries))
  ]
-- | Description of the @force_ntp_check@ query flag.
forceNtpCheckDescription :: T.Text
forceNtpCheckDescription = [text|
In some cases, API Clients need to force a new NTP check as a previous result gets cached. A typical use-case is after asking a user to fix its system clock. If this flag is set, request will block until NTP server responds or it will timeout if NTP server is not available within a short delay.
|]
-- | Description of the @page@ pagination parameter.
pageDescription :: T.Text
pageDescription = [text|
The page number to fetch for this request. The minimum is **1**. If nothing is specified, **this value defaults to 1** and always shows the first entries in the requested collection.
|]
-- | Description of the @per_page@ pagination parameter; takes the rendered
-- maximum and default number of entries per page (see
-- 'requestParameterToDescription' for the call site).
perPageDescription :: T.Text -> T.Text -> T.Text
perPageDescription maxValue defaultValue = [text|
The number of entries to display for each page. The minimum is **1**, whereas the maximum is **$maxValue**. If nothing is specified, **this value defaults to $defaultValue**.
|]
-- | Description of the @sort_by@ parameter for a given resource name and its
-- comma-separated list of sortable keys.
sortDescription :: Text -> Text -> Text
sortDescription resource allowedKeys = [text|
A **SORT** operation on this $resource. Allowed keys: `$allowedKeys`.
|]
-- | Markdown table enumerating the known wallet errors with, for each, its
-- HTTP status code and a pretty-printed JSON example generated from an actual
-- error value.
--
-- Fix: the Markdown header-separator row inside the quasiquote had been lost
-- (it begins with "--" and was stripped); without it the table does not
-- render as a table. Restore it.
errorsDescription :: Text
errorsDescription = [text|
Error Name / Description | HTTP Error code | Example
-----------------------|-----------------|---------
$errors
|] where
  errors = T.intercalate "\n" rows
  -- One table row per representative error value.
  rows =
    [ mkRow fmtErr $ NotEnoughMoney (ErrAvailableBalanceIsInsufficient 1400)
    , mkRow fmtErr $ OutputIsRedeem sampleAddress
    , mkRow fmtErr $ UnknownError "Unexpected internal error."
    , mkRow fmtErr $ InvalidAddressFormat "Provided address format is not valid."
    , mkRow fmtErr WalletNotFound
    , mkRow fmtErr $ WalletAlreadyExists exampleWalletId
    , mkRow fmtErr AddressNotFound
    , mkRow fmtErr $ InvalidPublicKey "Extended public key (for external wallet) is invalid."
    , mkRow fmtErr UnsignedTxCreationError
    , mkRow fmtErr $ SignedTxSubmitError "Unable to submit externally-signed transaction."
    , mkRow fmtErr TooBigTransaction
    , mkRow fmtErr TxFailedToStabilize
    , mkRow fmtErr TxRedemptionDepleted
    , mkRow fmtErr $ TxSafeSignerNotFound sampleAddress
    , mkRow fmtErr $ MissingRequiredParams (("wallet_id", "walletId") :| [])
    , mkRow fmtErr $ WalletIsNotReadyToProcessPayments genExample
    , mkRow fmtErr $ NodeIsStillSyncing genExample
    , mkRow fmtErr $ CannotCreateAddress "Cannot create derivation path for new address in external wallet."
    , mkRow fmtErr $ RequestThrottled 42
    , mkRow fmtErr $ JSONValidationFailed "Expected String, found Null."
    , mkRow fmtErr $ UnsupportedMimeTypePresent "Expected Content-Type's main MIME-type to be 'application/json'."
    , mkRow fmtErr $ UtxoNotEnoughFragmented (ErrUtxoNotEnoughFragmented 1 msgUtxoNotEnoughFragmented)
    ]
  -- Join the three rendered cells of a row with the Markdown column separator.
  mkRow fmt err = T.intercalate "|" (fmt err)
  -- Render an error as [name + message, HTTP code, JSON example] cells.
  fmtErr err =
    [ surroundedBy "`" (gconsName err) <> "<br/>" <> toText (sformat build err)
    , show $ errHTTPCode $ toServantError err
    , inlineCodeBlock (T.decodeUtf8 $ BL.toStrict $ encodePretty err)
    ]
  -- A syntactically valid address used by the address-related examples.
  sampleAddress = V1 Core.Address
    { Core.addrRoot =
        Crypto.unsafeAbstractHash ("asdfasdf" :: String)
    , Core.addrAttributes =
        Core.mkAttributes $ Core.AddrAttributes Nothing Core.BootstrapEraDistr NetworkMainOrStage
    , Core.addrType =
        Core.ATPubKey
    }
-- | Shorter version of the doc below, only for Dev & V0 documentations.
--
-- Fix: the Markdown header-separator row of the version table had been lost
-- (it begins with "--" and was stripped); restore it so the table renders.
highLevelShortDescription :: DescriptionEnvironment -> T.Text
highLevelShortDescription DescriptionEnvironment{..} = [text|
This is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain).

Protocol Version | Git Revision
-----------------|-------------------
$deSoftwareVersion | $deGitRevision
|]
highLevelDescription :: DescriptionEnvironment -> T.Text
highLevelDescription DescriptionEnvironment{..} = [text|
This is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain).
Protocol Version | Git Revision
$deSoftwareVersion | $deGitRevision
Getting Started
===============
In the following examples, we will use *curl* to illustrate request to an API running on the default port **8090**.
Please note that wallet web API uses TLS for secure communication. Requests to the API need to
send a client CA certificate that was used when launching the node and identifies the client as
being permitted to invoke the server API.
Creating a New Wallet
You can create your first wallet using the [`POST /api/v1/wallets`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets%2Fpost) endpoint as follow:
```
curl -X POST :8090/api/v1/wallets \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
-d '{
"operation": "create",
"backupPhrase": $deMnemonicExample,
"assuranceLevel": "normal",
"name": "MyFirstWallet",
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
> **Warning**: Those 12 mnemonic words given for the backup phrase act as an example. **Do
> not** use them on a production system. See the section below about mnemonic codes for more
> information.
The `spendingPassword` is optional but highly recommended. It a string of 32
characters, encoded in base 16, yielding to an hexadecimal sequence of 64 bytes.
This passphrase is required for sensitive operations on the wallet and adds
an extra security layer to it.
To generate a valid `spendingPassword`, please follow the following steps:
- Pick a long sentence using a wide variety of characters (uppercase, lowercase,
whitespace, punctuation, etc). Using a computer to randomly generate
a passphrase is best, as humans aren't a good source of randomness.
- Compute an appropriate hash of this passphrase. You'll need to use an
algorithm that yields a 32-byte long string (e.g. *SHA256* or *BLAKE2b*).
- Hex-encode the 32-byte hash into a 64-byte sequence of bytes.
As a response, the API provides you with a unique wallet `id` to be used in subsequent
requests. Make sure to store it / write it down. Note that every API response is
[jsend-compliant](); Sealchain also augments responses with
meta-data specific to pagination. More details in the section below about [Pagination](#section/Pagination)
```json
$createWallet
```
You have just created your first wallet. Information about this wallet can be retrieved using the [`GET /api/v1/wallets/{walletId}`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets~1{walletId}%2Fget)
endpoint as follows:
```
curl -X GET :8090/api/v1/wallets/{{walletId}} \
-H "Accept: application/json; charset=utf-8" \
```
Receiving SEAL (or GD)
To receive _SEAL_ (or GD) from other users you should provide your address. This address can be obtained
from an account. Each wallet contains at least one account. An account is like a pocket inside
of your wallet. Vew all existing accounts of a wallet by using the [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget)
endpoint:
```
curl -X GET :8090/api/v1/wallets/{{walletId}}/accounts?page=1&per_page=10 \
-H "Accept: application/json; charset=utf-8" \
```
Since you have, for now, only a single wallet, you'll see something like this:
```json
$readAccounts
```
All the wallet's accounts are listed under the `addresses` field. You can communicate one of
these addresses to receive _SEAL_(or GD) on the associated account.
Sending SEAL(or GD)
In order to send _SEAL_(or GD) from one of your accounts to another address, you must create a new
payment transaction using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1payment%2Fpost)
endpoint as follows:
```
curl -X POST :8090/api/v1/transactions/payment \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
-d '{
"destinations": [{
"amount": {
"coins": 100000000,
"gds": 100
}
"address": "A7k5bz1QR2...Tx561NNmfF"
}],
"source": {
"accountIndex": 0,
"walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
},
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
Note that, in order to perform a transaction, you need to have enough existing _SEAL_(or GD) on the
source account! The Sealchain API is designed to accomodate multiple recipients payments
out-of-the-box; notice how `destinations` is a list of addresses (and corresponding amounts).
When the transaction succeeds, funds are no longer available in the sources addresses, and are
soon made available to the destinations within a short delay. Note that, you can at any time see
the status of your wallets by using the [`GET /api/v1/transactions/payment`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget)
endpoint as follows:
```
curl -X GET :8090/api/v1/transactions?wallet_id=Ae2tdPwUPE...8V3AVTnqGZ\
-H "Accept: application/json; charset=utf-8" \
```
Here we constrained the request to a specific account. After our previous transaction the output
should look roughly similar to this:
```json
$readTransactions
```
In addition, and because it is not possible to _preview_ a transaction, one can lookup a
transaction's fees using the [`POST /api/v1/transactions/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost)
endpoint to get an estimation of those fees.
See [Estimating Transaction Fees](#section/Common-Use-Cases/Estimating-Transaction-Fees) for more details.
Issue GD
To increase or decrease GD total supply, The issuer (the GD operator) can create a new
payment transaction using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1issurance%2Fpost)
endpoint as follows:
```
curl -X POST :8090/api/v1/transactions/issurance \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
-d '{
"info": {
"increment": 10000000,
},
"source": {
"accountIndex": 0,
"walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
},
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
Pagination
==========
**All GET requests of the API are paginated by default**. Whilst this can be a source of
surprise, is the best way of ensuring the performance of GET requests is not affected by the
size of the data storage.
Version `V1` introduced a different way of requesting information to the API. In particular,
GET requests which returns a _collection_ (i.e. typically a JSON array of resources) lists
extra parameters which can be used to modify the shape of the response. In particular, those
are:
* `page`: (Default value: **1**).
* `per_page`: (Default value: **$deDefaultPerPage**)
For a more accurate description, see the section `Parameters` of each GET request, but as a
brief overview the first two control how many results and which results to access in a
paginated request.
Filtering and Sorting
=====================
`GET` endpoints which list collection of resources supports filters & sort operations, which
are clearly marked in the swagger docs with the `FILTER` or `SORT` labels. The query format is
quite simple, and it goes this way:
Filter Operators
| Operator | Description | Example |
| - | If **no operator** is passed, this is equivalent to `EQ` (see below). | `balance=10` |
| `EQ` | Retrieves the resources with index _equal_ to the one provided. | `balance=EQ[10]` |
| `LT` | Retrieves the resources with index _less than_ the one provided. | `balance=LT[10]` |
| `LTE` | Retrieves the resources with index _less than equal_ the one provided. | `balance=LTE[10]` |
| `GT` | Retrieves the resources with index _greater than_ the one provided. | `balance=GT[10]` |
| `GTE` | Retrieves the resources with index _greater than equal_ the one provided. | `balance=GTE[10]` |
| `RANGE` | Retrieves the resources with index _within the inclusive range_ [k,k]. | `balance=RANGE[10,20]` |
Sort Operators
| Operator | Description | Example |
| `ASC` | Sorts the resources with the given index in _ascending_ order. | `sort_by=ASC[balance]` |
| `DES` | Sorts the resources with the given index in _descending_ order. | `sort_by=DES[balance]` |
| - | If **no operator** is passed, this is equivalent to `DES` (see above). | `sort_by=balance` |
Errors
======
In case a request cannot be served by the API, a non-2xx HTTP response will be issued, together
with a [JSend-compliant]() JSON Object describing the error
in detail together with a numeric error code which can be used by API consumers to implement
proper error handling in their application. For example, here's a typical error which might be
issued:
``` json
$deErrorExample
```
Existing Wallet Errors
$deWalletErrorTable
Monetary Denomination & Units
=============================
Sealchain's platform currency is called _SEAL_. _SEAL_ has up to **8** decimal places; hence the
smallest monetary unit that can be represented in the Seaichain's blockhain is: 0.00000001.
Sealchain originaly includes stablecoin called GD (GoldDollar), GD has up to **2** decimal places.
> **Warning**: All amounts manipulated in the API are given and expected in smallest monetary unit.
Mnemonic Codes
==============
The full list of accepted mnemonic codes to secure a wallet is defined by the [BIP-39
specifications](-0039.mediawiki). Note that
picking up 12 random words from the list **is not enough** and leads to poor security. Make
sure to carefully follow the steps described in the protocol when you generate words for a new
wallet.
Versioning & Legacy
===================
The API is **versioned**, meaning that is possible to access different versions of the API by adding the _version number_ in the URL.
**For the sake of backward compatibility, we expose the legacy version of the API, available simply as unversioned endpoints.**
This means that _omitting_ the version number would call the old version of the API. Deprecated
endpoints are currently grouped under an appropriate section; they would be removed in upcoming
released, if you're starting a new integration with Sealchain, please ignore these.
Note that Compatibility between major versions is not _guaranteed_, i.e. the request & response formats might differ.
Disable TLS (Not Recommended)
Common Use-Cases
================
Sending Money to Multiple Recipients
As seen in [Sending SEAL](#section/Getting-Started/Sending-SEAL), you can send _SEAL_ to
another party using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fpost) endpoint.
Important to notice is the type of the field `destinations`: it's a list, enabling you to provide more
than one destination. Each destination is composed of:
- An address
- A corresponding amount
The overall transaction corresponds to the sum of each outputs. For instance, to send money to
two parties simultaneously:
```
curl -X POST :8090/api/v1/transactions \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
-d '{
"destinations": [
{
"amount": 14,
"address": "A7k5bz1QR2...Tx561NNmfF"
},
{
"amount": 42,
"address": "B56n78WKE8...jXAa34NUFz"
}
],
"source": {
"accountIndex": 0,
"walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
},
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
About UTXO Fragmentation
As described in [Sending Money to Multiple Recipients](#section/Common-Use-Cases/Sending-Money-to-Multiple-Recipients), it is possible to send ada to more than one destination. Sealchain only allows a given UTXO to cover at most one single transaction output. As a result,
when the number of transaction outputs is greater than the number the API returns a `UtxoNotEnoughFragmented` error which
looks like the following
```
{
"status": "error",
"diagnostic": {
"details": {
"help": "Utxo is not enough fragmented to handle the number of outputs of this transaction. Query /api/v1/wallets/{walletId}/statistics/utxos endpoint for more information",
"missingUtxos": 1
}
},
"message": "UtxoNotEnoughFragmented"
}
```
To make sure the source account has a sufficient level of UTXO fragmentation (i.e. number of UTXOs),
please monitor the state of the UTXOs as described in [Getting UTXO Statistics](#section/Common-Use-Cases/Getting-Utxo-Statistics). The
number of wallet UTXOs should be no less than the transaction outputs, and the sum of all UTXOs should be enough to cover the total
transaction amount, including fees.
Contrary to a classic accounting model, there's no such thing as spending _part of a UTXO_, and one has to wait for a transaction to be included in a
block before spending the remaining change. This is very similar to using bank notes: one can't spend a USD 20 bill at two different shops at the same time,
even if it is enough to cover both purchases — one has to wait for change from the first transaction before making the second one.
There's no "ideal" level of fragmentation; it depends on one's needs. However, the more UTXOs that are available, the higher the concurrency capacity
of one's wallet, allowing multiple transactions to be made at the same time.
Similarly, there's no practical maximum number of UTXOs, but there is nevertheless a maximum transaction size. By having many small UTXOs,
one is taking the risk of hitting that restriction, should too many inputs be selected to fill a transaction. The only way to
work around this is to make multiple smaller transactions.
Estimating Transaction Fees
When you submit a transaction to the network, some fees apply depending on, but not only, the
selected grouping policy and the available inputs on the source wallet. There's actually a
trade-off between fees, cryptographic security, throughput and privacy. The more inputs are
selected, the bigger is the payload, the bigger are the fees.
The API lets you estimate fees for a given transaction via the [`POST /api/v1/transaction/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost)
endpoint. The request payload is identical to the one you would make to create a transaction:
```
curl -X POST :8090/api/v1/transactions/fees \
-H "Accept: application/json; charset=utf-8" \
-H "Content-Type: application/json; charset=utf-8" \
-d '{
"destinations": [{
"amount": 14,
"address": "A7k5bz1QR2...Tx561NNmfF"
}],
"source": {
"accountIndex": 0,
"walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
}
}'
```
The API resolves with an estimated amount in _SEAL_. This estimation highly depends on the
current state of the ledger and diverges with time.
```json
$readFees
```
Managing Accounts
A wallet isn't limited to one account. It can actually be useful to have more than one account
in order to separate business activities. With the API, you can retrieve a specific account,
create new ones, list all existing accounts of a wallet or edit a few things on an existing
account. By default, your wallet comes with a provided account. Let's see how to create a fresh
new account on a wallet using [`POST /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fpost):
```
curl -X POST \
:8090/api/v1/Ae2tdPwUPE...8V3AVTnqGZ/accounts \
-H 'Content-Type: application/json;charset=utf-8' \
-H 'Accept: application/json;charset=utf-8' \
-d '{
"name": "MyOtherAccount",
"spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
}'
```
Note that the `spendingPassword` here should match the one provided earlier in [Creating a
New Wallet](#section/Getting-Started/Creating-a-New-Wallet).
```json
$createAccount
```
You can always retrieve this account description later if needed via [`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts~1{accountId}%2Fget).
For example:
```
curl -X GET \
:8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts/2902829384 \
-H 'Accept: application/json;charset=utf-8' \
```
For a broader view, the full list of accounts of a given wallet can be retrieved using [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget)
```
curl -X GET \
:8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts \
-H 'Accept: application/json;charset=utf-8' \
```
```json
$readAccounts
```
Partial Representations
The previous endpoint gives you a list of full representations. However, in some cases, it might be interesting to retrieve only a partial representation of an account (e.g. only the balance). There are two extra endpoints one could use to either fetch a given account's balance, and another to retrieve the list of addresses associated to a specific account.
[`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}/addresses`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1%7BwalletId%7D~1accounts~1%7BaccountId%7D~1addresses%2Fget)
```json
$readAccountAddresses
```
Note that this endpoint is paginated and allow basic filtering and sorting on
addresses. Similarly, you can retrieve only the account balance with:
[`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}/amount`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1%7BwalletId%7D~1accounts~1%7BaccountId%7D~1amount%2Fget)
```json
$readAccountBalance
```
Managing Addresses
By default, wallets you create are provided with an account which has one default address. It
is possible (and recommended) for an account to manage multiple addresses. Address reuse
actually reduces privacy since it ties more transactions to a small set of addresses.
When paying, the wallet makes many of these choices for you. Addresses are
selected from a wallet's account based on several different strategies and
policies.
To create a new address, use the [`POST /api/v1/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1addresses%2Fpost)
endpoint:
```
curl -X POST \
:8090/api/v1/addresses \
-H 'Content-Type: application/json;charset=utf-8' \
-H 'Accept: application/json;charset=utf-8' \
-d '{
"walletId": "Ae2tdPwUPE...V3AVTnqGZ4",
"accountIndex": 2147483648
}'
```
```json
$createAddress
```
If your wallet is protected with a password, this password is also required in order to create
new addresses for that wallet. In such case, the field `spendingPassword` should match the one
defined earlier to protect your wallet.
Addresses generated as just described are always valid. When the API encounters
an invalid address however (e.g. when provided by another party), it will fail with a
client error.
You can always view all your available addresses across all your wallets by using
[`GET /api/v1/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1addresses%2Fget):
```
curl -X GET :8090/api/v1/addresses \
-H 'Accept: application/json;charset=utf-8' \
```
```json
$readAddresses
```
Checking Synchronization Progress
You can control the synchronization progress of the underlying node hosting the wallet's server
via [`GET /api/v1/node-info`](#tag/Info%2Fpaths%2F~1api~1v1~1node-info%2Fget). The output is
rather verbose and gives real-time progress updates about the current node.
```
curl -X GET :8090/api/v1/node-info \
-H 'Accept: application/json;charset=utf-8' \
```
```json
$readNodeInfo
```
Retrieving Transaction History
If needed, applications may regularly poll the wallet's backend to retrieve the history of
transactions of a given wallet. Using the [`GET /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget)
endpoint, you can view the status of all transactions that ever sent or took money from the
wallet.
The following table sums up the available filters (also detailed in the endpoint documentation details):
Filter On | Corresponding Query Parameter(s)
Wallet | `wallet_id`
Wallet's account | `account_index` + `wallet_id`
Address | `address`
Transaction's creation time | `created_at`
Transaction's id | `id`
For example, in order to retrieve the last 50 transactions of a particular account,
ordered by descending date:
```
curl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&account_index=2902829384&sort_by=DES\[created_at\]&per_page=50' \
-H 'Accept: application/json;charset=utf-8' \
```
For example, in order to retrieve the last 50 transactions, ordered by descending date:
```
curl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ &sort_by=DES\[created_at\]&per_page=50' \
-H 'Accept: application/json;charset=utf-8' \
```
Another example, if you were to look for all transactions made since the 1st of January 2018:
```
curl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&created_at=GT\[2018-01-01T00:00:00.00000\]' \
-H 'Accept: application/json;charset=utf-8' \
```
Getting Utxo statistics
You can get Utxo statistics of a given wallet using
[`GET /api/v1/wallets/{{walletId}}/statistics/utxos`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1statistics~1utxos%2Fget)
```
curl -X GET \
:8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/statistics/utxos \
-H 'Accept: application/json;charset=utf-8' \
```
```json
$readUtxoStatistics
```
Make sure to carefully read the section about [Pagination](#section/Pagination) to fully
leverage the API capabilities.
Importing (Unused) Addresses From a Previous Node (or Version)
When restoring a wallet, only the information available on the blockchain can
be retrieved. Some pieces of information aren't stored on
the blockchain and are only defined as _Metadata_ of the wallet backend. This
includes:
- The wallet's name
- The wallet's assurance level
- The wallet's spending password
- The wallet's unused addresses
Unused addresses are not recorded on the blockchain and, in the case of random
derivation, it is unlikely that the same addresses will be generated on two
different node instances. However, some API users may wish to preserve unused
addresses between different instances of the wallet backend.
To enable this, the wallet backend provides an endpoint ([`POST /api/v1/wallets/{{walletId}}/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1addresses%2Fpost))
to import a list of addresses into a given account. Note that this endpoint is
quite lenient when it comes to errors: it tries to import all provided addresses
one by one, and ignores any that can't be imported for whatever reason. The
server will respond with the total number of successes and, if any, a list of
addresses that failed to be imported. Trying to import an address that is already
present will behave as a no-op.
For example:
```
curl -X POST \
:8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/addresses \
-H 'Accept: application/json;charset=utf-8' \
-d '[
"Ae2tdPwUPE...8V3AVTnqGZ",
"Ae2odDwvbA...b6V104CTV8"
]'
```
> **IMPORTANT**: This feature is experimental and performance is
> not guaranteed. Users are advised to import small batches only.
|]
where
createAccount = decodeUtf8 $ encodePretty $ genExample @(APIResponse Account)
createAddress = decodeUtf8 $ encodePretty $ genExample @(APIResponse WalletAddress)
createWallet = decodeUtf8 $ encodePretty $ genExample @(APIResponse Wallet)
readAccounts = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Account])
readAccountBalance = decodeUtf8 $ encodePretty $ genExample @(APIResponse AccountBalance)
readAccountAddresses = decodeUtf8 $ encodePretty $ genExample @(APIResponse AccountAddresses)
readAddresses = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Address])
readFees = decodeUtf8 $ encodePretty $ genExample @(APIResponse EstimatedFees)
readNodeInfo = decodeUtf8 $ encodePretty $ genExample @(APIResponse NodeInfo)
readTransactions = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Transaction])
readUtxoStatistics = decodeUtf8 $ encodePretty $ genExample @(APIResponse UtxoStatistics)
-- | Serve the Swagger schema through a ReDoc-based UI rather than the
-- default swagger-ui widget. The embedded index page loads @redoc.min.js@
-- and points it at the generated schema document.
swaggerSchemaUIServer
:: (Server api ~ Handler Swagger)
=> Swagger -> Server (SwaggerSchemaUI' dir api)
swaggerSchemaUIServer =
swaggerSchemaUIServerImpl redocIndexTemplate redocFiles
where
-- Minimal HTML page embedding ReDoc; the inline script forces a
-- trailing slash on the path so the relative asset references
-- (redoc.min.js, the schema URL) resolve correctly.
redocIndexTemplate :: Text
redocIndexTemplate = [text|
<!doctype html>
<html lang="en">
<head>
<title>ReDoc</title>
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body { margin: 0; padding: 0; }
</style>
<script>
// Force Strict-URL Routing for assets relative paths
(function onload() {
if (!window.location.pathname.endsWith("/")) {
window.location.pathname += "/";
}
}());
</script>
</head>
<body>
<redoc spec-url="../SERVANT_SWAGGER_UI_SCHEMA"></redoc>
<script src="redoc.min.js"> </script>
</body>
</html>|]
-- | Extra prose for the @POST /api/internal/apply-update@ endpoint,
-- spliced into the generated Swagger documentation by 'api'.
applyUpdateDescription :: Text
applyUpdateDescription = [text|
Apply the next available update proposal from the blockchain. Note that this
will immediately shutdown the node and makes it unavailable for a short while.
|]
-- | Extra prose for the @POST /api/internal/postpone-update@ endpoint,
-- spliced into the generated Swagger documentation by 'api'.
postponeUpdateDescription :: Text
postponeUpdateDescription = [text|
Discard the next available update from the node's local state. Yet, this doesn't
reject the update which will still be applied as soon as the node is restarted.
|]
-- | Extra prose for the @DELETE /api/internal/reset-wallet-state@
-- endpoint, spliced into the generated Swagger documentation by 'api'.
resetWalletStateDescription :: Text
resetWalletStateDescription = [text|
Wipe-out the node's local state entirely. The only intended use-case for this
endpoint is during API integration testing. Note also that this will fail by
default unless the node is running in debug mode.
|]
-- | Extra prose for the @POST /api/v1/transactions/fees@ endpoint,
-- spliced into the generated Swagger documentation by 'api'.
estimateFeesDescription :: Text
estimateFeesDescription = [text|
Estimate the fees which would incur from the input payment. This endpoint
**does not** require a _spending password_ to be supplied as it generates
under the hood an unsigned transaction.
|]
-- | Extra prose for the @GET /api/v1/addresses/{address}@ endpoint,
-- documenting the behavioral change from the previous API version.
getAddressDescription :: Text
getAddressDescription = [text|
The previous version of this endpoint failed with an HTTP error when the given
address was unknown to the wallet.
This was misleading since an address that is unknown to the wallet may still
belong to the wallet (since it could be part of a pending transaction in
another instance of the same wallet).
To reflect this, the V1 endpoint does not fail when an address is not recognised
and returns a new field which indicates the address' ownership status, from the
node point of view.
|]
-- | Values interpolated into the top-level API description template
-- (see 'api', which constructs one of these).
data DescriptionEnvironment = DescriptionEnvironment
{ deErrorExample :: !T.Text -- ^ example JSON rendering of a wallet error
, deDefaultPerPage :: !T.Text -- ^ default page size of paginated endpoints
, deWalletErrorTable :: !T.Text -- ^ rendered table of wallet error codes
, deGitRevision :: !T.Text -- ^ git revision the node was built from
, deSoftwareVersion :: !T.Text -- ^ numeric software version
, deMnemonicExample :: !T.Text -- ^ example JSON backup phrase
}
-- | Assemble the Swagger specification for the wallet API: general
-- metadata (title, version, host, license), the rendered top-level
-- description produced by @mkDescription@, and per-endpoint description
-- overrides for the endpoints whose generated docs need extra prose.
api :: HasSwagger a
=> (CompileTimeInfo, SoftwareVersion)
-> Proxy a
-> (DescriptionEnvironment -> T.Text)
-> Swagger
api (compileInfo, curSoftwareVersion) walletAPI mkDescription = toSwagger walletAPI
& info.title .~ "Sealchain Wallet API"
& info.version .~ fromString (show curSoftwareVersion)
& host ?~ "127.0.0.1:8090"
& info.description ?~ mkDescription DescriptionEnvironment
{ deErrorExample = decodeUtf8 $ encodePretty WalletNotFound
, deMnemonicExample = decodeUtf8 $ encode (genExample @BackupPhrase)
, deDefaultPerPage = fromString (show defaultPerPageEntries)
, deWalletErrorTable = errorsDescription
, deGitRevision = ctiGitRevision compileInfo
, deSoftwareVersion = fromString $ show (svNumber curSoftwareVersion)
}
& info.license ?~ ("MIT" & url ?~ URL "-project/sealchain/develop/LICENSE")
& paths %~ (POST, "/api/internal/apply-update") `setDescription` applyUpdateDescription
& paths %~ (POST, "/api/internal/postpone-update") `setDescription` postponeUpdateDescription
& paths %~ (DELETE, "/api/internal/reset-wallet-state") `setDescription` resetWalletStateDescription
& paths %~ (POST, "/api/v1/transactions/fees") `setDescription` estimateFeesDescription
& paths %~ (GET, "/api/v1/addresses/{address}") `setDescription` getAddressDescription
|
418f2e4a28c1eee5c23b1e2879f3f0f450980f61b7b287beb97fcf50477102db | arenadotio/pgx | test_pgx_value_core.ml | open Core_kernel
module Value = Pgx_value_core
(* Parse [str] as a Pgx value, then convert it to a [Time.t]. *)
let time_roundtrip str = Value.to_time_exn (Value.of_string str)
(* Render a [Time.t] in UTC for test failure messages. *)
let printer = Time.to_string_abs ~zone:Time.Zone.utc
(* Alcotest testable for [Time.t]: printed via [printer], compared with
   [Time.equal]. *)
let time_testable =
Alcotest.testable (fun ppf t -> Format.pp_print_string ppf (printer t)) Time.equal
;;
(* Assertion helpers specialised to times and strings. *)
let check_time = Alcotest.check time_testable
let check_string = Alcotest.(check string)
(* [Value.to_time_exn] must accept microsecond timestamps in several
   timezone notations: none (implicit UTC), "Z", "-HH", and "-HH:MM".
   All four inputs denote the same instant. *)
let test_time_of_string _ =
let expected = Time.of_string "2016-03-15 19:55:18.123456-04:00" in
check_time "without TZ" expected (time_roundtrip "2016-03-15 23:55:18.123456");
check_time "zulu" expected (time_roundtrip "2016-03-15 23:55:18.123456Z");
check_time "hour TZ" expected (time_roundtrip "2016-03-15 19:55:18.123456-04");
check_time "full TZ" expected (time_roundtrip "2016-03-15 19:55:18.123456-04:00")
;;
(* Same timezone-notation coverage as [test_time_of_string], but for
   timestamps without a fractional-seconds component. *)
let test_time_of_string_no_ms _ =
let expected = Time.of_string "2016-03-15 19:55:18-04:00" in
check_time "without TZ" expected (time_roundtrip "2016-03-15 23:55:18");
check_time "zulu" expected (time_roundtrip "2016-03-15 23:55:18Z");
check_time "hour TZ" expected (time_roundtrip "2016-03-15 19:55:18-04");
check_time "full TZ" expected (time_roundtrip "2016-03-15 19:55:18-04:00")
;;
(* Round-trips in both directions: parse-then-print yields the original
   string, and print-then-parse yields the original time. *)
let test_time_conversion_roundtrip _ =
let expected_str = "2016-03-15 23:55:18.123456Z" in
check_string "parse-print" expected_str (time_roundtrip expected_str |> printer);
let expected_time = Time.of_string expected_str in
check_time "print-parse" expected_time (Value.of_time expected_time |> Value.to_time_exn)
;;
(* The "time" test suite registered with Alcotest below. *)
let time_tests =
[ Alcotest.test_case "test time_of_string" `Quick test_time_of_string
; Alcotest.test_case
"test time_of_string no milliseconds"
`Quick
test_time_of_string_no_ms
; Alcotest.test_case
"test time conversion roundtrip"
`Quick
test_time_conversion_roundtrip
]
;;
(* Entry point: run all conversion test suites under Alcotest. *)
let () = Alcotest.run "pgx_async_conversions" [ "time", time_tests ]
| null | https://raw.githubusercontent.com/arenadotio/pgx/8d5ca02213faa69e692c5d0dc3e81408db3774a1/pgx_value_core/test/test_pgx_value_core.ml | ocaml | open Core_kernel
module Value = Pgx_value_core
let time_roundtrip str = Value.of_string str |> Value.to_time_exn
let printer = Time.to_string_abs ~zone:Time.Zone.utc
let time_testable =
Alcotest.testable (fun ppf t -> Format.pp_print_string ppf (printer t)) Time.equal
;;
let check_time = Alcotest.check time_testable
let check_string = Alcotest.(check string)
let test_time_of_string _ =
let expected = Time.of_string "2016-03-15 19:55:18.123456-04:00" in
check_time "without TZ" expected (time_roundtrip "2016-03-15 23:55:18.123456");
check_time "zulu" expected (time_roundtrip "2016-03-15 23:55:18.123456Z");
check_time "hour TZ" expected (time_roundtrip "2016-03-15 19:55:18.123456-04");
check_time "full TZ" expected (time_roundtrip "2016-03-15 19:55:18.123456-04:00")
;;
let test_time_of_string_no_ms _ =
let expected = Time.of_string "2016-03-15 19:55:18-04:00" in
check_time "without TZ" expected (time_roundtrip "2016-03-15 23:55:18");
check_time "zulu" expected (time_roundtrip "2016-03-15 23:55:18Z");
check_time "hour TZ" expected (time_roundtrip "2016-03-15 19:55:18-04");
check_time "full TZ" expected (time_roundtrip "2016-03-15 19:55:18-04:00")
;;
let test_time_conversion_roundtrip _ =
let expected_str = "2016-03-15 23:55:18.123456Z" in
check_string "parse-print" expected_str (time_roundtrip expected_str |> printer);
let expected_time = Time.of_string expected_str in
check_time "print-parse" expected_time (Value.of_time expected_time |> Value.to_time_exn)
;;
let time_tests =
[ Alcotest.test_case "test time_of_string" `Quick test_time_of_string
; Alcotest.test_case
"test time_of_string no milliseconds"
`Quick
test_time_of_string_no_ms
; Alcotest.test_case
"test time conversion roundtrip"
`Quick
test_time_conversion_roundtrip
]
;;
let () = Alcotest.run "pgx_async_conversions" [ "time", time_tests ]
|
|
8d35a45e41a48d54970a4d4b22cc2ddb8b1634a954206029ec680281a4a49f75 | bytekid/mkbtt | codeTree.ml | Copyright 2010
* GNU Lesser General Public License
*
* This file is part of MKBtt .
*
* is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of MKBtt.
*
* MKBtt is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* MKBtt is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt. If not, see </>.
*)
*
@author
@since 2009/07/21
@author Sarah Winkler
@since 2009/07/21 *)
(** Term indexing using code trees *)
(*** OPENS ********************************************************************)
open Util;;
(*** EXCEPTIONS **********************************************************)
exception No_back_pointer
exception Malformed_tree of string
exception Not_in_index
exception Empty_branch
(*** MODULES *************************************************************)
module Fun = Rewriting.Function;;
module Pos = Rewriting.Position;;
module Var = Rewriting.Variable;;
module T = U.Term;;
module M = U.Monad;;
open M;;
: TermIndex . T with type entry = Entry.t
= functor (Entry: TermIndex.ENTRY_TYPE) ->
struct
(*** SUBMODULES **********************************************************)
module EL = TermIndex.EntryList(Entry);;
(*** TYPES ***************************************************************)
type entry = Entry.t
type instruction =
| Check of Fun.t * instruction * instruction
| Put of int * instruction * instruction
| Compare of int * int * instruction * instruction
| Success of Entry.t list
| Fail
;;
type t = instruction
(* convenient for generalization retrievals *)
type flatterm =
| Fun of Fun.t * flatterm * flatterm * T.t (* next, after, subterm here *)
| Var of Var.t * flatterm (* next = after *)
| End
;;
(*** GLOBALS *************************************************************)
(*** FUNCTIONS ***********************************************************)
(* A code tree is empty iff it is the single [Fail] instruction. *)
let is_empty t = return (t == Fail)
(* Continuation (success) branch of an instruction.  The [n] argument is
   unused; kept for interface compatibility -- TODO(review): confirm
   whether any caller relies on the two-argument arity. *)
let cont n = function
| Check (_, c, _ )
| Put (_, c, _ )
| Compare (_, _, c, _ ) -> c
| _ -> raise (Malformed_tree "cont does not exist")
;;
(* Backtracking branch of an instruction.  The [n] argument is unused;
   kept for interface compatibility with [cont].
   Raises [Malformed_tree] on [Success]/[Fail], which have no branches. *)
let back n = function
| Check (_, _, b)
| Put (_, _, b)
| Compare (_, _, _, b) -> b
| _ -> raise (Malformed_tree "back does not exist")
;;
(* Replace the backtracking branch of [instruction] with [b'].
   Raises [Malformed_tree] on [Success]/[Fail], which have no branches. *)
let set_back instruction b' =
match instruction with
| Check (f, c, b) -> Check (f, c, b')
| Put (n, c, b) -> Put (n, c, b')
| Compare (m, k, c, b) -> Compare (m, k, c, b')
| _ -> raise (Malformed_tree "back does not exist")
;;
(* output code *)
(* Human-readable rendering of a code tree, mainly for debugging. *)
let rec code_to_string = function
| Check (f, c, b) ->
  Printf.sprintf "Check(%s, %s, %s)"
    (Fun.to_string f) (code_to_string c) (code_to_string b)
| Put (k, c, b) ->
  Printf.sprintf "Put(%d, %s, %s)" k (code_to_string c) (code_to_string b)
| Compare (m, k, c, b) ->
  Printf.sprintf "Compare(%d, %d, %s, %s)"
    m k (code_to_string c) (code_to_string b)
| Success values -> "Success" ^ (List.join Entry.to_string " " values)
| Fail -> "Fail"
;;
(* Handle a (possibly repeated) variable [x] at register [i]: if [x] was
   already bound to some register [j], prepend a [Compare] between [j]
   and [i] to [code]; otherwise record the binding [x |-> i] in [table]. *)
let lookup table x i code =
try
let j = List.assoc x table in
(Compare(j, i, Fail, Fail)) :: code, table
with Not_found ->
code, (x, i) :: table
;;
(* Compile a term traversal.  [tcodes] accumulates the [Check]/[Put]
   instructions (in reverse preorder), [ccodes] the [Compare]
   instructions for repeated variables, [table] maps variables to the
   register of their first occurrence, and [i] is the next free register. *)
let rec code_list tcodes ccodes i table = function
| T.Var x ->
let ccodes', table' = lookup table x i ccodes in
(Put (i, Fail, Fail)) :: tcodes, ccodes', table', i + 1
| T.Fun(f, ts) ->
let tcodes' = (Check(f, Fail, Fail)) :: tcodes in
List.fold_left app_tcode (tcodes', ccodes, table, i) ts
(* Fold step: compile subterm [t] onto the accumulated state. *)
and app_tcode (tcodes, ccodes, table, i) t =
code_list tcodes ccodes i table t
;;
(* Chain a reversed instruction list in front of [instruction], wiring
   each list element's continuation to the code built so far; all
   backtracking branches start out as [Fail]. *)
let rec combine_code instruction = function
| [] -> instruction
| Check(f, _, _) :: l ->
combine_code (Check(f, instruction, Fail)) l
| Put(k, _, _) :: l ->
combine_code (Put(k, instruction, Fail)) l
| Compare(k, m,_, _) :: l ->
combine_code (Compare(k, m, instruction, Fail)) l
| _ -> raise (Malformed_tree "Compare/Fail/Success not expected")
;;
(* Compile [t] into straight-line code ending in an empty [Success];
   used to drive retrieval queries. *)
let code_for_term t =
  let tcodes, ccodes, _, _ = code_list [] [] 0 [] t in
  combine_code (combine_code (Success []) ccodes) tcodes
;;
(* Compile the binding [(t, v)] into straight-line code whose [Success]
   leaf carries the single entry [v]. *)
let code_for_value (t, v) =
  let tcodes, ccodes, _, _ = code_list [] [] 0 [] t in
  combine_code (combine_code (Success [v]) ccodes) tcodes
;;
(* ****************** CONSTRUCTION OF CODE TREES ********************** *)
(* Create an empty code tree. *)
let make () = Fail
(* assume code is just code, not tree (otherwise, change case for
Success in tree *)
(* Merge straight-line [code] into [tree], sharing common instruction
   prefixes; on a mismatch, descend into the tree's backtracking branch
   and hang the remaining code there.  Assumes [code] is plain code (not
   an already-merged tree). *)
let rec insert' code tree =
match code, tree with
| _, Fail -> code
| Check(f, c, _), Check(g, c', b') when (Fun.compare f g) == 0 ->
Check(g, insert' c c', b')
| Compare(m, k, c, _), Compare(m', k', c', b') when (k == k') && (m == m') ->
Compare(m', k', insert' c c', b')
| Put(k, c, _), Put(k', c', b') when k = k' ->
Put(k, insert' c c', b')
| _, Check(_, _, b)
| _, Compare (_, _, _, b)
| _, Put (_, _, b) ->
set_back tree (insert' code b)
| Check(_, _, b), Success vs (* cases relevant? *)
| Compare(_, _, _, b), Success vs
| Put(_, _, b), Success vs ->
set_back code (Success vs)
| Success v, Success values ->
Success (EL.union v values) (* variant *)
| Fail, Success _ -> raise (Malformed_tree "Fail, Success not expected")
;;
(* add entry element into code tree *)
(* Insert the binding [(term, value)] into the code tree: compile the
   term to straight-line code carrying [value] in its [Success] leaf and
   merge it into [tree].  (Garbled remnants of commented-out debug
   printing removed.) *)
let insert tree (term, value) =
  let code = code_for_value (term, value) in
  return (insert' code tree)
;;
(* Remove the path described by [code] from [tree].  [Empty_branch]
   propagates upwards when a subtree becomes empty so the parent can
   splice in its backtracking branch; raises [Not_in_index] if the path
   does not exist. *)
let rec remove_code code tree v =
match code, tree with
| Fail, _ -> raise (Malformed_tree "Fail in code not expected")
| Check(f,c,_), Check(g,c',b') when (Fun.compare f g) == 0 ->
(try Check(g, remove_code c c' v, b')
with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
| Compare(m,k,c,_), Compare(m',k',c',b') when (k==k') && (m==m') ->
(try Compare(m', k', remove_code c c' v, b')
with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
| Put(k, c, b), Put(k', c', b') when k = k' ->
(try Put(k', remove_code c c' v, b')
with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
| _, Check(_, _, b)
| _, Compare(_, _, _, b)
| _, Put(_, _, b) ->
(try set_back tree (remove_code code b v)
with Empty_branch -> set_back tree Fail)
| Success v, Success values ->
if (List.length values) == 1 then raise Empty_branch
else Success (EL.diff values v) (* variant *)
| _, Success _ -> raise (Malformed_tree "Success in tree not expected")
| _ -> raise Not_in_index
;;
(* removes the value from the index. if not found, Not_in_index is raised *)
(* Remove [value] from the index.  Raises [Not_in_index] if it is not
   present; a tree emptied by the removal collapses to [Fail].
   (Garbled remnants of commented-out debug printing removed.) *)
let delete tree value =
  let code = code_for_value value in
  let tree' = try remove_code code tree value with Empty_branch -> Fail in
  return tree'
;;
(********* RETRIEVAL OPERATIONS ******************************************)
(***** VARIANTS *****)
(* Walk [tree] in lock-step with the query [code]: follow matching
   instructions, fall back to the tree's backtracking branch on a
   mismatch, and collect the entries at a matching [Success] leaf. *)
let rec retrieve_variants tree code =
match tree, code with
| Check(f, c, b), Check(g, c', _) when (Fun.compare f g) == 0 ->
retrieve_variants c c'
| Compare(m, k, c, b), Compare(m', k', c', _) when (k == k') && (m == m') ->
retrieve_variants c c'
| Put(k, c, b), Put(k', c', _) when k = k' ->
retrieve_variants c c'
| Check(_, _, b), _
| Compare(_, _, _, b), _
| Put(_, _, b), _ ->
retrieve_variants b code
| Success variants, Success _ -> variants
| Fail, _
| Success _, _ -> []
;;
(* Retrieve all entries whose indexed term is a variant of [term].
   (Garbled remnants of commented-out debug printing removed.) *)
let variant_candidates tree term =
  let code = code_for_term term in
  return (retrieve_variants tree code)
;;
(***** GENERALIZATIONS *****)
(* Flatten [t] into a linked flatterm whose tail is [after].  A [Fun]
   node keeps both the next node in preorder and the node reached after
   skipping the whole subterm, plus the subterm itself (needed by
   generalization retrieval). *)
let rec flatten' after t =
match t with
| T.Var x -> Var (x, after)
| T.Fun(f, ts) ->
let flat_ts = List.fold_right (fun t l -> flatten' l t) ts after in
Fun(f, flat_ts, after, t) (* add t here, required in gen retrieve *)
;;
(* Flatten a whole term; the flatterm is terminated by [End]. *)
let flatten = flatten' End
(* Look up register [i] in the substitution [table].  A missing entry
   means a [Compare] was emitted without a preceding [Put]. *)
let subst table i =
try
List.assoc i table
with
Not_found -> raise (Malformed_tree "compare without put")
;;
(* Collect all entries whose indexed term generalizes the query
   flatterm [t_flat].  [sub] maps registers to the query subterms bound
   by [Put]; [Compare] nodes are checked against it at the end of the
   flatterm.  Both the matching continuation and the backtracking branch
   are explored, and the results are unioned. *)
let rec retrieve_generalizations tree t_flat sub =
match tree, t_flat with
| Check(f, c, b), Fun(g, next, after, _) when (Fun.compare f g) == 0 ->
let gens = retrieve_generalizations c next sub in
EL.union (retrieve_generalizations b t_flat sub) gens
| Compare(m, k, c, b), End ->
let gens = retrieve_generalizations b End sub in
if (compare (subst sub m) (subst sub k)) == 0 then
EL.union (retrieve_generalizations c End sub) gens
else
gens
| Put(k, c, b), Var (x, after) ->
let subterm = T.Var x in
let gens = retrieve_generalizations c after ((k, subterm) :: sub) in
EL.union (retrieve_generalizations b t_flat sub) gens
| Put(k, c, b), Fun (_, _, after, subterm) ->
let gens = retrieve_generalizations c after ((k, subterm) :: sub) in
EL.union (retrieve_generalizations b t_flat sub) gens
| Check(_, _, b), _ ->
retrieve_generalizations b t_flat sub
| Success entries, End -> entries
| Fail, _
| Compare _, _
| Success _, _ -> []
| Put _, End -> raise (Malformed_tree "not malformed?")
;;
find generalizations for a given term in dtree
(* Retrieve all entries whose indexed term generalizes [term]. *)
let generalization_candidates tree term =
  return (retrieve_generalizations tree (flatten term) [])
;;
(***** ENCOMPASSMENTS *****)
given a term , non - var generalization of subterms are returned ,
paired with the subterm 's position . Not strict ! Also not possible
as indexing destroys nonlinearity .
paired with the subterm's position. Not strict! Also not possible
as indexing destroys nonlinearity. *)
let encompassment_candidates tree term =
let pos_st = Termx.nonvar_pos_proper_subterms term in
let ecs =
List.fold_left
( fun r ( t , p ) - >
let gs = retrieve_generalizations tree ( flatten t ) [ ] in
( List.map ( fun n - > ( n , p ) ) gs ) @ r )
[ ] ( ( term , Pos.root ) : : pos_st )
in
return ecs
; ;
let encompassment_candidates tree term =
let pos_st = Termx.nonvar_pos_proper_subterms term in
let ecs =
List.fold_left
(fun r (t, p) ->
let gs = retrieve_generalizations tree (flatten t) [] in
(List.map (fun n -> (n, p)) gs) @ r)
[] ((term,Pos.root) :: pos_st)
in
return ecs
;;*)
given a term , non - var generalization of subterms are returned ,
paired with the subterm 's position . Not strict !
paired with the subterm's position. Not strict! *)
(* For every non-variable proper subterm of [term], retrieve entries
   generalizing that subterm, paired with the subterm's position.
   Candidates only: indexing loses nonlinearity, so matches are not
   strict. *)
let encompassment_candidates_below_root tree term =
let pos_st = Termx.nonvar_pos_proper_subterms term in
let ecs =
List.fold_left
(fun r (t, p) ->
let gs = retrieve_generalizations tree (flatten t) [] in
(List.map (fun n -> (n, p)) gs) @ r)
[] pos_st
in
return ecs
;;
(* Encompassment candidates at the root and below: root-position
   generalizations are prepended to the below-root candidates, each
   paired with the matched position. *)
let encompassment_candidates tree term =
let at_root = retrieve_generalizations tree (flatten term) [] in
encompassment_candidates_below_root tree term >>= fun below ->
let root = flip Pair.make Pos.root in
return (List.rev_append (List.map root at_root) below)
;;
(* Degenerate size: 0 for the empty tree and 1 otherwise (entry
   counting is not implemented for code trees). *)
let size t = is_empty t >>= fun b -> return (if b then 0 else 1)
(* Overlap and unification retrieval are not supported by this index
   implementation; all four operations fail unconditionally. *)
let overlap1_candidates t = failwith "CodeTree: overlaps not implemented"
let overlap1_candidates_below_root t =
failwith "CodeTree: overlaps not implemented"
;;
let overlap2_candidates t = failwith "CodeTree: overlaps not implemented"
let unification_candidates t =
failwith "CodeTree: unification not implemented"
;;
end (* Make *)
module TermCodeTree = Make(TermIndex.TermEntry)
let test ( ) =
Format.printf " testing module CodeTree\n " ;
let c = Fun.of_string " c " 0 in
let f = Fun.of_string " f " 1 in
let g = Fun.of_string " g " 2 in
let x = Term . ( Var.of_string " x " ) in
let y = Term . ( Var.of_string " y " ) in
let f_x = Term . Fun ( f , [ x ] ) in
let f_f_x = Term . Fun ( f , [ f_x ] ) in
let c _ = Term . Fun ( c , [ ] ) in
let g_x_x = Term . Fun(g , [ x ; x ] ) in
Format.printf " Code for % s : \n % s\n "
( Term.to_string f_f_x )
( TermCodeTree.code_to_string ( TermCodeTree.code_for_value f_f_x ) ) ;
Format.printf " Code for % s : \n % s\n "
( Term.to_string g_x_x )
( TermCodeTree.code_to_string ( TermCodeTree.code_for_value g_x_x ) ) ;
let g_f_f_x_c = Term . Fun ( g , [ f_f_x ; c _ ] ) in
Format.printf " Code for % s : \n % s\n\n "
( Term.to_string g_f_f_x_c )
( TermCodeTree.code_to_string ( ) ) ;
let = Term . Fun ( g , [ f_f_x ; f_x ] ) in
let g_f_f_x_y = Term . Fun ( g , [ f_f_x ; y ] ) in
Format.printf " Code for % s : \n % s\n\n "
( Term.to_string g_f_f_x_f_x )
( TermCodeTree.code_to_string ( ) ) ;
let t = Term . Fun ( g , [ g_f_f_x_f_x ; y ] ) in
let t ' = Term . Fun ( g , [ g_f_f_x_f_x ; g_x_x ] ) in
Format.printf " Code for % s : \n % s\n\n "
( Term.to_string t )
( TermCodeTree.code_to_string ( t ) ) ;
( * INSERT
let test () =
Format.printf "testing module CodeTree\n";
let c = Fun.of_string "c" 0 in
let f = Fun.of_string "f" 1 in
let g = Fun.of_string "g" 2 in
let x = Term.Var (Var.of_string "x") in
let y = Term.Var (Var.of_string "y") in
let f_x = Term.Fun (f, [x]) in
let f_f_x = Term.Fun (f, [f_x]) in
let c_ = Term.Fun (c, []) in
let g_x_x = Term.Fun(g, [x; x]) in
Format.printf "Code for %s: \n %s\n"
(Term.to_string f_f_x)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value f_f_x));
Format.printf "Code for %s: \n %s\n"
(Term.to_string g_x_x)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value g_x_x));
let g_f_f_x_c = Term.Fun (g, [f_f_x; c_]) in
Format.printf "Code for %s: \n %s\n\n"
(Term.to_string g_f_f_x_c)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_c));
let g_f_f_x_f_x = Term.Fun (g, [f_f_x; f_x]) in
let g_f_f_x_y = Term.Fun (g, [f_f_x; y]) in
Format.printf "Code for %s: \n %s\n\n"
(Term.to_string g_f_f_x_f_x)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_f_x));
let t = Term.Fun (g, [g_f_f_x_f_x; y]) in
let t' = Term.Fun (g, [g_f_f_x_f_x; g_x_x]) in
Format.printf "Code for %s: \n %s\n\n"
(Term.to_string t)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value t));
(* INSERT *)
let tree =
TermCodeTree.insert (TermCodeTree.code_for_value g_f_f_x_c) g_f_f_x_y
in
Format.printf "Code for insert: \n %s\n\n"
(TermCodeTree.code_to_string tree);
let tree' = TermCodeTree.insert tree t in
Format.printf "Code for insert: \n %s\n\n"
(TermCodeTree.code_to_string tree');
let g_f_f_y_c = Term.Fun (g, [Term.Fun (f, [Term.Fun (f, [y])]); c_]) in
let tree' = TermCodeTree.insert tree' g_f_f_y_c in
Format.printf "Code for insert g_f_f_y_c: \n %s\n\n"
(TermCodeTree.code_to_string tree');
(* DELETE *)
let tree'' = TermCodeTree.delete tree' g_f_f_y_c in
Format.printf "Code for delete g_f_f_y_c again: \n %s\n\n"
(TermCodeTree.code_to_string tree'');
Format.printf " Code for delete g_x_x : \n % s\n\n "
( TermCodeTree.code_to_string ( TermCodeTree.delete tree ' g_x_x ) ) ;
(TermCodeTree.code_to_string (TermCodeTree.delete tree' g_x_x));*)
(* VARIANTS *)
let variants = TermCodeTree.variant_candidates tree' g_f_f_x_f_x in
let variants' = TermCodeTree.variant_candidates tree' g_f_f_x_y in
Format.printf "variants for %s: %s, %s: %s\n"
(Term.to_string g_f_f_x_f_x)
(List.to_string Term.to_string "" variants)
(Term.to_string g_f_f_x_y)
(List.to_string Term.to_string "" variants');
let tree' = TermCodeTree.insert tree' t' in
GENERALIZATIONS
let u = Term.Fun (g, [f_x; y]) in
let tree' = TermCodeTree.insert tree' u in
let gens = TermCodeTree.generalization_candidates tree' g_f_f_y_c in
Format.printf "generalizations for %s: %s\n"
(Term.to_string g_f_f_y_c)
(List.to_string Term.to_string "" gens); (* ok *)
let gens = TermCodeTree.generalization_candidates tree' u in
Format.printf "generalizations for %s: %s\n"
(Term.to_string u)
(List.to_string Term.to_string "" gens); (* ok *)
let s = Term.Fun (g, [f_x; x]) in
let tree' = TermCodeTree.insert tree' s in
let gens = TermCodeTree.generalization_candidates tree' g_f_f_x_f_x in
Format.printf "generalizations for %s: %s\n"
(Term.to_string g_f_f_x_f_x)
(List.to_string Term.to_string "" gens);
(***** ENCOMPASSMENTS *****)
let gens = TermCodeTree.encompassment_candidates_not_strict tree' t in
let f (t, p) = (Term.to_string t) ^ "@" ^ (Position.to_string p) ^ "\n" in
Format.printf "encompassments for %s: %s\n"
(Term.to_string t)
(List.to_string f "" gens);
;;
*)
(* test ()*)
| null | https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/mkbtt/termindexing/codeTree.ml | ocaml | * Term indexing using code trees
** OPENS *******************************************************************
** EXCEPTIONS *********************************************************
** MODULES ************************************************************
** SUBMODULES *********************************************************
** TYPES **************************************************************
convenient for generalization retrievals
next, after, subterm here
next = after
** GLOBALS ************************************************************
** FUNCTIONS **********************************************************
output code
****************** CONSTRUCTION OF CODE TREES **********************
assume code is just code, not tree (otherwise, change case for
Success in tree
cases relevant?
variant
add entry element into code tree
Format.printf "Code is %s\n" (code_to_string code);
Format.printf "Tree is %s\n" (code_to_string tree');
variant
removes the value from the index. if not found, Not_in_index is raised
******** RETRIEVAL OPERATIONS *****************************************
**** VARIANTS ****
**** GENERALIZATIONS ****
add t here, required in gen retrieve
**** ENCOMPASSMENTS ****
Make
INSERT
DELETE
VARIANTS
ok
ok
**** ENCOMPASSMENTS ****
test () | Copyright 2010
* GNU Lesser General Public License
*
* This file is part of MKBtt .
*
* is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of MKBtt.
*
* MKBtt is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* MKBtt is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt. If not, see </>.
*)
*
@author
@since 2009/07/21
@author Sarah Winkler
@since 2009/07/21 *)
open Util;;
exception No_back_pointer
exception Malformed_tree of string
exception Not_in_index
exception Empty_branch
module Fun = Rewriting.Function;;
module Pos = Rewriting.Position;;
module Var = Rewriting.Variable;;
module T = U.Term;;
module M = U.Monad;;
open M;;
: TermIndex . T with type entry = Entry.t
= functor (Entry: TermIndex.ENTRY_TYPE) ->
struct
module EL = TermIndex.EntryList(Entry);;
type entry = Entry.t
type instruction =
| Check of Fun.t * instruction * instruction
| Put of int * instruction * instruction
| Compare of int * int * instruction * instruction
| Success of Entry.t list
| Fail
;;
type t = instruction
type flatterm =
| End
;;
let is_empty t = return (t == Fail)
let cont n = function
| Check (_, c, _ )
| Put (_, c, _ )
| Compare (_, _, c, _ ) -> c
| _ -> raise (Malformed_tree "cont does not exist")
;;
let back n = function
| Check (_, _, b)
| Put (_, _, b)
| Compare (_, _, _, b) -> b
| _ -> raise (Malformed_tree "cont does not exist")
;;
let set_back instruction b' =
match instruction with
| Check (f, c, b) -> Check (f, c, b')
| Put (n, c, b) -> Put (n, c, b')
| Compare (m, k, c, b) -> Compare (m, k, c, b')
| _ -> raise (Malformed_tree "back does not exist")
;;
let rec code_to_string c =
match c with
| Check(f, c, b) ->
let cs, bs = code_to_string c, code_to_string b in
"Check(" ^ (Fun.to_string f) ^ ", " ^ cs ^ ", " ^ bs ^ ")"
| Put(k, c, b) ->
let cs, bs = code_to_string c, code_to_string b in
"Put(" ^ (string_of_int k) ^ ", " ^ cs ^ ", " ^ bs ^ ")"
| Compare(m, k, c, b) ->
let cs, bs = code_to_string c, code_to_string b in
let sk, sm = string_of_int k, string_of_int m in
"Compare(" ^ sm ^ ", " ^ sk ^ ", " ^ cs ^ ", " ^ bs ^ ")"
| Success values -> "Success" ^ (List.join Entry.to_string " " values)
| Fail -> "Fail"
;;
let lookup table x i code =
try
let j = List.assoc x table in
(Compare(j, i, Fail, Fail)) :: code, table
with Not_found ->
code, (x, i) :: table
;;
let rec code_list tcodes ccodes i table = function
| T.Var x ->
let ccodes', table' = lookup table x i ccodes in
(Put (i, Fail, Fail)) :: tcodes, ccodes', table', i + 1
| T.Fun(f, ts) ->
let tcodes' = (Check(f, Fail, Fail)) :: tcodes in
List.fold_left app_tcode (tcodes', ccodes, table, i) ts
and app_tcode (tcodes, ccodes, table, i) t =
code_list tcodes ccodes i table t
;;
let rec combine_code instruction = function
| [] -> instruction
| Check(f, _, _) :: l ->
combine_code (Check(f, instruction, Fail)) l
| Put(k, _, _) :: l ->
combine_code (Put(k, instruction, Fail)) l
| Compare(k, m,_, _) :: l ->
combine_code (Compare(k, m, instruction, Fail)) l
| _ -> raise (Malformed_tree "Compare/Fail/Success not expected")
;;
let code_for_term t =
let success = Success [] in
let tcode, ccode, _, _ = code_list [] [] 0 [] t in
combine_code (combine_code success ccode) tcode
;;
let code_for_value (t, v) =
let success = Success [v] in
let tcode, ccode, _, _ = code_list [] [] 0 [] t in
combine_code (combine_code success ccode) tcode
;;
let make () = Fail
let rec insert' code tree =
match code, tree with
| _, Fail -> code
| Check(f, c, _), Check(g, c', b') when (Fun.compare f g) == 0 ->
Check(g, insert' c c', b')
| Compare(m, k, c, _), Compare(m', k', c', b') when (k == k') && (m == m') ->
Compare(m', k', insert' c c', b')
| Put(k, c, _), Put(k', c', b') when k = k' ->
Put(k, insert' c c', b')
| _, Check(_, _, b)
| _, Compare (_, _, _, b)
| _, Put (_, _, b) ->
set_back tree (insert' code b)
| Compare(_, _, _, b), Success vs
| Put(_, _, b), Success vs ->
set_back code (Success vs)
| Success v, Success values ->
| Fail, Success _ -> raise (Malformed_tree "Fail, Success not expected")
;;
let insert tree (term, value) =
T.to_stringm term > > = fun s - >
Format.printf " Insert into index term % s\n% ! " s ;
Format.printf " Tree before is % s\n " ( code_to_string tree ) ;
Format.printf "Insert into index term %s\n%!" s;
Format.printf "Tree before is %s\n" (code_to_string tree);*)
let code = code_for_value (term, value) in
let tree' = insert' code tree in
return tree'
;;
let rec remove_code code tree v =
match code, tree with
| Fail, _ -> raise (Malformed_tree "Fail in code not expected")
| Check(f,c,_), Check(g,c',b') when (Fun.compare f g) == 0 ->
(try Check(g, remove_code c c' v, b')
with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
| Compare(m,k,c,_), Compare(m',k',c',b') when (k==k') && (m==m') ->
(try Compare(m', k', remove_code c c' v, b')
with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
| Put(k, c, b), Put(k', c', b') when k = k' ->
(try Put(k', remove_code c c' v, b')
with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
| _, Check(_, _, b)
| _, Compare(_, _, _, b)
| _, Put(_, _, b) ->
(try set_back tree (remove_code code b v)
with Empty_branch -> set_back tree Fail)
| Success v, Success values ->
if (List.length values) == 1 then raise Empty_branch
| _, Success _ -> raise (Malformed_tree "Success in tree not expected")
| _ -> raise Not_in_index
;;
let delete tree value =
T.to_stringm ( fst value ) > > = fun s - >
Format.printf " Remove term % s\n% ! " s ;
Format.printf "Remove term %s\n%!" s;*)
let code = code_for_value value in
let tree' = try remove_code code tree value with Empty_branch -> Fail in
return tree'
;;
let rec retrieve_variants tree code =
match tree, code with
| Check(f, c, b), Check(g, c', _) when (Fun.compare f g) == 0 ->
retrieve_variants c c'
| Compare(m, k, c, b), Compare(m', k', c', _) when (k == k') && (m == m') ->
retrieve_variants c c'
| Put(k, c, b), Put(k', c', _) when k = k' ->
retrieve_variants c c'
| Check(_, _, b), _
| Compare(_, _, _, b), _
| Put(_, _, b), _ ->
retrieve_variants b code
| Success variants, Success _ -> variants
| Fail, _
| Success _, _ -> []
;;
let variant_candidates tree term =
let code = code_for_term term in
let vars = retrieve_variants tree code in
U.Term.to_stringm term > > = fun s - >
Format.printf " CT : vars 4 % s : % i:\n%s\n " s ( vars )
( List.foldl ( fun s x - > ( Entry.to_string x)^s ) " " vars ) ;
Format.printf "CT: vars 4 %s: %i:\n%s\n" s (List.length vars)
(List.foldl (fun s x -> (Entry.to_string x)^s) "" vars);*)
return vars
;;
let rec flatten' after t =
match t with
| T.Var x -> Var (x, after)
| T.Fun(f, ts) ->
let flat_ts = List.fold_right (fun t l -> flatten' l t) ts after in
;;
let flatten = flatten' End
let subst table i =
try
List.assoc i table
with
Not_found -> raise (Malformed_tree "compare without put")
;;
let rec retrieve_generalizations tree t_flat sub =
match tree, t_flat with
| Check(f, c, b), Fun(g, next, after, _) when (Fun.compare f g) == 0 ->
let gens = retrieve_generalizations c next sub in
EL.union (retrieve_generalizations b t_flat sub) gens
| Compare(m, k, c, b), End ->
let gens = retrieve_generalizations b End sub in
if (compare (subst sub m) (subst sub k)) == 0 then
EL.union (retrieve_generalizations c End sub) gens
else
gens
| Put(k, c, b), Var (x, after) ->
let subterm = T.Var x in
let gens = retrieve_generalizations c after ((k, subterm) :: sub) in
EL.union (retrieve_generalizations b t_flat sub) gens
| Put(k, c, b), Fun (_, _, after, subterm) ->
let gens = retrieve_generalizations c after ((k, subterm) :: sub) in
EL.union (retrieve_generalizations b t_flat sub) gens
| Check(_, _, b), _ ->
retrieve_generalizations b t_flat sub
| Success entries, End -> entries
| Fail, _
| Compare _, _
| Success _, _ -> []
| Put _, End -> raise (Malformed_tree "not malformed?")
;;
find generalizations for a given term in dtree
let generalization_candidates tree term =
let t_flat = flatten term in
let gens = retrieve_generalizations tree t_flat [] in
return gens
;;
given a term , non - var generalization of subterms are returned ,
paired with the subterm 's position . Not strict ! Also not possible
as indexing destroys nonlinearity .
paired with the subterm's position. Not strict! Also not possible
as indexing destroys nonlinearity. *)
let encompassment_candidates tree term =
let pos_st = Termx.nonvar_pos_proper_subterms term in
let ecs =
List.fold_left
( fun r ( t , p ) - >
let gs = retrieve_generalizations tree ( flatten t ) [ ] in
( List.map ( fun n - > ( n , p ) ) gs ) @ r )
[ ] ( ( term , Pos.root ) : : pos_st )
in
return ecs
; ;
let encompassment_candidates tree term =
let pos_st = Termx.nonvar_pos_proper_subterms term in
let ecs =
List.fold_left
(fun r (t, p) ->
let gs = retrieve_generalizations tree (flatten t) [] in
(List.map (fun n -> (n, p)) gs) @ r)
[] ((term,Pos.root) :: pos_st)
in
return ecs
;;*)
given a term , non - var generalization of subterms are returned ,
paired with the subterm 's position . Not strict !
paired with the subterm's position. Not strict! *)
let encompassment_candidates_below_root tree term =
let pos_st = Termx.nonvar_pos_proper_subterms term in
let ecs =
List.fold_left
(fun r (t, p) ->
let gs = retrieve_generalizations tree (flatten t) [] in
(List.map (fun n -> (n, p)) gs) @ r)
[] pos_st
in
return ecs
;;
let encompassment_candidates tree term =
let at_root = retrieve_generalizations tree (flatten term) [] in
encompassment_candidates_below_root tree term >>= fun below ->
let root = flip Pair.make Pos.root in
return (List.rev_append (List.map root at_root) below)
;;
let size t = is_empty t >>= fun b -> return (if b then 0 else 1)
let overlap1_candidates t = failwith "CodeTree: overlaps not implemented"
let overlap1_candidates_below_root t =
failwith "CodeTree: overlaps not implemented"
;;
let overlap2_candidates t = failwith "CodeTree: overlaps not implemented"
let unification_candidates t =
failwith "CodeTree: unification not implemented"
;;
module TermCodeTree = Make(TermIndex.TermEntry)
let test ( ) =
Format.printf " testing module CodeTree\n " ;
let c = Fun.of_string " c " 0 in
let f = Fun.of_string " f " 1 in
let g = Fun.of_string " g " 2 in
let x = Term . ( Var.of_string " x " ) in
let y = Term . ( Var.of_string " y " ) in
let f_x = Term . Fun ( f , [ x ] ) in
let f_f_x = Term . Fun ( f , [ f_x ] ) in
let c _ = Term . Fun ( c , [ ] ) in
let g_x_x = Term . Fun(g , [ x ; x ] ) in
Format.printf " Code for % s : \n % s\n "
( Term.to_string f_f_x )
( TermCodeTree.code_to_string ( TermCodeTree.code_for_value f_f_x ) ) ;
Format.printf " Code for % s : \n % s\n "
( Term.to_string g_x_x )
( TermCodeTree.code_to_string ( TermCodeTree.code_for_value g_x_x ) ) ;
let g_f_f_x_c = Term . Fun ( g , [ f_f_x ; c _ ] ) in
Format.printf " Code for % s : \n % s\n\n "
( Term.to_string g_f_f_x_c )
( TermCodeTree.code_to_string ( ) ) ;
let = Term . Fun ( g , [ f_f_x ; f_x ] ) in
let g_f_f_x_y = Term . Fun ( g , [ f_f_x ; y ] ) in
Format.printf " Code for % s : \n % s\n\n "
( Term.to_string g_f_f_x_f_x )
( TermCodeTree.code_to_string ( ) ) ;
let t = Term . Fun ( g , [ g_f_f_x_f_x ; y ] ) in
let t ' = Term . Fun ( g , [ g_f_f_x_f_x ; g_x_x ] ) in
Format.printf " Code for % s : \n % s\n\n "
( Term.to_string t )
( TermCodeTree.code_to_string ( t ) ) ;
( * INSERT
let test () =
Format.printf "testing module CodeTree\n";
let c = Fun.of_string "c" 0 in
let f = Fun.of_string "f" 1 in
let g = Fun.of_string "g" 2 in
let x = Term.Var (Var.of_string "x") in
let y = Term.Var (Var.of_string "y") in
let f_x = Term.Fun (f, [x]) in
let f_f_x = Term.Fun (f, [f_x]) in
let c_ = Term.Fun (c, []) in
let g_x_x = Term.Fun(g, [x; x]) in
Format.printf "Code for %s: \n %s\n"
(Term.to_string f_f_x)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value f_f_x));
Format.printf "Code for %s: \n %s\n"
(Term.to_string g_x_x)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value g_x_x));
let g_f_f_x_c = Term.Fun (g, [f_f_x; c_]) in
Format.printf "Code for %s: \n %s\n\n"
(Term.to_string g_f_f_x_c)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_c));
let g_f_f_x_f_x = Term.Fun (g, [f_f_x; f_x]) in
let g_f_f_x_y = Term.Fun (g, [f_f_x; y]) in
Format.printf "Code for %s: \n %s\n\n"
(Term.to_string g_f_f_x_f_x)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_f_x));
let t = Term.Fun (g, [g_f_f_x_f_x; y]) in
let t' = Term.Fun (g, [g_f_f_x_f_x; g_x_x]) in
Format.printf "Code for %s: \n %s\n\n"
(Term.to_string t)
(TermCodeTree.code_to_string (TermCodeTree.code_for_value t));
let tree =
TermCodeTree.insert (TermCodeTree.code_for_value g_f_f_x_c) g_f_f_x_y
in
Format.printf "Code for insert: \n %s\n\n"
(TermCodeTree.code_to_string tree);
let tree' = TermCodeTree.insert tree t in
Format.printf "Code for insert: \n %s\n\n"
(TermCodeTree.code_to_string tree');
let g_f_f_y_c = Term.Fun (g, [Term.Fun (f, [Term.Fun (f, [y])]); c_]) in
let tree' = TermCodeTree.insert tree' g_f_f_y_c in
Format.printf "Code for insert g_f_f_y_c: \n %s\n\n"
(TermCodeTree.code_to_string tree');
let tree'' = TermCodeTree.delete tree' g_f_f_y_c in
Format.printf "Code for delete g_f_f_y_c again: \n %s\n\n"
(TermCodeTree.code_to_string tree'');
Format.printf " Code for delete g_x_x : \n % s\n\n "
( TermCodeTree.code_to_string ( TermCodeTree.delete tree ' g_x_x ) ) ;
(TermCodeTree.code_to_string (TermCodeTree.delete tree' g_x_x));*)
let variants = TermCodeTree.variant_candidates tree' g_f_f_x_f_x in
let variants' = TermCodeTree.variant_candidates tree' g_f_f_x_y in
Format.printf "variants for %s: %s, %s: %s\n"
(Term.to_string g_f_f_x_f_x)
(List.to_string Term.to_string "" variants)
(Term.to_string g_f_f_x_y)
(List.to_string Term.to_string "" variants');
let tree' = TermCodeTree.insert tree' t' in
GENERALIZATIONS
let u = Term.Fun (g, [f_x; y]) in
let tree' = TermCodeTree.insert tree' u in
let gens = TermCodeTree.generalization_candidates tree' g_f_f_y_c in
Format.printf "generalizations for %s: %s\n"
(Term.to_string g_f_f_y_c)
let gens = TermCodeTree.generalization_candidates tree' u in
Format.printf "generalizations for %s: %s\n"
(Term.to_string u)
let s = Term.Fun (g, [f_x; x]) in
let tree' = TermCodeTree.insert tree' s in
let gens = TermCodeTree.generalization_candidates tree' g_f_f_x_f_x in
Format.printf "generalizations for %s: %s\n"
(Term.to_string g_f_f_x_f_x)
(List.to_string Term.to_string "" gens);
let gens = TermCodeTree.encompassment_candidates_not_strict tree' t in
let f (t, p) = (Term.to_string t) ^ "@" ^ (Position.to_string p) ^ "\n" in
Format.printf "encompassments for %s: %s\n"
(Term.to_string t)
(List.to_string f "" gens);
;;
*)
|
2d15462f3dfb7abb6a87dfbb0692273cd9c76cdfcae05b89b48377f7c0244d7c | haskellari/indexed-traversable | GhcList.hs | {-# LANGUAGE CPP #-}
#if MIN_VERSION_base(4,17,0)
{-# LANGUAGE Safe #-}
#elif __GLASGOW_HASKELL__ >= 702
# LANGUAGE Trustworthy #
#endif
module GhcList (
build,
) where
#if MIN_VERSION_base(4,17,0)
import GHC.List (build)
#else
import GHC.Exts (build)
#endif
| null | https://raw.githubusercontent.com/haskellari/indexed-traversable/8403a52163e5b8f3ec32a2846b53ccc2e8088a6f/indexed-traversable/src/GhcList.hs | haskell | # LANGUAGE CPP #
# LANGUAGE Safe # | #if MIN_VERSION_base(4,17,0)
#elif __GLASGOW_HASKELL__ >= 702
# LANGUAGE Trustworthy #
#endif
module GhcList (
build,
) where
#if MIN_VERSION_base(4,17,0)
import GHC.List (build)
#else
import GHC.Exts (build)
#endif
|
5f4bf801f0e07c26630f9b98714dcb90238998b8f63796e2642b954c933544e2 | alanz/ghc-exactprint | SH_Overlap9.hs | # OPTIONS_GHC -fwarn - safe #
# LANGUAGE FlexibleInstances #
-- | Same as `SH_Overlap6`, but now we are inferring safety. Should be inferred
-- unsafe due to overlapping instances at call site `f`.
module SH_Overlap9 where
import SH_Overlap9_A
instance
C [a] where
f _ = "[a]"
test :: String
test = f ([1,2,3,4] :: [Int])
| null | https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc80/SH_Overlap9.hs | haskell | | Same as `SH_Overlap6`, but now we are inferring safety. Should be inferred
unsafe due to overlapping instances at call site `f`. | # OPTIONS_GHC -fwarn - safe #
# LANGUAGE FlexibleInstances #
module SH_Overlap9 where
import SH_Overlap9_A
instance
C [a] where
f _ = "[a]"
test :: String
test = f ([1,2,3,4] :: [Int])
|
85cccff35599098082d332fcc983d93f978779fc63b43311cca246dda1f99ee0 | janestreet/async_rpc_kernel | rpc_metadata.mli | * Metadata is arbitrary information provided by a caller along with the query . It is
opaque to the Async RPC protocol , and may not be present on all queries . Metadata
should generally be small , middleware - provided data that does not affect the callee 's
behavior ( e.g. tracing ids ) . It may be subject to truncation if values provided are
too large . See [ Connection.create ] for more info .
opaque to the Async RPC protocol, and may not be present on all queries. Metadata
should generally be small, middleware-provided data that does not affect the callee's
behavior (e.g. tracing ids). It may be subject to truncation if values provided are
too large. See [Connection.create] for more info. *)
open! Core
type t = string [@@deriving sexp_of]
* Retrieves the metadata in the context of the current RPC call , if it is available .
val get : unit -> t option
module Private : sig
val with_metadata : t option -> f:(unit -> 'a) -> 'a
end
| null | https://raw.githubusercontent.com/janestreet/async_rpc_kernel/541fb417b39fad5c930ac73b729a7aaf59bd1001/src/rpc_metadata.mli | ocaml | * Metadata is arbitrary information provided by a caller along with the query . It is
opaque to the Async RPC protocol , and may not be present on all queries . Metadata
should generally be small , middleware - provided data that does not affect the callee 's
behavior ( e.g. tracing ids ) . It may be subject to truncation if values provided are
too large . See [ Connection.create ] for more info .
opaque to the Async RPC protocol, and may not be present on all queries. Metadata
should generally be small, middleware-provided data that does not affect the callee's
behavior (e.g. tracing ids). It may be subject to truncation if values provided are
too large. See [Connection.create] for more info. *)
open! Core
type t = string [@@deriving sexp_of]
* Retrieves the metadata in the context of the current RPC call , if it is available .
val get : unit -> t option
module Private : sig
val with_metadata : t option -> f:(unit -> 'a) -> 'a
end
|
|
6771c3d64b28efe1bd2eead1a00e0d49576f11f430ff39602d538a7c8f4d162a | pqwy/notty | notty_top_init.ml | Copyright ( c ) 2017 . All rights reserved .
See LICENSE.md .
See LICENSE.md. *)
open Notty;;
#install_printer Notty.Render.pp_image;;
#install_printer Notty.Render.pp_attr;;
| null | https://raw.githubusercontent.com/pqwy/notty/389366c023396017aa21efcdbb07ade5ba0974c5/src/notty_top_init.ml | ocaml | Copyright ( c ) 2017 . All rights reserved .
See LICENSE.md .
See LICENSE.md. *)
open Notty;;
#install_printer Notty.Render.pp_image;;
#install_printer Notty.Render.pp_attr;;
|
|
c6b904a109064fdbcdd47b942a4448d1892030c35f153fa778071da196ef2869 | hasktorch/ffi-experimental | Scalar.hs | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
module Torch.Scalar where
import Foreign.ForeignPtr
import qualified ATen.Const as ATen
import qualified ATen.Managed.Type.Scalar as ATen
import qualified ATen.Type as ATen
import ATen.Managed.Cast
import ATen.Class (Castable(..))
import ATen.Cast
instance Castable Float (ForeignPtr ATen.Scalar) where
cast x f = ATen.newScalar_d (realToFrac x) >>= f
uncast x f = undefined
instance Castable Double (ForeignPtr ATen.Scalar) where
cast x f = ATen.newScalar_d (realToFrac x) >>= f
uncast x f = undefined
instance Castable Int (ForeignPtr ATen.Scalar) where
cast x f = ATen.newScalar_i (fromIntegral x) >>= f
uncast x f = undefined
class (Castable a (ForeignPtr ATen.Scalar)) => Scalar a
instance Scalar Float
instance Scalar Double
instance Scalar Int
| null | https://raw.githubusercontent.com/hasktorch/ffi-experimental/54192297742221c4d50398586ba8d187451f9ee0/hasktorch/src/Torch/Scalar.hs | haskell | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
module Torch.Scalar where
import Foreign.ForeignPtr
import qualified ATen.Const as ATen
import qualified ATen.Managed.Type.Scalar as ATen
import qualified ATen.Type as ATen
import ATen.Managed.Cast
import ATen.Class (Castable(..))
import ATen.Cast
instance Castable Float (ForeignPtr ATen.Scalar) where
cast x f = ATen.newScalar_d (realToFrac x) >>= f
uncast x f = undefined
instance Castable Double (ForeignPtr ATen.Scalar) where
cast x f = ATen.newScalar_d (realToFrac x) >>= f
uncast x f = undefined
instance Castable Int (ForeignPtr ATen.Scalar) where
cast x f = ATen.newScalar_i (fromIntegral x) >>= f
uncast x f = undefined
class (Castable a (ForeignPtr ATen.Scalar)) => Scalar a
instance Scalar Float
instance Scalar Double
instance Scalar Int
|
|
e8e44553c6bc715e30d1f0cfe4aee8dba9a3f8964713f76f40af4bf96db3856f | threatgrid/ctia | crud.clj | (ns ctia.stores.es.crud
(:require
[clojure.set :as set]
[clojure.string :as string]
[clojure.tools.logging :as log]
[ctia.domain.access-control :as ac
:refer [allow-read? allow-write? restricted-read?]]
[ctia.lib.pagination :refer [list-response-schema]]
[ctia.schemas.core :refer [SortExtension SortExtensionDefinitions]]
[ctia.schemas.search-agg
:refer [AggQuery CardinalityQuery HistogramQuery QueryStringSearchArgs SearchQuery TopnQuery]]
[ctia.stores.es.sort :as es.sort]
[ctia.stores.es.query :as es.query]
[ctia.stores.es.schemas :refer [ESConnState]]
[ductile.document :as ductile.doc]
[ductile.query :as q]
[ring.swagger.coerce :as sc]
[schema-tools.core :as st]
[schema.coerce :as c]
[schema.core :as s]))
(defn make-es-read-params
"Prepare ES Params for read operations, setting the _source field
and including ACL mandatory ones."
[{:keys [fields]
:as es-params}]
(cond-> es-params
(coll? fields)
(-> (assoc :_source (concat fields ac/acl-fields))
(dissoc :fields))))
(defn coerce-to-fn
[Model]
(c/coercer! Model sc/json-schema-coercion-matcher))
(defn ensure-document-id
"Returns a document ID. if id is a object ID, it extract the
document ID, if it's a document ID already, it will just return
that."
[id]
(let [[_orig docid] (re-matches #".*?([^/]+)\z" id)]
docid))
(defn ensure-document-id-in-map
"Ensure a document ID in a given filter map"
[{:keys [id] :as m}]
(cond-> m
(string? id) (update :id list)
id (update :id #(map ensure-document-id %))))
(defn remove-es-actions
"Removes the ES action level
[{:index {:_id \"1\"}}
{:index {:_id \"2\"}}]
->
[{:_id \"1\"}
{:_id \"2\"}]
"
[items]
(map (comp first vals) items))
(defn build-create-result
[item coerce-fn]
(-> item
(dissoc :_id :_index :_type)
coerce-fn))
(defn partial-results
"Build partial results when an error occurs for one or more items
in the bulk operation.
Ex:
[{model1}
{:error \"Error message item2\"}
{model3}]"
[exception-data models coerce-fn]
(let [{{:keys [items]}
:es-http-res-body} exception-data]
{:data (map (fn [{:keys [error _id]} model]
(if error
{:error error
:id _id}
(build-create-result model coerce-fn)))
(remove-es-actions items) models)}))
(s/defn get-docs-with-indices
"Retrieves a documents from a search \"ids\" query. It enables to retrieves
documents from an alias that points to multiple indices.
It returns the documents with full hits meta data including the real index in which is stored the document."
[{:keys [conn index] :as _conn-state} :- ESConnState
ids :- [s/Str]
es-params]
(let [limit (count ids)
ids-query (q/ids (map ensure-document-id ids))
res (ductile.doc/query conn
index
ids-query
(assoc (make-es-read-params es-params)
:limit limit
:full-hits? true))]
(:data res)))
(s/defn get-doc-with-index
"Retrieves a document from a search \"ids\" query. It is used to perform a get query on an alias that points to multiple indices.
It returns the document with full hits meta data including the real index in which is stored the document."
[conn-state :- ESConnState
_id :- s/Str
es-params]
(first (get-docs-with-indices conn-state [_id] es-params)))
(defn ^:private prepare-opts
[{:keys [props]}
{:keys [refresh]}]
{:refresh (or refresh
(:refresh props)
"false")})
(s/defn bulk-schema
[Model :- (s/pred map?)]
(st/optional-keys
{:create [Model]
:index [Model]
:update [(st/optional-keys Model)]
:delete [s/Str]}))
(s/defn ^:private prepare-bulk-doc
[{:keys [props]} :- ESConnState
mapping :- s/Keyword
doc :- (s/pred map?)]
(assoc doc
:_id (:id doc)
:_index (:write-index props)
:_type (name mapping)))
(defn handle-create
"Generate an ES create handler using some mapping and schema"
[mapping Model]
(let [coerce! (coerce-to-fn (s/maybe Model))]
(s/fn :- [Model]
[{:keys [conn] :as conn-state} :- ESConnState
docs :- [Model]
_ident
es-params]
(let [prepare-doc (partial prepare-bulk-doc conn-state mapping)
prepared (mapv prepare-doc docs)]
(try
(ductile.doc/bulk-index-docs conn
prepared
(prepare-opts conn-state es-params))
docs
(catch Exception e
(throw
(if-let [ex-data (ex-data e)]
;; Add partial results to the exception data map
(ex-info (.getMessage e)
(partial-results ex-data docs coerce!))
e))))))))
(defn handle-update
"Generate an ES update handler using some mapping and schema"
[mapping Model]
(let [coerce! (coerce-to-fn (s/maybe Model))]
(s/fn :- (s/maybe Model)
[{:keys [conn] :as conn-state} :- ESConnState
id :- s/Str
realized :- Model
ident
es-params]
(when-let [[{index :_index current-doc :_source}]
(get-docs-with-indices conn-state [id] {})]
(if (allow-write? current-doc ident)
(let [update-doc (assoc realized
:id (ensure-document-id id))]
(ductile.doc/index-doc conn
index
(name mapping)
update-doc
(prepare-opts conn-state es-params))
(coerce! update-doc))
(throw (ex-info "You are not allowed to update this document"
{:type :access-control-error})))))))
(defn handle-read
"Generate an ES read handler using some mapping and schema"
[Model]
(let [coerce! (coerce-to-fn (s/maybe Model))]
(s/fn :- (s/maybe Model)
[{{{:keys [get-in-config]} :ConfigService}
:services
:as conn-state}
:- ESConnState
id :- s/Str
ident
es-params]
(when-let [doc (-> (get-doc-with-index conn-state
id
(make-es-read-params es-params))
:_source
coerce!)]
(if (allow-read? doc ident get-in-config)
doc
(throw (ex-info "You are not allowed to read this document"
{:type :access-control-error})))))))
(defn handle-read-many
"Generate an ES read-many handler using some mapping and schema"
[Model]
(let [coerce! (coerce-to-fn Model)]
(s/fn :- [(s/maybe Model)]
[{{{:keys [get-in-config]} :ConfigService}
:services
:as conn-state}
:- ESConnState
ids :- [s/Str]
ident
{:keys [suppress-access-control-error?]
:or {suppress-access-control-error? false}
:as es-params}]
(sequence
(comp (map :_source)
(map coerce!)
(map (fn [record]
(if (allow-read? record ident get-in-config)
record
(let [ex (ex-info "You are not allowed to read this document"
{:type :access-control-error})]
(if suppress-access-control-error?
(log/error ex)
(throw ex)))))))
(get-docs-with-indices conn-state ids (make-es-read-params es-params))))))
(defn access-control-filter-list
"Given an ident, keep only documents it is allowed to read"
[docs ident get-in-config]
(filter #(allow-read? % ident get-in-config) docs))
(s/defschema BulkResult
(st/optional-keys
{:deleted [s/Str]
:updated [s/Str]
:errors (st/optional-keys
{:forbidden [s/Str]
:not-found [s/Str]
:internal-error [s/Str]})}))
(s/defschema ESActionResult
(st/open-schema
{:_id s/Str
:_index s/Str
:status s/Int
:result s/Str}))
;; TODO move it to ductile
(s/defschema ESBulkRes
{:took s/Int
:errors s/Bool
:items [{ductile.doc/BulkOps ESActionResult}]})
(s/defn ^:private format-bulk-res
"transform an elasticsearch bulk result into a CTIA Bulk Result.
ex: https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html#docs-bulk-api-example"
[bulk-res :- ESBulkRes]
(let [{:keys [deleted updated not_found]}
(->> (:items bulk-res)
(map (comp first vals))
(group-by :result)
(into {}
(map (fn [[result items]]
{(keyword result) (map :_id items)}))))]
(cond-> {}
deleted (assoc :deleted deleted)
updated (assoc :updated updated)
not_found (assoc-in [:errors :not-found] not_found))))
(s/defn check-and-prepare-bulk
  :- (st/assoc BulkResult
               (s/optional-key :prepared)
               [(s/pred map?)])
  "prepare a bulk query:
  - retrieve actual indices, deletion cannot be performed on the alias.
  - filter out forbidden entities
  - forbidden and not_found errors are prepared for the response."
  [conn-state :- ESConnState
   ids :- [s/Str]
   ident]
  (let [;; BUGFIX: bind the get-in-config fn itself, not the whole
        ;; ConfigService map. Every sibling handler destructures
        ;; {:keys [get-in-config]} and allow-read? invokes it as a
        ;; function of a config path; passing the service map instead
        ;; silently yields nil for every lookup.
        get-in-config (get-in conn-state [:services :ConfigService :get-in-config])
        doc-ids (map ensure-document-id ids)
        docs-with-indices (get-docs-with-indices conn-state doc-ids {})
        ;; split found docs into writable and non-writable for ident
        {authorized true forbidden-write false}
        (group-by #(allow-write? (:_source %) ident)
                  docs-with-indices)
        ;; among non-writable docs, readable ones are reported as
        ;; forbidden, invisible ones as not-found
        {forbidden true not-visible false}
        (group-by #(allow-read? (:_source %) ident get-in-config)
                  forbidden-write)
        missing (set/difference (set doc-ids)
                                (set (map :_id docs-with-indices)))
        not-found (into (map :_id not-visible) missing)
        ;; keep only the metadata needed to address each doc in a bulk op
        prepared-docs (map #(select-keys % [:_index :_type :_id])
                           authorized)]
    (cond-> {}
      forbidden (assoc-in [:errors :forbidden] (map :_id forbidden))
      (seq not-found) (assoc-in [:errors :not-found] not-found)
      authorized (assoc :prepared prepared-docs))))
(s/defn bulk-delete :- BulkResult
[{:keys [conn] :as conn-state}
ids :- [s/Str]
ident
es-params]
(let [{:keys [prepared errors]} (check-and-prepare-bulk conn-state ids ident)
bulk-res (when prepared
(try
(format-bulk-res
(ductile.doc/bulk-delete-docs conn
prepared
(prepare-opts conn-state es-params)))
(catch Exception e
(log/error e
(str "bulk delete failed: " (.getMessage e))
(pr-str prepared))
{:errors {:internal-error (map :_id prepared)}})))]
(cond-> bulk-res
errors (update :errors
#(merge-with concat errors %)))))
(s/defn bulk-update
  "Generate an ES bulk update handler using some mapping and schema"
  [Model]
  (s/fn :- BulkResult
    [{:keys [conn] :as conn-state}
     docs :- [Model]
     ident
     es-params]
    (let [by-id (group-by :id docs)
          ids (seq (keys by-id))
          ;; check ACLs and resolve the concrete index of every doc
          {:keys [prepared errors]} (check-and-prepare-bulk conn-state
                                                            ids
                                                            ident)
          ;; merge each authorized doc into its ES meta (_index/_type/_id)
          prepared-docs (map (fn [meta]
                               (-> (:_id meta)
                                   by-id
                                   first
                                   (into meta)))
                             prepared)
          bulk-res (when prepared
                     (try
                       (format-bulk-res
                        (ductile.doc/bulk-index-docs conn
                                                     prepared-docs
                                                     (prepare-opts conn-state es-params)))
                       (catch Exception e
                         ;; CONSISTENCY FIX: pass the throwable as the first
                         ;; argument (as bulk-delete does) so the stacktrace
                         ;; is actually logged, not just the message.
                         (log/error e
                                    (str "bulk update failed: " (.getMessage e))
                                    (pr-str prepared))
                         {:errors {:internal-error (map :_id prepared)}})))]
      (cond-> bulk-res
        ;; merge ACL/not-found errors with any ES-level errors
        errors (update :errors
                       #(merge-with concat errors %))))))
(defn handle-delete
"Generate an ES delete handler using some mapping"
[mapping]
(s/fn :- s/Bool
[{:keys [conn] :as conn-state} :- ESConnState
id :- s/Str
ident
es-params]
(if-let [{index :_index doc :_source}
(get-doc-with-index conn-state id {})]
(if (allow-write? doc ident)
(ductile.doc/delete-doc conn
index
(name mapping)
(ensure-document-id id)
(prepare-opts conn-state es-params))
(throw (ex-info "You are not allowed to delete this document"
{:type :access-control-error})))
false)))
(s/defschema FilterSchema
(st/optional-keys
{:all-of {s/Any s/Any}
:one-of {s/Any s/Any}
:query s/Str}))
(def enumerable-fields-mapping
"Mapping table for all fields which needs to be renamed
for the sorting or aggregation. Instead of using fielddata we can have
a text field for full text searches, and an unanalysed keyword
field with doc_values enabled for sorting or aggregation"
{"title" "title.whole"
"reason" "reason.whole"})
(s/defn parse-sort-by :- [SortExtension]
"Parses the sort_by parameter
Ex:
\"title:ASC,revision:DESC\"
->
[{:op :field :field-name \"title\" :sort_order \"ASC\"}
{:op :field :field-name \"revision\" :sort_order \"DESC\"}]"
[sort_by]
(if ((some-fn string? simple-ident?) sort_by)
(map
(fn [field]
(let [[field-name field-order] (string/split field #":")]
(cond-> {:op :field
:field-name (keyword field-name)}
field-order (assoc :sort_order field-order))))
(string/split (name sort_by) #","))
sort_by))
(defn with-default-sort-field
[es-params {:keys [default-sort]}]
(assert (not (:sort_by es-params)))
(update es-params :sort #(or %
(some->> default-sort
parse-sort-by
(mapv (fn [m] (es.sort/parse-sort-params-op m :asc))))
[{"_doc" :asc} {"id" :asc}])))
(s/defn rename-sort-fields
"Renames sort fields based on the content of the `enumerable-fields-mapping` table
and remaps to script extensions."
[{:keys [sort_by sort_order] :as es-params}
sort-extension-definitions :- (s/maybe SortExtensionDefinitions)]
(cond-> (dissoc es-params :sort_by :sort_order)
(and sort_by (not (:sort es-params)))
(assoc :sort
(->> sort_by
parse-sort-by
(mapv (fn [field]
{:pre [(= :field (:op field))]}
(let [{:keys [field-name] :as field}
(update field :field-name #(or (keyword (enumerable-fields-mapping (name %)))
%))]
(assert (simple-keyword? field-name))
(-> (or (some-> (get sort-extension-definitions field-name)
(into (select-keys field [:sort_order]))
(update :field-name #(or % (:field-name field))))
field)
(es.sort/parse-sort-params-op (or sort_order :asc))))))))))
(s/defschema MakeQueryParamsArgs
{:params s/Any
:props s/Any
(s/optional-key :sort-extension-definitions) SortExtensionDefinitions})
(s/defn make-query-params :- {s/Keyword s/Any}
[{:keys [params props sort-extension-definitions]} :- MakeQueryParamsArgs]
(cond-> (-> params
(rename-sort-fields sort-extension-definitions)
(with-default-sort-field props)
make-es-read-params)
(<= 7 (:version props)) (assoc :track_total_hits true)))
(defn handle-find
"Generate an ES find/list handler using some mapping and schema"
[Model]
(let [response-schema (list-response-schema Model)
coerce! (coerce-to-fn response-schema)]
(s/fn :- response-schema
[{{{:keys [get-in-config]} :ConfigService} :services
:keys [conn index props]} :- ESConnState
{:keys [all-of one-of query]
:or {all-of {} one-of {}}} :- FilterSchema
ident
es-params]
(let [filter-val (cond-> (q/prepare-terms all-of)
(restricted-read? ident)
(conj (es.query/find-restriction-query-part ident get-in-config)))
query_string {:query_string {:query query}}
date-range-query (es.query/make-date-range-query es-params)
bool-params (cond-> {:filter filter-val}
(seq one-of) (into
{:should (q/prepare-terms one-of)
:minimum_should_match 1})
query (update :filter conj query_string)
(seq date-range-query) (update :filter conj {:range date-range-query}))
query-params (make-query-params {:params es-params :props props})]
(cond-> (coerce! (ductile.doc/query conn
index
(q/bool bool-params)
query-params))
(restricted-read? ident) (update :data
access-control-filter-list
ident
get-in-config))))))
(s/defn make-search-query :- {s/Keyword s/Any}
"Translate SearchQuery map into ES Query DSL map"
[es-conn-state :- ESConnState
search-query :- SearchQuery
ident]
(let [{:keys [services]} es-conn-state
{{:keys [get-in-config]} :ConfigService} services
{:keys [filter-map range full-text]} search-query
range-query (when range
{:range range})
filter-terms (-> (ensure-document-id-in-map filter-map)
q/prepare-terms)]
{:bool
{:filter
(cond-> [(es.query/find-restriction-query-part ident get-in-config)]
(seq filter-map) (into filter-terms)
(seq range) (conj range-query)
(seq full-text) (into (es.query/refine-full-text-query-parts
es-conn-state full-text)))}}))
(defn handle-query-string-search
"Generate an ES query handler for given schema schema"
[Model]
(let [response-schema (list-response-schema Model)
coerce! (coerce-to-fn response-schema)]
(s/fn :- response-schema
[{:keys [props] :as es-conn-state} :- ESConnState
{:keys [search-query ident] :as query-string-search-args} :- QueryStringSearchArgs]
(let [{conn :conn, index :index
{{:keys [get-in-config]} :ConfigService}
:services} es-conn-state
query (make-search-query es-conn-state search-query ident)
query-params (make-query-params (-> (select-keys query-string-search-args [:params :sort-extension-definitions])
(assoc :props props)))]
(cond-> (coerce! (ductile.doc/query
conn
index
query
query-params))
(restricted-read? ident) (update
:data
access-control-filter-list
ident
get-in-config))))))
(s/defn handle-delete-search
"ES delete by query handler"
[{:keys [conn index] :as es-conn-state} :- ESConnState
search-query :- SearchQuery
ident
es-params]
(let [query (make-search-query es-conn-state search-query ident)]
(:deleted
(ductile.doc/delete-by-query conn
[index]
query
(prepare-opts es-conn-state es-params)))))
(s/defn handle-query-string-count :- (s/pred nat-int?)
"ES count handler"
[{conn :conn
index :index
:as es-conn-state} :- ESConnState
search-query :- SearchQuery
ident]
(let [query (make-search-query es-conn-state search-query ident)]
(ductile.doc/count-docs conn
index
query)))
(s/defn make-histogram
  "Build an ES date_histogram aggregation clause."
  [{:keys [aggregate-on granularity timezone]
    :or {timezone "+00:00"}} :- HistogramQuery]
  {:date_histogram
   {:field aggregate-on
    ;; BUGFIX: `granularity` was destructured but never emitted, which
    ;; produced a date_histogram with no interval at all.
    :interval granularity ;; TODO switch to calendar_interval with ES7
    :time_zone timezone}})
(s/defn make-topn
[{:keys [aggregate-on limit sort_order]
:or {limit 10 sort_order :desc}} :- TopnQuery]
{:terms
{:field (get enumerable-fields-mapping aggregate-on aggregate-on)
:size limit
:order {:_count sort_order}}})
(s/defn make-cardinality
[{:keys [aggregate-on]} :- CardinalityQuery]
{:cardinality {:field (get enumerable-fields-mapping aggregate-on aggregate-on)
:precision_threshold 10000}})
(s/defn make-aggregation
[{:keys [agg-type agg-key aggs]
:or {agg-key :metric}
:as agg-query} :- AggQuery]
(let [root-agg (dissoc agg-query :aggs)
agg-fn
(case agg-type
:topn make-topn
:cardinality make-cardinality
:histogram make-histogram
(throw (ex-info (str "invalid aggregation type: " (pr-str agg-type))
{})))]
(cond-> {agg-key (agg-fn root-agg)}
(seq aggs) (assoc :aggs (make-aggregation aggs)))))
(defn format-agg-result
[agg-type
{:keys [value buckets] :as _metric-res}]
(case agg-type
:cardinality value
:topn (map #(array-map :key (:key %)
:value (:doc_count %))
buckets)
:histogram (map #(array-map :key (:key_as_string %)
:value (:doc_count %))
buckets)))
(s/defn handle-aggregate
"Generate an ES aggregation handler for given schema"
[{:keys [conn index] :as es-conn-state} :- ESConnState
search-query :- SearchQuery
{:keys [agg-type] :as agg-query} :- AggQuery
ident]
(let [query (make-search-query es-conn-state search-query ident)
agg (make-aggregation (assoc agg-query :agg-key :metric))
es-res (ductile.doc/query conn
index
query
agg
{:limit 0})]
(format-agg-result agg-type
(get-in es-res [:aggs :metric]))))
| null | https://raw.githubusercontent.com/threatgrid/ctia/6c11ba6a7c57a44de64c16601d3914f5b0cf308e/src/ctia/stores/es/crud.clj | clojure | Add partial results to the exception data map | (ns ctia.stores.es.crud
(:require
[clojure.set :as set]
[clojure.string :as string]
[clojure.tools.logging :as log]
[ctia.domain.access-control :as ac
:refer [allow-read? allow-write? restricted-read?]]
[ctia.lib.pagination :refer [list-response-schema]]
[ctia.schemas.core :refer [SortExtension SortExtensionDefinitions]]
[ctia.schemas.search-agg
:refer [AggQuery CardinalityQuery HistogramQuery QueryStringSearchArgs SearchQuery TopnQuery]]
[ctia.stores.es.sort :as es.sort]
[ctia.stores.es.query :as es.query]
[ctia.stores.es.schemas :refer [ESConnState]]
[ductile.document :as ductile.doc]
[ductile.query :as q]
[ring.swagger.coerce :as sc]
[schema-tools.core :as st]
[schema.coerce :as c]
[schema.core :as s]))
(defn make-es-read-params
  "Prepare ES Params for read operations, setting the _source field
   and including ACL mandatory ones."
  [{:keys [fields]
    :as es-params}]
  (cond-> es-params
    ;; only rewrite when an explicit field list was requested; the ACL
    ;; fields are always appended so access-control checks can still
    ;; see them on the fetched documents
    (coll? fields)
    (-> (assoc :_source (concat fields ac/acl-fields))
        (dissoc :fields))))
(defn coerce-to-fn
  "Return a coercion function for Model using JSON-schema coercion
  rules; the returned fn throws on values that cannot be coerced."
  [Model]
  (c/coercer! Model sc/json-schema-coercion-matcher))
(defn ensure-document-id
  "Returns a document ID. if id is a object ID, it extract the
   document ID, if it's a document ID already, it will just return
   that."
  [id]
  ;; non-greedy prefix + anchored suffix: the capture group is the last
  ;; "/"-separated segment of the string, i.e. the bare document id
  (let [[_orig docid] (re-matches #".*?([^/]+)\z" id)]
    docid))
(defn ensure-document-id-in-map
  "Ensure a document ID in a given filter map"
  [{:keys [id] :as m}]
  (cond-> m
    ;; normalize a single id string into a collection first
    (string? id) (update :id list)
    ;; then strip any URI prefix from every id
    id (update :id #(map ensure-document-id %))))
(defn remove-es-actions
  "Strips the ES bulk action wrapper from each item, e.g.
  [{:index {:_id \"1\"}}
   {:index {:_id \"2\"}}]
  ->
  [{:_id \"1\"}
   {:_id \"2\"}]"
  [items]
  (for [item items]
    (-> item vals first)))
(defn build-create-result
[item coerce-fn]
(-> item
(dissoc :_id :_index :_type)
coerce-fn))
(defn partial-results
  "Build partial results when an error occurs for one or more items
   in the bulk operation.
   Ex:
   [{model1}
    {:error \"Error message item2\"}
    {model3}]"
  [exception-data models coerce-fn]
  (let [{{:keys [items]}
         :es-http-res-body} exception-data]
    ;; pair each ES bulk item with the model it was built from:
    ;; failed items become {:error .. :id ..}, successful ones are
    ;; stripped of ES metadata and coerced back to the model
    {:data (map (fn [{:keys [error _id]} model]
                  (if error
                    {:error error
                     :id _id}
                    (build-create-result model coerce-fn)))
                (remove-es-actions items) models)}))
(s/defn get-docs-with-indices
  "Retrieves a documents from a search \"ids\" query. It enables to retrieves
   documents from an alias that points to multiple indices.
   It returns the documents with full hits meta data including the real index in which is stored the document."
  [{:keys [conn index] :as _conn-state} :- ESConnState
   ids :- [s/Str]
   es-params]
  (let [limit (count ids)
        ids-query (q/ids (map ensure-document-id ids))
        ;; :full-hits? keeps the hit metadata (notably :_index) so
        ;; callers learn the concrete index behind the alias
        res (ductile.doc/query conn
                               index
                               ids-query
                               (assoc (make-es-read-params es-params)
                                      :limit limit
                                      :full-hits? true))]
    (:data res)))
(s/defn get-doc-with-index
  "Retrieves a document from a search \"ids\" query. It is used to perform a get query on an alias that points to multiple indices.
   It returns the document with full hits meta data including the real index in which is stored the document."
  [conn-state :- ESConnState
   _id :- s/Str
   es-params]
  ;; a single id can match at most one document
  (first (get-docs-with-indices conn-state [_id] es-params)))
(defn ^:private prepare-opts
  "Build ES write options: an explicit :refresh param wins over the
  store's configured :refresh property, falling back to \"false\"."
  [{:keys [props]}
   {:keys [refresh]}]
  {:refresh (or refresh
                (:refresh props)
                "false")})
(s/defn bulk-schema
[Model :- (s/pred map?)]
(st/optional-keys
{:create [Model]
:index [Model]
:update [(st/optional-keys Model)]
:delete [s/Str]}))
(s/defn ^:private prepare-bulk-doc
  "Attach ES bulk metadata to a doc: its own :id as :_id, the store's
  write index as :_index, and the mapping name as :_type."
  [{:keys [props]} :- ESConnState
   mapping :- s/Keyword
   doc :- (s/pred map?)]
  (assoc doc
         :_id (:id doc)
         :_index (:write-index props)
         :_type (name mapping)))
(defn handle-create
  "Generate an ES create handler using some mapping and schema"
  [mapping Model]
  (let [coerce! (coerce-to-fn (s/maybe Model))]
    (s/fn :- [Model]
      [{:keys [conn] :as conn-state} :- ESConnState
       docs :- [Model]
       _ident
       es-params]
      (let [prepare-doc (partial prepare-bulk-doc conn-state mapping)
            prepared (mapv prepare-doc docs)]
        (try
          (ductile.doc/bulk-index-docs conn
                                       prepared
                                       (prepare-opts conn-state es-params))
          ;; on success, return the input docs unchanged
          docs
          (catch Exception e
            (throw
             (if-let [ex-data (ex-data e)]
               ;; Add partial results to the exception data map
               (ex-info (.getMessage e)
                        (partial-results ex-data docs coerce!))
               e))))))))
(defn handle-update
"Generate an ES update handler using some mapping and schema"
[mapping Model]
(let [coerce! (coerce-to-fn (s/maybe Model))]
(s/fn :- (s/maybe Model)
[{:keys [conn] :as conn-state} :- ESConnState
id :- s/Str
realized :- Model
ident
es-params]
(when-let [[{index :_index current-doc :_source}]
(get-docs-with-indices conn-state [id] {})]
(if (allow-write? current-doc ident)
(let [update-doc (assoc realized
:id (ensure-document-id id))]
(ductile.doc/index-doc conn
index
(name mapping)
update-doc
(prepare-opts conn-state es-params))
(coerce! update-doc))
(throw (ex-info "You are not allowed to update this document"
{:type :access-control-error})))))))
(defn handle-read
  "Generate an ES read handler using some mapping and schema"
  [Model]
  (let [coerce! (coerce-to-fn (s/maybe Model))]
    (s/fn :- (s/maybe Model)
      [{{{:keys [get-in-config]} :ConfigService}
        :services
        :as conn-state}
       :- ESConnState
       id :- s/Str
       ident
       es-params]
      (when-let [doc (-> (get-doc-with-index conn-state
                                             id
                                             (make-es-read-params es-params))
                         :_source
                         coerce!)]
        ;; the doc must be fetched before the ACL check: allow-read?
        ;; needs the stored ACL fields to decide
        (if (allow-read? doc ident get-in-config)
          doc
          (throw (ex-info "You are not allowed to read this document"
                          {:type :access-control-error})))))))
(defn handle-read-many
"Generate an ES read-many handler using some mapping and schema"
[Model]
(let [coerce! (coerce-to-fn Model)]
(s/fn :- [(s/maybe Model)]
[{{{:keys [get-in-config]} :ConfigService}
:services
:as conn-state}
:- ESConnState
ids :- [s/Str]
ident
{:keys [suppress-access-control-error?]
:or {suppress-access-control-error? false}
:as es-params}]
(sequence
(comp (map :_source)
(map coerce!)
(map (fn [record]
(if (allow-read? record ident get-in-config)
record
(let [ex (ex-info "You are not allowed to read this document"
{:type :access-control-error})]
(if suppress-access-control-error?
(log/error ex)
(throw ex)))))))
(get-docs-with-indices conn-state ids (make-es-read-params es-params))))))
(defn access-control-filter-list
"Given an ident, keep only documents it is allowed to read"
[docs ident get-in-config]
(filter #(allow-read? % ident get-in-config) docs))
(s/defschema BulkResult
(st/optional-keys
{:deleted [s/Str]
:updated [s/Str]
:errors (st/optional-keys
{:forbidden [s/Str]
:not-found [s/Str]
:internal-error [s/Str]})}))
(s/defschema ESActionResult
(st/open-schema
{:_id s/Str
:_index s/Str
:status s/Int
:result s/Str}))
;; TODO move it to ductile
(s/defschema ESBulkRes
{:took s/Int
:errors s/Bool
:items [{ductile.doc/BulkOps ESActionResult}]})
(s/defn ^:private format-bulk-res
  "transform an elasticsearch bulk result into a CTIA Bulk Result.
   ex: https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html#docs-bulk-api-example"
  [bulk-res :- ESBulkRes]
  (let [{:keys [deleted updated not_found]}
        (->> (:items bulk-res)
             ;; each item is {action-kw result-map}; keep the result map
             (map (comp first vals))
             ;; bucket item ids by their ES :result string
             (group-by :result)
             (into {}
                   (map (fn [[result items]]
                          {(keyword result) (map :_id items)}))))]
    (cond-> {}
      deleted (assoc :deleted deleted)
      updated (assoc :updated updated)
      not_found (assoc-in [:errors :not-found] not_found))))
(s/defn check-and-prepare-bulk
  :- (st/assoc BulkResult
               (s/optional-key :prepared)
               [(s/pred map?)])
  "prepare a bulk query:
  - retrieve actual indices, deletion cannot be performed on the alias.
  - filter out forbidden entities
  - forbidden and not_found errors are prepared for the response."
  [conn-state :- ESConnState
   ids :- [s/Str]
   ident]
  (let [;; BUGFIX: bind the get-in-config fn itself, not the whole
        ;; ConfigService map. Every sibling handler destructures
        ;; {:keys [get-in-config]} and allow-read? invokes it as a
        ;; function of a config path; passing the service map instead
        ;; silently yields nil for every lookup.
        get-in-config (get-in conn-state [:services :ConfigService :get-in-config])
        doc-ids (map ensure-document-id ids)
        docs-with-indices (get-docs-with-indices conn-state doc-ids {})
        ;; split found docs into writable and non-writable for ident
        {authorized true forbidden-write false}
        (group-by #(allow-write? (:_source %) ident)
                  docs-with-indices)
        ;; among non-writable docs, readable ones are reported as
        ;; forbidden, invisible ones as not-found
        {forbidden true not-visible false}
        (group-by #(allow-read? (:_source %) ident get-in-config)
                  forbidden-write)
        missing (set/difference (set doc-ids)
                                (set (map :_id docs-with-indices)))
        not-found (into (map :_id not-visible) missing)
        ;; keep only the metadata needed to address each doc in a bulk op
        prepared-docs (map #(select-keys % [:_index :_type :_id])
                           authorized)]
    (cond-> {}
      forbidden (assoc-in [:errors :forbidden] (map :_id forbidden))
      (seq not-found) (assoc-in [:errors :not-found] not-found)
      authorized (assoc :prepared prepared-docs))))
(s/defn bulk-delete :- BulkResult
  "Bulk-delete documents by id, honoring ACLs; partial failures are
  reported under :errors."
  [{:keys [conn] :as conn-state}
   ids :- [s/Str]
   ident
   es-params]
  (let [{:keys [prepared errors]} (check-and-prepare-bulk conn-state ids ident)
        bulk-res (when prepared
                   (try
                     (format-bulk-res
                      (ductile.doc/bulk-delete-docs conn
                                                    prepared
                                                    (prepare-opts conn-state es-params)))
                     (catch Exception e
                       (log/error e
                                  (str "bulk delete failed: " (.getMessage e))
                                  (pr-str prepared))
                       ;; on ES failure, report every attempted id
                       {:errors {:internal-error (map :_id prepared)}})))]
    (cond-> bulk-res
      ;; merge ACL/not-found errors with any ES-level errors
      errors (update :errors
                     #(merge-with concat errors %)))))
(s/defn bulk-update
  "Generate an ES bulk update handler using some mapping and schema"
  [Model]
  (s/fn :- BulkResult
    [{:keys [conn] :as conn-state}
     docs :- [Model]
     ident
     es-params]
    (let [by-id (group-by :id docs)
          ids (seq (keys by-id))
          ;; check ACLs and resolve the concrete index of every doc
          {:keys [prepared errors]} (check-and-prepare-bulk conn-state
                                                            ids
                                                            ident)
          ;; merge each authorized doc into its ES meta (_index/_type/_id)
          prepared-docs (map (fn [meta]
                               (-> (:_id meta)
                                   by-id
                                   first
                                   (into meta)))
                             prepared)
          bulk-res (when prepared
                     (try
                       (format-bulk-res
                        (ductile.doc/bulk-index-docs conn
                                                     prepared-docs
                                                     (prepare-opts conn-state es-params)))
                       (catch Exception e
                         ;; CONSISTENCY FIX: pass the throwable as the first
                         ;; argument (as bulk-delete does) so the stacktrace
                         ;; is actually logged, not just the message.
                         (log/error e
                                    (str "bulk update failed: " (.getMessage e))
                                    (pr-str prepared))
                         {:errors {:internal-error (map :_id prepared)}})))]
      (cond-> bulk-res
        ;; merge ACL/not-found errors with any ES-level errors
        errors (update :errors
                       #(merge-with concat errors %))))))
(defn handle-delete
  "Generate an ES delete handler using some mapping"
  [mapping]
  (s/fn :- s/Bool
    [{:keys [conn] :as conn-state} :- ESConnState
     id :- s/Str
     ident
     es-params]
    ;; resolve the concrete index first: a delete cannot target an alias
    (if-let [{index :_index doc :_source}
             (get-doc-with-index conn-state id {})]
      (if (allow-write? doc ident)
        (ductile.doc/delete-doc conn
                                index
                                (name mapping)
                                (ensure-document-id id)
                                (prepare-opts conn-state es-params))
        (throw (ex-info "You are not allowed to delete this document"
                        {:type :access-control-error})))
      ;; unknown id: report failure rather than throwing
      false)))
(s/defschema FilterSchema
(st/optional-keys
{:all-of {s/Any s/Any}
:one-of {s/Any s/Any}
:query s/Str}))
(def enumerable-fields-mapping
"Mapping table for all fields which needs to be renamed
for the sorting or aggregation. Instead of using fielddata we can have
a text field for full text searches, and an unanalysed keyword
field with doc_values enabled for sorting or aggregation"
{"title" "title.whole"
"reason" "reason.whole"})
(s/defn parse-sort-by :- [SortExtension]
  "Parses the sort_by parameter
  Ex:
  \"title:ASC,revision:DESC\"
  ->
  [{:op :field :field-name \"title\" :sort_order \"ASC\"}
   {:op :field :field-name \"revision\" :sort_order \"DESC\"}]"
  [sort_by]
  ;; strings and simple keywords/symbols get parsed; anything else is
  ;; assumed to already be a parsed [SortExtension] and returned as-is
  (if ((some-fn string? simple-ident?) sort_by)
    (map
     (fn [field]
       (let [[field-name field-order] (string/split field #":")]
         (cond-> {:op :field
                  :field-name (keyword field-name)}
           ;; the \":ORDER\" suffix is optional
           field-order (assoc :sort_order field-order))))
     (string/split (name sort_by) #","))
    sort_by))
(defn with-default-sort-field
  "Ensure es-params carries a :sort clause: keep an existing one, else
  build one from the store's :default-sort property, else sort by _doc
  then id."
  [es-params {:keys [default-sort]}]
  ;; :sort_by must already have been translated into :sort
  ;; (see rename-sort-fields)
  (assert (not (:sort_by es-params)))
  (update es-params :sort #(or %
                               (some->> default-sort
                                        parse-sort-by
                                        (mapv (fn [m] (es.sort/parse-sort-params-op m :asc))))
                               [{"_doc" :asc} {"id" :asc}])))
(s/defn rename-sort-fields
  "Renames sort fields based on the content of the `enumerable-fields-mapping` table
   and remaps to script extensions."
  [{:keys [sort_by sort_order] :as es-params}
   sort-extension-definitions :- (s/maybe SortExtensionDefinitions)]
  (cond-> (dissoc es-params :sort_by :sort_order)
    ;; only translate when :sort was not already built by a caller
    (and sort_by (not (:sort es-params)))
    (assoc :sort
           (->> sort_by
                parse-sort-by
                (mapv (fn [field]
                        {:pre [(= :field (:op field))]}
                        (let [{:keys [field-name] :as field}
                              ;; swap in the sortable keyword sub-field
                              ;; (e.g. title -> title.whole) when one exists
                              (update field :field-name #(or (keyword (enumerable-fields-mapping (name %)))
                                                             %))]
                          (assert (simple-keyword? field-name))
                          ;; a sort-extension definition for this field
                          ;; overrides the plain field sort; it inherits
                          ;; the requested :sort_order and field name
                          (-> (or (some-> (get sort-extension-definitions field-name)
                                          (into (select-keys field [:sort_order]))
                                          (update :field-name #(or % (:field-name field))))
                                  field)
                              (es.sort/parse-sort-params-op (or sort_order :asc))))))))))
(s/defschema MakeQueryParamsArgs
{:params s/Any
:props s/Any
(s/optional-key :sort-extension-definitions) SortExtensionDefinitions})
(s/defn make-query-params :- {s/Keyword s/Any}
[{:keys [params props sort-extension-definitions]} :- MakeQueryParamsArgs]
(cond-> (-> params
(rename-sort-fields sort-extension-definitions)
(with-default-sort-field props)
make-es-read-params)
(<= 7 (:version props)) (assoc :track_total_hits true)))
(defn handle-find
"Generate an ES find/list handler using some mapping and schema"
[Model]
(let [response-schema (list-response-schema Model)
coerce! (coerce-to-fn response-schema)]
(s/fn :- response-schema
[{{{:keys [get-in-config]} :ConfigService} :services
:keys [conn index props]} :- ESConnState
{:keys [all-of one-of query]
:or {all-of {} one-of {}}} :- FilterSchema
ident
es-params]
(let [filter-val (cond-> (q/prepare-terms all-of)
(restricted-read? ident)
(conj (es.query/find-restriction-query-part ident get-in-config)))
query_string {:query_string {:query query}}
date-range-query (es.query/make-date-range-query es-params)
bool-params (cond-> {:filter filter-val}
(seq one-of) (into
{:should (q/prepare-terms one-of)
:minimum_should_match 1})
query (update :filter conj query_string)
(seq date-range-query) (update :filter conj {:range date-range-query}))
query-params (make-query-params {:params es-params :props props})]
(cond-> (coerce! (ductile.doc/query conn
index
(q/bool bool-params)
query-params))
(restricted-read? ident) (update :data
access-control-filter-list
ident
get-in-config))))))
(s/defn make-search-query :- {s/Keyword s/Any}
  "Translate SearchQuery map into ES Query DSL map"
  [es-conn-state :- ESConnState
   search-query :- SearchQuery
   ident]
  (let [{:keys [services]} es-conn-state
        {{:keys [get-in-config]} :ConfigService} services
        {:keys [filter-map range full-text]} search-query
        range-query (when range
                      {:range range})
        ;; object ids may come in as URIs; normalize before term filters
        filter-terms (-> (ensure-document-id-in-map filter-map)
                         q/prepare-terms)]
    {:bool
     {:filter
      ;; the ACL restriction is always the first filter clause
      (cond-> [(es.query/find-restriction-query-part ident get-in-config)]
        (seq filter-map) (into filter-terms)
        (seq range) (conj range-query)
        (seq full-text) (into (es.query/refine-full-text-query-parts
                               es-conn-state full-text)))}}))
(defn handle-query-string-search
"Generate an ES query handler for given schema schema"
[Model]
(let [response-schema (list-response-schema Model)
coerce! (coerce-to-fn response-schema)]
(s/fn :- response-schema
[{:keys [props] :as es-conn-state} :- ESConnState
{:keys [search-query ident] :as query-string-search-args} :- QueryStringSearchArgs]
(let [{conn :conn, index :index
{{:keys [get-in-config]} :ConfigService}
:services} es-conn-state
query (make-search-query es-conn-state search-query ident)
query-params (make-query-params (-> (select-keys query-string-search-args [:params :sort-extension-definitions])
(assoc :props props)))]
(cond-> (coerce! (ductile.doc/query
conn
index
query
query-params))
(restricted-read? ident) (update
:data
access-control-filter-list
ident
get-in-config))))))
(s/defn handle-delete-search
"ES delete by query handler"
[{:keys [conn index] :as es-conn-state} :- ESConnState
search-query :- SearchQuery
ident
es-params]
(let [query (make-search-query es-conn-state search-query ident)]
(:deleted
(ductile.doc/delete-by-query conn
[index]
query
(prepare-opts es-conn-state es-params)))))
(s/defn handle-query-string-count :- (s/pred nat-int?)
  "ES count handler"
  [{conn :conn
    index :index
    :as es-conn-state} :- ESConnState
   search-query :- SearchQuery
   ident]
  ;; the count is ACL-restricted through the query itself, so no
  ;; post-filtering is needed
  (let [query (make-search-query es-conn-state search-query ident)]
    (ductile.doc/count-docs conn
                            index
                            query)))
(s/defn make-histogram
  "Build an ES date_histogram aggregation clause."
  [{:keys [aggregate-on granularity timezone]
    :or {timezone "+00:00"}} :- HistogramQuery]
  {:date_histogram
   {:field aggregate-on
    ;; BUGFIX: `granularity` was destructured but never emitted, which
    ;; produced a date_histogram with no interval at all.
    :interval granularity ;; TODO switch to calendar_interval with ES7
    :time_zone timezone}})
(s/defn make-topn
  "Build an ES terms aggregation returning the `limit` most frequent
  values of the aggregated field, ordered by document count."
  [{:keys [aggregate-on limit sort_order]
    :or {limit 10 sort_order :desc}} :- TopnQuery]
  {:terms
   ;; use the sortable keyword sub-field when one is defined
   {:field (get enumerable-fields-mapping aggregate-on aggregate-on)
    :size limit
    :order {:_count sort_order}}})
(s/defn make-cardinality
  "Build an ES cardinality aggregation (approximate distinct count)."
  [{:keys [aggregate-on]} :- CardinalityQuery]
  {:cardinality {:field (get enumerable-fields-mapping aggregate-on aggregate-on)
                 ;; trade memory for exactness up to 10k distinct values
                 :precision_threshold 10000}})
(s/defn make-aggregation
  "Recursively build an ES aggregation clause from an AggQuery,
  nesting any sub-aggregation under :aggs."
  [{:keys [agg-type agg-key aggs]
    :or {agg-key :metric}
    :as agg-query} :- AggQuery]
  (let [root-agg (dissoc agg-query :aggs)
        agg-fn
        (case agg-type
          :topn make-topn
          :cardinality make-cardinality
          :histogram make-histogram
          ;; unknown aggregation types are rejected eagerly
          (throw (ex-info (str "invalid aggregation type: " (pr-str agg-type))
                          {})))]
    (cond-> {agg-key (agg-fn root-agg)}
      (seq aggs) (assoc :aggs (make-aggregation aggs)))))
(defn format-agg-result
  "Normalize an ES aggregation response: a scalar for :cardinality,
  a seq of {:key .. :value ..} maps for bucketed aggregations."
  [agg-type
   {:keys [value buckets] :as _metric-res}]
  (case agg-type
    :cardinality value
    :topn (map #(array-map :key (:key %)
                           :value (:doc_count %))
               buckets)
    ;; histogram bucket keys are the formatted date strings
    :histogram (map #(array-map :key (:key_as_string %)
                                :value (:doc_count %))
                    buckets)))
(s/defn handle-aggregate
  "Generate an ES aggregation handler for given schema"
  [{:keys [conn index] :as es-conn-state} :- ESConnState
   search-query :- SearchQuery
   {:keys [agg-type] :as agg-query} :- AggQuery
   ident]
  (let [query (make-search-query es-conn-state search-query ident)
        agg (make-aggregation (assoc agg-query :agg-key :metric))
        ;; :limit 0 -> only aggregation buckets, no hits payload
        es-res (ductile.doc/query conn
                                  index
                                  query
                                  agg
                                  {:limit 0})]
    (format-agg-result agg-type
                       (get-in es-res [:aggs :metric]))))
|
1474dc7693ddebbfcbd686f2889a6603019a6cd4face2891224b437c797d7c3a | argp/bap | bench_map.ml | cd .. & & ocamlbuild benchsuite / bench_map.native & & _ build / benchsuite / bench_map.native
(* The purpose of this test is to compare different implementation of
the Map associative data structure. *)
let total_length = 500_000
let (%) = BatPervasives.(%)
module MapBench (M : sig val input_length : int end) = struct
let input_length = M.input_length
let nb_iter =
max 10 (total_length / input_length)
let () = Printf.printf "%d iterations\n" nb_iter
(* Draw a uniformly random key / value in [0, input_length). *)
let random_key () = Random.int input_length
let random_value () = Random.int input_length

(* Build a list of [input_length] elements drawn from [random_elt]. *)
let random_inputs random_elt () =
  BatList.init input_length (fun _ -> random_elt ())

(* Turn a (name, function) list into a Bench runner applied to [input]. *)
let make_samples input tests () = Bench.bench_funs tests input
(* we don't use BatInt to ensure that the same comparison function
   is used (PMap uses Pervasives.compare by default), in order to
   have comparable performance results. *)
module StdMap = BatMap.Make(struct type t = int let compare = compare end)
module Map = BatMap
(* [same_elts stdmap pmap] holds when both maps contain exactly the
   same (key, value) bindings, compared via their enumerations. *)
let same_elts stdmap pmap =
  BatList.of_enum (StdMap.enum stdmap)
  = BatList.of_enum (Map.enum pmap)
(* A benchmark for key insertion *)

(* Insert every (key, value) pair of [input] into an initially empty
   functorized StdMap. *)
let create_std_map input =
  List.fold_left
    (fun t (k, v) -> StdMap.add k v t)
    StdMap.empty input

(* Same insertion benchmark, using the polymorphic BatMap. *)
let create_poly_map input =
  List.fold_left
    (fun t (k, v) -> Map.add k v t)
    Map.empty input
let create_input =
let keys = random_inputs random_key () in
let values = random_inputs random_value () in
BatList.combine keys values
let std_created_map = create_std_map create_input
let poly_created_map = create_poly_map create_input
let () =
assert (same_elts std_created_map poly_created_map)
let samples_create = make_samples create_input
[ "stdmap create", ignore % create_std_map;
"pmap create", ignore % create_poly_map ]
(* A benchmark for fast import *)
let import_std_map input =
StdMap.of_enum (BatList.enum input)
let import_poly_map input =
Map.of_enum (BatList.enum input)
let import_input = create_input
let () =
let std_imported_map = import_std_map import_input in
assert (same_elts std_imported_map poly_created_map);
let poly_imported_map = import_poly_map import_input in
assert (same_elts std_created_map poly_imported_map);
()
let samples_import = make_samples import_input
[ "stdmap import", ignore % import_std_map;
"pmap import", ignore % import_poly_map ]
(* A benchmark for key lookup *)
let lookup_input =
random_inputs random_key ()
let lookup_std_map input =
List.iter
(fun k -> ignore (StdMap.mem k std_created_map))
input
let lookup_poly_map input =
List.iter
(fun k -> ignore (Map.mem k poly_created_map))
input
let samples_lookup = make_samples lookup_input
[ "stdmap lookup", lookup_std_map;
"pmap lookup", lookup_poly_map ]
(* A benchmark for key removal *)
let remove_input =
random_inputs random_key ()
let remove_std_map input =
List.fold_left
(fun t k -> StdMap.remove k t)
std_created_map input
let remove_poly_map input =
List.fold_left
(fun t k -> Map.remove k t)
poly_created_map input
let () =
assert (same_elts
(remove_std_map remove_input)
(remove_poly_map remove_input))
let samples_remove = make_samples remove_input
[ "stdmap remove", ignore % remove_std_map;
"pmap remove", ignore % remove_poly_map ]
(* A benchmark for merging *)
let random_pairlist () =
BatList.combine
(random_inputs random_key ())
(random_inputs random_value ())
let p1 = random_pairlist ()
let p2 = random_pairlist ()
let merge_fun k a b =
if k mod 2 = 0 then None else Some ()
let merge_std_map =
let m1 = StdMap.of_enum (BatList.enum p1) in
let m2 = StdMap.of_enum (BatList.enum p2) in
fun () ->
StdMap.merge merge_fun m1 m2
let merge_poly_map =
let m1 = Map.of_enum (BatList.enum p1) in
let m2 = Map.of_enum (BatList.enum p2) in
fun () ->
Map.merge merge_fun m1 m2
let samples_merge = make_samples () [
"stdmap merge", ignore % merge_std_map;
"pmap merge", ignore % merge_poly_map;
]
(* compare fold-based and merge-based union, diff, intersect *)
let pmap_union (m1, m2) = Map.union m1 m2
let fold_union (m1, m2) =
Map.foldi Map.add m1 m2
let merge_union (m1, m2) =
let merge_fun k a b = if a <> None then a else b in
Map.merge merge_fun m1 m2
let union_input =
let m1 = Map.of_enum (BatList.enum p1) in
let m2 = Map.of_enum (BatList.enum p2) in
m1, m2
let () =
let li m = BatList.of_enum (Map.enum m) in
let test impl_union =
li (pmap_union union_input) = li (impl_union union_input) in
assert (test fold_union);
assert (test merge_union);
()
let samples_union = make_samples union_input [
"pmap union", ignore % pmap_union;
"fold-based union", ignore % fold_union;
"merge-based union", ignore % merge_union;
]
let pmap_diff (m1, m2) =
Map.diff m1 m2
let fold_diff (m1, m2) =
Map.foldi (fun k _ acc -> Map.remove k acc) m2 m1
let merge_diff (m1, m2) =
let merge_fun k a b = if b <> None then None else a in
Map.merge merge_fun m1 m2
let diff_input =
let m1 = Map.of_enum (BatList.enum p1) in
let m2 = Map.of_enum (BatList.enum p2) in
m1, m2
let () =
let li m = BatList.of_enum (Map.enum m) in
let test impl_diff =
li (pmap_diff diff_input) = li (impl_diff diff_input) in
assert (test fold_diff);
assert (test merge_diff);
()
let samples_diff = make_samples diff_input [
"pmap diff", ignore % pmap_diff;
"fold-based diff", ignore % fold_diff;
"merge-based diff", ignore % merge_diff;
]
let pmap_intersect f (m1, m2) =
Map.intersect f m1 m2
let filter_intersect f (m1, m2) =
let filter_fun k v1 =
match
try Some (Map.find k m2)
with Not_found -> None
with
| None -> None
| Some v2 -> Some (f v1 v2) in
Map.filter_map filter_fun m1
let merge_intersect f (m1, m2) =
let merge_fun k a b =
match a, b with
| Some v1, Some v2 -> Some (f v1 v2)
| None, _ | _, None -> None in
Map.merge merge_fun m1 m2
let intersect_input =
let m1 = Map.of_enum (BatList.enum p1) in
let m2 = Map.of_enum (BatList.enum p2) in
m1, m2
let () =
let li m = BatList.of_enum (Map.enum m) in
let test impl_intersect =
li (pmap_intersect (-) intersect_input)
= li (impl_intersect (-) intersect_input) in
assert (test filter_intersect);
assert (test merge_intersect);
()
let samples_intersect = make_samples intersect_input [
"pmap intersect", ignore % pmap_intersect (-);
"filter-based intersect", ignore % filter_intersect (-);
"merge-based intersect", ignore % merge_intersect (-);
]
let () =
let create = samples_create () in
let import = samples_import () in
let lookup = samples_lookup () in
let remove = samples_remove () in
let merge = samples_merge () in
let union = samples_union () in
let diff = samples_diff () in
let intersect = samples_intersect () in
List.iter
(print_newline % Bench.summarize)
[
create;
import;
lookup;
remove;
merge;
union;
diff;
intersect;
]
end
let big_length = 100_000
let small_length = 500
let () =
Printf.printf "Test with small maps (length = %d)\n%!" small_length;
let () =
let module M = MapBench(struct let input_length = small_length end) in
() in
print_newline ();
print_newline ();
Printf.printf "Test with big maps (length = %d)\n%!" big_length;
Bench.config.Bench.samples <- 100;
let () =
let module M = MapBench(struct let input_length = big_length end) in
() in
()
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/benchsuite/bench_map.ml | ocaml | The purpose of this test is to compare different implementation of
the Map associative data structure.
A benchmark for key insertion
A benchmark for fast import
A benchmark for key lookup
A benchmark for key removal
A benchmark for merging
compare fold-based and merge-based union, diff, intersect | cd .. & & ocamlbuild benchsuite / bench_map.native & & _ build / benchsuite / bench_map.native
let total_length = 500_000
let (%) = BatPervasives.(%)
module MapBench (M : sig val input_length : int end) = struct
let input_length = M.input_length
let nb_iter =
max 10 (total_length / input_length)
let () = Printf.printf "%d iterations\n" nb_iter
let random_key () = Random.int input_length
let random_value () = Random.int input_length
let random_inputs random_elt () =
BatList.init input_length (fun _ -> random_elt ())
let make_samples input tests () = Bench.bench_funs tests input
we do n't use BatInt to ensure that the same comparison function
is used ( PMap use Pervasives.compare by default ) , in order to
have comparable performance results .
is used (PMap use Pervasives.compare by default), in order to
have comparable performance results. *)
module StdMap = BatMap.Make(struct type t = int let compare = compare end)
module Map = BatMap
let same_elts stdmap pmap =
BatList.of_enum (StdMap.enum stdmap)
= BatList.of_enum (Map.enum pmap)
let create_std_map input =
List.fold_left
(fun t (k, v) -> StdMap.add k v t)
StdMap.empty input
let create_poly_map input =
List.fold_left
(fun t (k, v) -> Map.add k v t)
Map.empty input
let create_input =
let keys = random_inputs random_key () in
let values = random_inputs random_value () in
BatList.combine keys values
let std_created_map = create_std_map create_input
let poly_created_map = create_poly_map create_input
let () =
assert (same_elts std_created_map poly_created_map)
let samples_create = make_samples create_input
[ "stdmap create", ignore % create_std_map;
"pmap create", ignore % create_poly_map ]
let import_std_map input =
StdMap.of_enum (BatList.enum input)
let import_poly_map input =
Map.of_enum (BatList.enum input)
let import_input = create_input
let () =
let std_imported_map = import_std_map import_input in
assert (same_elts std_imported_map poly_created_map);
let poly_imported_map = import_poly_map import_input in
assert (same_elts std_created_map poly_imported_map);
()
let samples_import = make_samples import_input
[ "stdmap import", ignore % import_std_map;
"pmap import", ignore % import_poly_map ]
let lookup_input =
random_inputs random_key ()
let lookup_std_map input =
List.iter
(fun k -> ignore (StdMap.mem k std_created_map))
input
let lookup_poly_map input =
List.iter
(fun k -> ignore (Map.mem k poly_created_map))
input
let samples_lookup = make_samples lookup_input
[ "stdmap lookup", lookup_std_map;
"pmap lookup", lookup_poly_map ]
let remove_input =
random_inputs random_key ()
let remove_std_map input =
List.fold_left
(fun t k -> StdMap.remove k t)
std_created_map input
let remove_poly_map input =
List.fold_left
(fun t k -> Map.remove k t)
poly_created_map input
let () =
assert (same_elts
(remove_std_map remove_input)
(remove_poly_map remove_input))
let samples_remove = make_samples remove_input
[ "stdmap remove", ignore % remove_std_map;
"pmap remove", ignore % remove_poly_map ]
let random_pairlist () =
BatList.combine
(random_inputs random_key ())
(random_inputs random_value ())
let p1 = random_pairlist ()
let p2 = random_pairlist ()
let merge_fun k a b =
if k mod 2 = 0 then None else Some ()
let merge_std_map =
let m1 = StdMap.of_enum (BatList.enum p1) in
let m2 = StdMap.of_enum (BatList.enum p2) in
fun () ->
StdMap.merge merge_fun m1 m2
let merge_poly_map =
let m1 = Map.of_enum (BatList.enum p1) in
let m2 = Map.of_enum (BatList.enum p2) in
fun () ->
Map.merge merge_fun m1 m2
let samples_merge = make_samples () [
"stdmap merge", ignore % merge_std_map;
"pmap merge", ignore % merge_poly_map;
]
let pmap_union (m1, m2) = Map.union m1 m2
let fold_union (m1, m2) =
Map.foldi Map.add m1 m2
let merge_union (m1, m2) =
let merge_fun k a b = if a <> None then a else b in
Map.merge merge_fun m1 m2
let union_input =
let m1 = Map.of_enum (BatList.enum p1) in
let m2 = Map.of_enum (BatList.enum p2) in
m1, m2
let () =
let li m = BatList.of_enum (Map.enum m) in
let test impl_union =
li (pmap_union union_input) = li (impl_union union_input) in
assert (test fold_union);
assert (test merge_union);
()
let samples_union = make_samples union_input [
"pmap union", ignore % pmap_union;
"fold-based union", ignore % fold_union;
"merge-based union", ignore % merge_union;
]
let pmap_diff (m1, m2) =
Map.diff m1 m2
let fold_diff (m1, m2) =
Map.foldi (fun k _ acc -> Map.remove k acc) m2 m1
let merge_diff (m1, m2) =
let merge_fun k a b = if b <> None then None else a in
Map.merge merge_fun m1 m2
let diff_input =
let m1 = Map.of_enum (BatList.enum p1) in
let m2 = Map.of_enum (BatList.enum p2) in
m1, m2
let () =
let li m = BatList.of_enum (Map.enum m) in
let test impl_diff =
li (pmap_diff diff_input) = li (impl_diff diff_input) in
assert (test fold_diff);
assert (test merge_diff);
()
let samples_diff = make_samples diff_input [
"pmap diff", ignore % pmap_diff;
"fold-based diff", ignore % fold_diff;
"merge-based diff", ignore % merge_diff;
]
let pmap_intersect f (m1, m2) =
Map.intersect f m1 m2
let filter_intersect f (m1, m2) =
let filter_fun k v1 =
match
try Some (Map.find k m2)
with Not_found -> None
with
| None -> None
| Some v2 -> Some (f v1 v2) in
Map.filter_map filter_fun m1
let merge_intersect f (m1, m2) =
let merge_fun k a b =
match a, b with
| Some v1, Some v2 -> Some (f v1 v2)
| None, _ | _, None -> None in
Map.merge merge_fun m1 m2
let intersect_input =
let m1 = Map.of_enum (BatList.enum p1) in
let m2 = Map.of_enum (BatList.enum p2) in
m1, m2
let () =
let li m = BatList.of_enum (Map.enum m) in
let test impl_intersect =
li (pmap_intersect (-) intersect_input)
= li (impl_intersect (-) intersect_input) in
assert (test filter_intersect);
assert (test merge_intersect);
()
let samples_intersect = make_samples intersect_input [
"pmap intersect", ignore % pmap_intersect (-);
"filter-based intersect", ignore % filter_intersect (-);
"merge-based intersect", ignore % merge_intersect (-);
]
let () =
let create = samples_create () in
let import = samples_import () in
let lookup = samples_lookup () in
let remove = samples_remove () in
let merge = samples_merge () in
let union = samples_union () in
let diff = samples_diff () in
let intersect = samples_intersect () in
List.iter
(print_newline % Bench.summarize)
[
create;
import;
lookup;
remove;
merge;
union;
diff;
intersect;
]
end
let big_length = 100_000
let small_length = 500
let () =
Printf.printf "Test with small maps (length = %d)\n%!" small_length;
let () =
let module M = MapBench(struct let input_length = small_length end) in
() in
print_newline ();
print_newline ();
Printf.printf "Test with big maps (length = %d)\n%!" big_length;
Bench.config.Bench.samples <- 100;
let () =
let module M = MapBench(struct let input_length = big_length end) in
() in
()
|
679d955390e90fde7e78b0a061ac74e3c7b9e356f35d688d8edc4745799d3804 | Oblosys/proxima | SemHsTokens.hs |
UUAGC 0.9.10 ( SemHsTokens.ag )
module SemHsTokens where
import qualified Data.Sequence as Seq
import Data.Sequence(Seq,empty,singleton,(><))
import Data.Foldable(toList)
import Pretty
import TokenDef
import HsToken
import ErrorMessages
import CommonTypes
import UU.Scanner.Position(Pos)
isNTname allnts (Just (NT nt _)) = nt `elem` allnts
isNTname allnts _ = False
-----------------------------------------------------
visit 0 :
inherited attributes :
allfields : [ ( Identifier , Type , ) ]
: [ Identifier ]
attrs : [ ( Identifier , Identifier ) ]
con : Identifier
fieldnames : [ Identifier ]
nt : Identifier
synthesized attributes :
errors : Seq Error
tok : ( Pos , String )
: [ ( Identifier , Identifier ) ]
usedFields : Seq Identifier
usedLocals : [ Identifier ]
alternatives :
alternative AGField :
child field : { Identifier }
child attr : { Identifier }
child pos : { Pos }
child rdesc : { Maybe String }
visit 0 :
local addTrace : _
alternative AGLocal :
child var : { Identifier }
child pos : { Pos }
child rdesc : { Maybe String }
visit 0 :
local _ tup1 : _
local errors : _
local tok : _
local usedLocals : _
alternative CharToken :
child value : { String }
child pos : { Pos }
alternative :
child mesg : { String }
child pos : { Pos }
alternative :
child value : { String }
child pos : { Pos }
alternative StrToken :
child value : { String }
child pos : { Pos }
visit 0:
inherited attributes:
allfields : [(Identifier,Type,Bool)]
allnts : [Identifier]
attrs : [(Identifier,Identifier)]
con : Identifier
fieldnames : [Identifier]
nt : Identifier
synthesized attributes:
errors : Seq Error
tok : (Pos,String)
usedAttrs : [(Identifier,Identifier)]
usedFields : Seq Identifier
usedLocals : [Identifier]
alternatives:
alternative AGField:
child field : {Identifier}
child attr : {Identifier}
child pos : {Pos}
child rdesc : {Maybe String}
visit 0:
local addTrace : _
alternative AGLocal:
child var : {Identifier}
child pos : {Pos}
child rdesc : {Maybe String}
visit 0:
local _tup1 : _
local errors : _
local tok : _
local usedLocals : _
alternative CharToken:
child value : {String}
child pos : {Pos}
alternative Err:
child mesg : {String}
child pos : {Pos}
alternative HsToken:
child value : {String}
child pos : {Pos}
alternative StrToken:
child value : {String}
child pos : {Pos}
-}
-- cata
sem_HsToken :: HsToken ->
T_HsToken
sem_HsToken (AGField _field _attr _pos _rdesc ) =
(sem_HsToken_AGField _field _attr _pos _rdesc )
sem_HsToken (AGLocal _var _pos _rdesc ) =
(sem_HsToken_AGLocal _var _pos _rdesc )
sem_HsToken (CharToken _value _pos ) =
(sem_HsToken_CharToken _value _pos )
sem_HsToken (Err _mesg _pos ) =
(sem_HsToken_Err _mesg _pos )
sem_HsToken (HsToken _value _pos ) =
(sem_HsToken_HsToken _value _pos )
sem_HsToken (StrToken _value _pos ) =
(sem_HsToken_StrToken _value _pos )
-- semantic domain
newtype T_HsToken = T_HsToken (([(Identifier,Type,Bool)]) ->
([Identifier]) ->
([(Identifier,Identifier)]) ->
Identifier ->
([Identifier]) ->
Identifier ->
( (Seq Error),((Pos,String)),([(Identifier,Identifier)]),(Seq Identifier),([Identifier])))
data Inh_HsToken = Inh_HsToken {allfields_Inh_HsToken :: [(Identifier,Type,Bool)],allnts_Inh_HsToken :: [Identifier],attrs_Inh_HsToken :: [(Identifier,Identifier)],con_Inh_HsToken :: Identifier,fieldnames_Inh_HsToken :: [Identifier],nt_Inh_HsToken :: Identifier}
data Syn_HsToken = Syn_HsToken {errors_Syn_HsToken :: Seq Error,tok_Syn_HsToken :: (Pos,String),usedAttrs_Syn_HsToken :: [(Identifier,Identifier)],usedFields_Syn_HsToken :: Seq Identifier,usedLocals_Syn_HsToken :: [Identifier]}
wrap_HsToken :: T_HsToken ->
Inh_HsToken ->
Syn_HsToken
wrap_HsToken (T_HsToken sem ) (Inh_HsToken _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) =
(let ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =
(sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt )
in (Syn_HsToken _lhsOerrors _lhsOtok _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))
sem_HsToken_AGField :: Identifier ->
Identifier ->
Pos ->
(Maybe String) ->
T_HsToken
sem_HsToken_AGField field_ attr_ pos_ rdesc_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOtok :: ((Pos,String))
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 74 , column 15 )
_lhsOerrors =
if (field_,attr_) `elem` _lhsIattrs
then Seq.empty
else if not(field_ `elem` (_LHS : _LOC: _lhsIfieldnames))
then Seq.singleton (UndefChild _lhsInt _lhsIcon field_)
else Seq.singleton (UndefAttr _lhsInt _lhsIcon field_ attr_ False)
" SemHsTokens.ag"(line 88 , column 13 )
_lhsOusedAttrs =
[(field_,attr_)]
" SemHsTokens.ag"(line 115 , column 8)
_addTrace =
case rdesc_ of
Just d -> \x -> "(trace " ++ show (d ++ " -> " ++ show field_ ++ "." ++ show attr_) ++ " (" ++ x ++ "))"
Nothing -> id
" SemHsTokens.ag"(line 118 , column 8)
_lhsOtok =
(pos_, _addTrace $ attrname True field_ attr_)
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_AGLocal :: Identifier ->
Pos ->
(Maybe String) ->
T_HsToken
sem_HsToken_AGLocal var_ pos_ rdesc_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOusedFields :: (Seq Identifier)
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedLocals :: ([Identifier])
_lhsOtok :: ((Pos,String))
" SemHsTokens.ag"(line 64 , column 19 )
__tup1 =
if var_ `elem` _lhsIfieldnames
then if isNTname _lhsIallnts (lookup var_ (map (\(n,t,_) -> (n,t)) _lhsIallfields))
then (Seq.singleton(ChildAsLocal _lhsInt _lhsIcon var_), (pos_,fieldname var_), [] )
else (Seq.empty, (pos_,fieldname var_), [] )
else if (_LOC,var_) `elem` _lhsIattrs
then (Seq.empty , (pos_,locname var_), [var_])
else (Seq.singleton(UndefLocal _lhsInt _lhsIcon var_), (pos_,locname var_), [] )
" SemHsTokens.ag"(line 64 , column 19 )
(_errors,_,_) =
__tup1
" SemHsTokens.ag"(line 64 , column 19 )
(_,_tok,_) =
__tup1
" SemHsTokens.ag"(line 64 , column 19 )
(_,_,_usedLocals) =
__tup1
" SemHsTokens.ag"(line 96 , column 13 )
_lhsOusedFields =
if var_ `elem` _lhsIfieldnames
then Seq.singleton var_
else Seq.empty
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
_errors
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
_usedLocals
-- copy rule (from local)
_lhsOtok =
_tok
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_CharToken :: String ->
Pos ->
T_HsToken
sem_HsToken_CharToken value_ pos_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtok :: ((Pos,String))
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 122 , column 16 )
_lhsOtok =
(pos_, if null value_
then ""
else showCharShort (head value_)
)
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
Seq.empty
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_Err :: String ->
Pos ->
T_HsToken
sem_HsToken_Err mesg_ pos_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOerrors :: (Seq Error)
_lhsOtok :: ((Pos,String))
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 50 , column 9 )
_lhsOerrors =
let m = text mesg_
in Seq.singleton (CustomError False pos_ m)
" SemHsTokens.ag"(line 128 , column 16 )
_lhsOtok =
(pos_, "")
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_HsToken :: String ->
Pos ->
T_HsToken
sem_HsToken_HsToken value_ pos_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtok :: ((Pos,String))
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 120 , column 14 )
_lhsOtok =
(pos_, value_)
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
Seq.empty
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_StrToken :: String ->
Pos ->
T_HsToken
sem_HsToken_StrToken value_ pos_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtok :: ((Pos,String))
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 127 , column 16 )
_lhsOtok =
(pos_, showStrShort value_)
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
Seq.empty
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
HsTokens ----------------------------------------------------
visit 0 :
inherited attributes :
allfields : [ ( Identifier , Type , ) ]
: [ Identifier ]
attrs : [ ( Identifier , Identifier ) ]
con : Identifier
fieldnames : [ Identifier ]
nt : Identifier
synthesized attributes :
errors : Seq Error
tks : [ ( Pos , String ) ]
: [ ( Identifier , Identifier ) ]
usedFields : Seq Identifier
usedLocals : [ Identifier ]
alternatives :
alternative Cons :
child hd :
child tl :
alternative :
visit 0:
inherited attributes:
allfields : [(Identifier,Type,Bool)]
allnts : [Identifier]
attrs : [(Identifier,Identifier)]
con : Identifier
fieldnames : [Identifier]
nt : Identifier
synthesized attributes:
errors : Seq Error
tks : [(Pos,String)]
usedAttrs : [(Identifier,Identifier)]
usedFields : Seq Identifier
usedLocals : [Identifier]
alternatives:
alternative Cons:
child hd : HsToken
child tl : HsTokens
alternative Nil:
-}
-- cata
sem_HsTokens :: HsTokens ->
T_HsTokens
sem_HsTokens list =
(Prelude.foldr sem_HsTokens_Cons sem_HsTokens_Nil (Prelude.map sem_HsToken list) )
-- semantic domain
newtype T_HsTokens = T_HsTokens (([(Identifier,Type,Bool)]) ->
([Identifier]) ->
([(Identifier,Identifier)]) ->
Identifier ->
([Identifier]) ->
Identifier ->
( (Seq Error),([(Pos,String)]),([(Identifier,Identifier)]),(Seq Identifier),([Identifier])))
data Inh_HsTokens = Inh_HsTokens {allfields_Inh_HsTokens :: [(Identifier,Type,Bool)],allnts_Inh_HsTokens :: [Identifier],attrs_Inh_HsTokens :: [(Identifier,Identifier)],con_Inh_HsTokens :: Identifier,fieldnames_Inh_HsTokens :: [Identifier],nt_Inh_HsTokens :: Identifier}
data Syn_HsTokens = Syn_HsTokens {errors_Syn_HsTokens :: Seq Error,tks_Syn_HsTokens :: [(Pos,String)],usedAttrs_Syn_HsTokens :: [(Identifier,Identifier)],usedFields_Syn_HsTokens :: Seq Identifier,usedLocals_Syn_HsTokens :: [Identifier]}
wrap_HsTokens :: T_HsTokens ->
Inh_HsTokens ->
Syn_HsTokens
wrap_HsTokens (T_HsTokens sem ) (Inh_HsTokens _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) =
(let ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =
(sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt )
in (Syn_HsTokens _lhsOerrors _lhsOtks _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))
sem_HsTokens_Cons :: T_HsToken ->
T_HsTokens ->
T_HsTokens
sem_HsTokens_Cons (T_HsToken hd_ ) (T_HsTokens tl_ ) =
(T_HsTokens (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtks :: ([(Pos,String)])
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
_hdOallfields :: ([(Identifier,Type,Bool)])
_hdOallnts :: ([Identifier])
_hdOattrs :: ([(Identifier,Identifier)])
_hdOcon :: Identifier
_hdOfieldnames :: ([Identifier])
_hdOnt :: Identifier
_tlOallfields :: ([(Identifier,Type,Bool)])
_tlOallnts :: ([Identifier])
_tlOattrs :: ([(Identifier,Identifier)])
_tlOcon :: Identifier
_tlOfieldnames :: ([Identifier])
_tlOnt :: Identifier
_hdIerrors :: (Seq Error)
_hdItok :: ((Pos,String))
_hdIusedAttrs :: ([(Identifier,Identifier)])
_hdIusedFields :: (Seq Identifier)
_hdIusedLocals :: ([Identifier])
_tlIerrors :: (Seq Error)
_tlItks :: ([(Pos,String)])
_tlIusedAttrs :: ([(Identifier,Identifier)])
_tlIusedFields :: (Seq Identifier)
_tlIusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 110 , column 10 )
_lhsOtks =
_hdItok : _tlItks
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
_hdIerrors Seq.>< _tlIerrors
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
_hdIusedAttrs ++ _tlIusedAttrs
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
_hdIusedFields Seq.>< _tlIusedFields
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
_hdIusedLocals ++ _tlIusedLocals
-- copy rule (down)
_hdOallfields =
_lhsIallfields
-- copy rule (down)
_hdOallnts =
_lhsIallnts
-- copy rule (down)
_hdOattrs =
_lhsIattrs
-- copy rule (down)
_hdOcon =
_lhsIcon
-- copy rule (down)
_hdOfieldnames =
_lhsIfieldnames
-- copy rule (down)
_hdOnt =
_lhsInt
-- copy rule (down)
_tlOallfields =
_lhsIallfields
-- copy rule (down)
_tlOallnts =
_lhsIallnts
-- copy rule (down)
_tlOattrs =
_lhsIattrs
-- copy rule (down)
_tlOcon =
_lhsIcon
-- copy rule (down)
_tlOfieldnames =
_lhsIfieldnames
-- copy rule (down)
_tlOnt =
_lhsInt
( _hdIerrors,_hdItok,_hdIusedAttrs,_hdIusedFields,_hdIusedLocals) =
(hd_ _hdOallfields _hdOallnts _hdOattrs _hdOcon _hdOfieldnames _hdOnt )
( _tlIerrors,_tlItks,_tlIusedAttrs,_tlIusedFields,_tlIusedLocals) =
(tl_ _tlOallfields _tlOallnts _tlOattrs _tlOcon _tlOfieldnames _tlOnt )
in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsTokens_Nil :: T_HsTokens
sem_HsTokens_Nil =
(T_HsTokens (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtks :: ([(Pos,String)])
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 111 , column 10 )
_lhsOtks =
[]
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
Seq.empty
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
HsTokensRoot ------------------------------------------------
visit 0 :
inherited attributes :
allfields : [ ( Identifier , Type , ) ]
: [ Identifier ]
attrs : [ ( Identifier , Identifier ) ]
con : Identifier
nt : Identifier
synthesized attributes :
errors : Seq Error
textLines : [ String ]
: [ ( Identifier , Identifier ) ]
usedFields : [ Identifier ]
usedLocals : [ Identifier ]
alternatives :
alternative HsTokensRoot :
child tokens : HsTokens
visit 0:
inherited attributes:
allfields : [(Identifier,Type,Bool)]
allnts : [Identifier]
attrs : [(Identifier,Identifier)]
con : Identifier
nt : Identifier
synthesized attributes:
errors : Seq Error
textLines : [String]
usedAttrs : [(Identifier,Identifier)]
usedFields : [Identifier]
usedLocals : [Identifier]
alternatives:
alternative HsTokensRoot:
child tokens : HsTokens
-}
-- cata
sem_HsTokensRoot :: HsTokensRoot ->
T_HsTokensRoot
sem_HsTokensRoot (HsTokensRoot _tokens ) =
(sem_HsTokensRoot_HsTokensRoot (sem_HsTokens _tokens ) )
-- semantic domain
newtype T_HsTokensRoot = T_HsTokensRoot (([(Identifier,Type,Bool)]) ->
([Identifier]) ->
([(Identifier,Identifier)]) ->
Identifier ->
Identifier ->
( (Seq Error),([String]),([(Identifier,Identifier)]),([Identifier]),([Identifier])))
data Inh_HsTokensRoot = Inh_HsTokensRoot {allfields_Inh_HsTokensRoot :: [(Identifier,Type,Bool)],allnts_Inh_HsTokensRoot :: [Identifier],attrs_Inh_HsTokensRoot :: [(Identifier,Identifier)],con_Inh_HsTokensRoot :: Identifier,nt_Inh_HsTokensRoot :: Identifier}
data Syn_HsTokensRoot = Syn_HsTokensRoot {errors_Syn_HsTokensRoot :: Seq Error,textLines_Syn_HsTokensRoot :: [String],usedAttrs_Syn_HsTokensRoot :: [(Identifier,Identifier)],usedFields_Syn_HsTokensRoot :: [Identifier],usedLocals_Syn_HsTokensRoot :: [Identifier]}
wrap_HsTokensRoot :: T_HsTokensRoot ->
Inh_HsTokensRoot ->
Syn_HsTokensRoot
wrap_HsTokensRoot (T_HsTokensRoot sem ) (Inh_HsTokensRoot _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt ) =
(let ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =
(sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt )
in (Syn_HsTokensRoot _lhsOerrors _lhsOtextLines _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))
sem_HsTokensRoot_HsTokensRoot :: T_HsTokens ->
T_HsTokensRoot
sem_HsTokensRoot_HsTokensRoot (T_HsTokens tokens_ ) =
(T_HsTokensRoot (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsInt ->
(let _tokensOfieldnames :: ([Identifier])
_lhsOusedFields :: ([Identifier])
_lhsOtextLines :: ([String])
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedLocals :: ([Identifier])
_tokensOallfields :: ([(Identifier,Type,Bool)])
_tokensOallnts :: ([Identifier])
_tokensOattrs :: ([(Identifier,Identifier)])
_tokensOcon :: Identifier
_tokensOnt :: Identifier
_tokensIerrors :: (Seq Error)
_tokensItks :: ([(Pos,String)])
_tokensIusedAttrs :: ([(Identifier,Identifier)])
_tokensIusedFields :: (Seq Identifier)
_tokensIusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 38 , column 18 )
_tokensOfieldnames =
map (\(n,_,_) -> n) _lhsIallfields
" SemHsTokens.ag"(line 100 , column 18 )
_lhsOusedFields =
toList _tokensIusedFields
" SemHsTokens.ag"(line 107 , column 18 )
_lhsOtextLines =
showTokens _tokensItks
use rule " SemHsTokens.ag"(line 18 , column 18 )
_lhsOerrors =
_tokensIerrors
-- copy rule (up)
_lhsOusedAttrs =
_tokensIusedAttrs
-- copy rule (up)
_lhsOusedLocals =
_tokensIusedLocals
-- copy rule (down)
_tokensOallfields =
_lhsIallfields
-- copy rule (down)
_tokensOallnts =
_lhsIallnts
-- copy rule (down)
_tokensOattrs =
_lhsIattrs
-- copy rule (down)
_tokensOcon =
_lhsIcon
-- copy rule (down)
_tokensOnt =
_lhsInt
( _tokensIerrors,_tokensItks,_tokensIusedAttrs,_tokensIusedFields,_tokensIusedLocals) =
(tokens_ _tokensOallfields _tokensOallnts _tokensOattrs _tokensOcon _tokensOfieldnames _tokensOnt )
in ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) | null | https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/uuagc/src-derived/SemHsTokens.hs | haskell | ---------------------------------------------------
cata
semantic domain
copy rule (from local)
--------------------------------------------------
cata
semantic domain
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
----------------------------------------------
cata
semantic domain
copy rule (up)
copy rule (up)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down)
copy rule (down) |
UUAGC 0.9.10 ( SemHsTokens.ag )
module SemHsTokens where
import qualified Data.Sequence as Seq
import Data.Sequence(Seq,empty,singleton,(><))
import Data.Foldable(toList)
import Pretty
import TokenDef
import HsToken
import ErrorMessages
import CommonTypes
import UU.Scanner.Position(Pos)
isNTname allnts (Just (NT nt _)) = nt `elem` allnts
isNTname allnts _ = False
visit 0 :
inherited attributes :
allfields : [ ( Identifier , Type , ) ]
: [ Identifier ]
attrs : [ ( Identifier , Identifier ) ]
con : Identifier
fieldnames : [ Identifier ]
nt : Identifier
synthesized attributes :
errors : Seq Error
tok : ( Pos , String )
: [ ( Identifier , Identifier ) ]
usedFields : Seq Identifier
usedLocals : [ Identifier ]
alternatives :
alternative AGField :
child field : { Identifier }
child attr : { Identifier }
child pos : { Pos }
child rdesc : { Maybe String }
visit 0 :
local addTrace : _
alternative AGLocal :
child var : { Identifier }
child pos : { Pos }
child rdesc : { Maybe String }
visit 0 :
local _ tup1 : _
local errors : _
local tok : _
local usedLocals : _
alternative CharToken :
child value : { String }
child pos : { Pos }
alternative :
child mesg : { String }
child pos : { Pos }
alternative :
child value : { String }
child pos : { Pos }
alternative StrToken :
child value : { String }
child pos : { Pos }
visit 0:
inherited attributes:
allfields : [(Identifier,Type,Bool)]
allnts : [Identifier]
attrs : [(Identifier,Identifier)]
con : Identifier
fieldnames : [Identifier]
nt : Identifier
synthesized attributes:
errors : Seq Error
tok : (Pos,String)
usedAttrs : [(Identifier,Identifier)]
usedFields : Seq Identifier
usedLocals : [Identifier]
alternatives:
alternative AGField:
child field : {Identifier}
child attr : {Identifier}
child pos : {Pos}
child rdesc : {Maybe String}
visit 0:
local addTrace : _
alternative AGLocal:
child var : {Identifier}
child pos : {Pos}
child rdesc : {Maybe String}
visit 0:
local _tup1 : _
local errors : _
local tok : _
local usedLocals : _
alternative CharToken:
child value : {String}
child pos : {Pos}
alternative Err:
child mesg : {String}
child pos : {Pos}
alternative HsToken:
child value : {String}
child pos : {Pos}
alternative StrToken:
child value : {String}
child pos : {Pos}
-}
sem_HsToken :: HsToken ->
T_HsToken
sem_HsToken (AGField _field _attr _pos _rdesc ) =
(sem_HsToken_AGField _field _attr _pos _rdesc )
sem_HsToken (AGLocal _var _pos _rdesc ) =
(sem_HsToken_AGLocal _var _pos _rdesc )
sem_HsToken (CharToken _value _pos ) =
(sem_HsToken_CharToken _value _pos )
sem_HsToken (Err _mesg _pos ) =
(sem_HsToken_Err _mesg _pos )
sem_HsToken (HsToken _value _pos ) =
(sem_HsToken_HsToken _value _pos )
sem_HsToken (StrToken _value _pos ) =
(sem_HsToken_StrToken _value _pos )
newtype T_HsToken = T_HsToken (([(Identifier,Type,Bool)]) ->
([Identifier]) ->
([(Identifier,Identifier)]) ->
Identifier ->
([Identifier]) ->
Identifier ->
( (Seq Error),((Pos,String)),([(Identifier,Identifier)]),(Seq Identifier),([Identifier])))
data Inh_HsToken = Inh_HsToken {allfields_Inh_HsToken :: [(Identifier,Type,Bool)],allnts_Inh_HsToken :: [Identifier],attrs_Inh_HsToken :: [(Identifier,Identifier)],con_Inh_HsToken :: Identifier,fieldnames_Inh_HsToken :: [Identifier],nt_Inh_HsToken :: Identifier}
data Syn_HsToken = Syn_HsToken {errors_Syn_HsToken :: Seq Error,tok_Syn_HsToken :: (Pos,String),usedAttrs_Syn_HsToken :: [(Identifier,Identifier)],usedFields_Syn_HsToken :: Seq Identifier,usedLocals_Syn_HsToken :: [Identifier]}
wrap_HsToken :: T_HsToken ->
Inh_HsToken ->
Syn_HsToken
wrap_HsToken (T_HsToken sem ) (Inh_HsToken _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) =
(let ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =
(sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt )
in (Syn_HsToken _lhsOerrors _lhsOtok _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))
sem_HsToken_AGField :: Identifier ->
Identifier ->
Pos ->
(Maybe String) ->
T_HsToken
sem_HsToken_AGField field_ attr_ pos_ rdesc_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOtok :: ((Pos,String))
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 74 , column 15 )
_lhsOerrors =
if (field_,attr_) `elem` _lhsIattrs
then Seq.empty
else if not(field_ `elem` (_LHS : _LOC: _lhsIfieldnames))
then Seq.singleton (UndefChild _lhsInt _lhsIcon field_)
else Seq.singleton (UndefAttr _lhsInt _lhsIcon field_ attr_ False)
" SemHsTokens.ag"(line 88 , column 13 )
_lhsOusedAttrs =
[(field_,attr_)]
" SemHsTokens.ag"(line 115 , column 8)
_addTrace =
case rdesc_ of
Just d -> \x -> "(trace " ++ show (d ++ " -> " ++ show field_ ++ "." ++ show attr_) ++ " (" ++ x ++ "))"
Nothing -> id
" SemHsTokens.ag"(line 118 , column 8)
_lhsOtok =
(pos_, _addTrace $ attrname True field_ attr_)
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_AGLocal :: Identifier ->
Pos ->
(Maybe String) ->
T_HsToken
sem_HsToken_AGLocal var_ pos_ rdesc_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOusedFields :: (Seq Identifier)
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedLocals :: ([Identifier])
_lhsOtok :: ((Pos,String))
" SemHsTokens.ag"(line 64 , column 19 )
__tup1 =
if var_ `elem` _lhsIfieldnames
then if isNTname _lhsIallnts (lookup var_ (map (\(n,t,_) -> (n,t)) _lhsIallfields))
then (Seq.singleton(ChildAsLocal _lhsInt _lhsIcon var_), (pos_,fieldname var_), [] )
else (Seq.empty, (pos_,fieldname var_), [] )
else if (_LOC,var_) `elem` _lhsIattrs
then (Seq.empty , (pos_,locname var_), [var_])
else (Seq.singleton(UndefLocal _lhsInt _lhsIcon var_), (pos_,locname var_), [] )
" SemHsTokens.ag"(line 64 , column 19 )
(_errors,_,_) =
__tup1
" SemHsTokens.ag"(line 64 , column 19 )
(_,_tok,_) =
__tup1
" SemHsTokens.ag"(line 64 , column 19 )
(_,_,_usedLocals) =
__tup1
" SemHsTokens.ag"(line 96 , column 13 )
_lhsOusedFields =
if var_ `elem` _lhsIfieldnames
then Seq.singleton var_
else Seq.empty
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
_errors
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
_usedLocals
_lhsOtok =
_tok
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_CharToken :: String ->
Pos ->
T_HsToken
sem_HsToken_CharToken value_ pos_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtok :: ((Pos,String))
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 122 , column 16 )
_lhsOtok =
(pos_, if null value_
then ""
else showCharShort (head value_)
)
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
Seq.empty
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_Err :: String ->
Pos ->
T_HsToken
sem_HsToken_Err mesg_ pos_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOerrors :: (Seq Error)
_lhsOtok :: ((Pos,String))
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 50 , column 9 )
_lhsOerrors =
let m = text mesg_
in Seq.singleton (CustomError False pos_ m)
" SemHsTokens.ag"(line 128 , column 16 )
_lhsOtok =
(pos_, "")
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_HsToken :: String ->
Pos ->
T_HsToken
sem_HsToken_HsToken value_ pos_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtok :: ((Pos,String))
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 120 , column 14 )
_lhsOtok =
(pos_, value_)
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
Seq.empty
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsToken_StrToken :: String ->
Pos ->
T_HsToken
sem_HsToken_StrToken value_ pos_ =
(T_HsToken (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtok :: ((Pos,String))
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 127 , column 16 )
_lhsOtok =
(pos_, showStrShort value_)
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
Seq.empty
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
visit 0 :
inherited attributes :
allfields : [ ( Identifier , Type , ) ]
: [ Identifier ]
attrs : [ ( Identifier , Identifier ) ]
con : Identifier
fieldnames : [ Identifier ]
nt : Identifier
synthesized attributes :
errors : Seq Error
tks : [ ( Pos , String ) ]
: [ ( Identifier , Identifier ) ]
usedFields : Seq Identifier
usedLocals : [ Identifier ]
alternatives :
alternative Cons :
child hd :
child tl :
alternative :
visit 0:
inherited attributes:
allfields : [(Identifier,Type,Bool)]
allnts : [Identifier]
attrs : [(Identifier,Identifier)]
con : Identifier
fieldnames : [Identifier]
nt : Identifier
synthesized attributes:
errors : Seq Error
tks : [(Pos,String)]
usedAttrs : [(Identifier,Identifier)]
usedFields : Seq Identifier
usedLocals : [Identifier]
alternatives:
alternative Cons:
child hd : HsToken
child tl : HsTokens
alternative Nil:
-}
sem_HsTokens :: HsTokens ->
T_HsTokens
sem_HsTokens list =
(Prelude.foldr sem_HsTokens_Cons sem_HsTokens_Nil (Prelude.map sem_HsToken list) )
newtype T_HsTokens = T_HsTokens (([(Identifier,Type,Bool)]) ->
([Identifier]) ->
([(Identifier,Identifier)]) ->
Identifier ->
([Identifier]) ->
Identifier ->
( (Seq Error),([(Pos,String)]),([(Identifier,Identifier)]),(Seq Identifier),([Identifier])))
data Inh_HsTokens = Inh_HsTokens {allfields_Inh_HsTokens :: [(Identifier,Type,Bool)],allnts_Inh_HsTokens :: [Identifier],attrs_Inh_HsTokens :: [(Identifier,Identifier)],con_Inh_HsTokens :: Identifier,fieldnames_Inh_HsTokens :: [Identifier],nt_Inh_HsTokens :: Identifier}
data Syn_HsTokens = Syn_HsTokens {errors_Syn_HsTokens :: Seq Error,tks_Syn_HsTokens :: [(Pos,String)],usedAttrs_Syn_HsTokens :: [(Identifier,Identifier)],usedFields_Syn_HsTokens :: Seq Identifier,usedLocals_Syn_HsTokens :: [Identifier]}
wrap_HsTokens :: T_HsTokens ->
Inh_HsTokens ->
Syn_HsTokens
wrap_HsTokens (T_HsTokens sem ) (Inh_HsTokens _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) =
(let ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =
(sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt )
in (Syn_HsTokens _lhsOerrors _lhsOtks _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))
sem_HsTokens_Cons :: T_HsToken ->
T_HsTokens ->
T_HsTokens
sem_HsTokens_Cons (T_HsToken hd_ ) (T_HsTokens tl_ ) =
(T_HsTokens (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtks :: ([(Pos,String)])
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
_hdOallfields :: ([(Identifier,Type,Bool)])
_hdOallnts :: ([Identifier])
_hdOattrs :: ([(Identifier,Identifier)])
_hdOcon :: Identifier
_hdOfieldnames :: ([Identifier])
_hdOnt :: Identifier
_tlOallfields :: ([(Identifier,Type,Bool)])
_tlOallnts :: ([Identifier])
_tlOattrs :: ([(Identifier,Identifier)])
_tlOcon :: Identifier
_tlOfieldnames :: ([Identifier])
_tlOnt :: Identifier
_hdIerrors :: (Seq Error)
_hdItok :: ((Pos,String))
_hdIusedAttrs :: ([(Identifier,Identifier)])
_hdIusedFields :: (Seq Identifier)
_hdIusedLocals :: ([Identifier])
_tlIerrors :: (Seq Error)
_tlItks :: ([(Pos,String)])
_tlIusedAttrs :: ([(Identifier,Identifier)])
_tlIusedFields :: (Seq Identifier)
_tlIusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 110 , column 10 )
_lhsOtks =
_hdItok : _tlItks
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
_hdIerrors Seq.>< _tlIerrors
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
_hdIusedAttrs ++ _tlIusedAttrs
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
_hdIusedFields Seq.>< _tlIusedFields
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
_hdIusedLocals ++ _tlIusedLocals
_hdOallfields =
_lhsIallfields
_hdOallnts =
_lhsIallnts
_hdOattrs =
_lhsIattrs
_hdOcon =
_lhsIcon
_hdOfieldnames =
_lhsIfieldnames
_hdOnt =
_lhsInt
_tlOallfields =
_lhsIallfields
_tlOallnts =
_lhsIallnts
_tlOattrs =
_lhsIattrs
_tlOcon =
_lhsIcon
_tlOfieldnames =
_lhsIfieldnames
_tlOnt =
_lhsInt
( _hdIerrors,_hdItok,_hdIusedAttrs,_hdIusedFields,_hdIusedLocals) =
(hd_ _hdOallfields _hdOallnts _hdOattrs _hdOcon _hdOfieldnames _hdOnt )
( _tlIerrors,_tlItks,_tlIusedAttrs,_tlIusedFields,_tlIusedLocals) =
(tl_ _tlOallfields _tlOallnts _tlOattrs _tlOcon _tlOfieldnames _tlOnt )
in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
sem_HsTokens_Nil :: T_HsTokens
sem_HsTokens_Nil =
(T_HsTokens (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsIfieldnames
_lhsInt ->
(let _lhsOtks :: ([(Pos,String)])
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedFields :: (Seq Identifier)
_lhsOusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 111 , column 10 )
_lhsOtks =
[]
use rule " SemHsTokens.ag"(line 43 , column 37 )
_lhsOerrors =
Seq.empty
use rule " SemHsTokens.ag"(line 85 , column 40 )
_lhsOusedAttrs =
[]
use rule " SemHsTokens.ag"(line 93 , column 40 )
_lhsOusedFields =
Seq.empty
use rule " SemHsTokens.ag"(line 84 , column 40 )
_lhsOusedLocals =
[]
in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )
visit 0 :
inherited attributes :
allfields : [ ( Identifier , Type , ) ]
: [ Identifier ]
attrs : [ ( Identifier , Identifier ) ]
con : Identifier
nt : Identifier
synthesized attributes :
errors : Seq Error
textLines : [ String ]
: [ ( Identifier , Identifier ) ]
usedFields : [ Identifier ]
usedLocals : [ Identifier ]
alternatives :
alternative HsTokensRoot :
child tokens : HsTokens
visit 0:
inherited attributes:
allfields : [(Identifier,Type,Bool)]
allnts : [Identifier]
attrs : [(Identifier,Identifier)]
con : Identifier
nt : Identifier
synthesized attributes:
errors : Seq Error
textLines : [String]
usedAttrs : [(Identifier,Identifier)]
usedFields : [Identifier]
usedLocals : [Identifier]
alternatives:
alternative HsTokensRoot:
child tokens : HsTokens
-}
sem_HsTokensRoot :: HsTokensRoot ->
T_HsTokensRoot
sem_HsTokensRoot (HsTokensRoot _tokens ) =
(sem_HsTokensRoot_HsTokensRoot (sem_HsTokens _tokens ) )
newtype T_HsTokensRoot = T_HsTokensRoot (([(Identifier,Type,Bool)]) ->
([Identifier]) ->
([(Identifier,Identifier)]) ->
Identifier ->
Identifier ->
( (Seq Error),([String]),([(Identifier,Identifier)]),([Identifier]),([Identifier])))
data Inh_HsTokensRoot = Inh_HsTokensRoot {allfields_Inh_HsTokensRoot :: [(Identifier,Type,Bool)],allnts_Inh_HsTokensRoot :: [Identifier],attrs_Inh_HsTokensRoot :: [(Identifier,Identifier)],con_Inh_HsTokensRoot :: Identifier,nt_Inh_HsTokensRoot :: Identifier}
data Syn_HsTokensRoot = Syn_HsTokensRoot {errors_Syn_HsTokensRoot :: Seq Error,textLines_Syn_HsTokensRoot :: [String],usedAttrs_Syn_HsTokensRoot :: [(Identifier,Identifier)],usedFields_Syn_HsTokensRoot :: [Identifier],usedLocals_Syn_HsTokensRoot :: [Identifier]}
wrap_HsTokensRoot :: T_HsTokensRoot ->
Inh_HsTokensRoot ->
Syn_HsTokensRoot
wrap_HsTokensRoot (T_HsTokensRoot sem ) (Inh_HsTokensRoot _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt ) =
(let ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =
(sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt )
in (Syn_HsTokensRoot _lhsOerrors _lhsOtextLines _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))
sem_HsTokensRoot_HsTokensRoot :: T_HsTokens ->
T_HsTokensRoot
sem_HsTokensRoot_HsTokensRoot (T_HsTokens tokens_ ) =
(T_HsTokensRoot (\ _lhsIallfields
_lhsIallnts
_lhsIattrs
_lhsIcon
_lhsInt ->
(let _tokensOfieldnames :: ([Identifier])
_lhsOusedFields :: ([Identifier])
_lhsOtextLines :: ([String])
_lhsOerrors :: (Seq Error)
_lhsOusedAttrs :: ([(Identifier,Identifier)])
_lhsOusedLocals :: ([Identifier])
_tokensOallfields :: ([(Identifier,Type,Bool)])
_tokensOallnts :: ([Identifier])
_tokensOattrs :: ([(Identifier,Identifier)])
_tokensOcon :: Identifier
_tokensOnt :: Identifier
_tokensIerrors :: (Seq Error)
_tokensItks :: ([(Pos,String)])
_tokensIusedAttrs :: ([(Identifier,Identifier)])
_tokensIusedFields :: (Seq Identifier)
_tokensIusedLocals :: ([Identifier])
" SemHsTokens.ag"(line 38 , column 18 )
_tokensOfieldnames =
map (\(n,_,_) -> n) _lhsIallfields
" SemHsTokens.ag"(line 100 , column 18 )
_lhsOusedFields =
toList _tokensIusedFields
" SemHsTokens.ag"(line 107 , column 18 )
_lhsOtextLines =
showTokens _tokensItks
use rule " SemHsTokens.ag"(line 18 , column 18 )
_lhsOerrors =
_tokensIerrors
_lhsOusedAttrs =
_tokensIusedAttrs
_lhsOusedLocals =
_tokensIusedLocals
_tokensOallfields =
_lhsIallfields
_tokensOallnts =
_lhsIallnts
_tokensOattrs =
_lhsIattrs
_tokensOcon =
_lhsIcon
_tokensOnt =
_lhsInt
( _tokensIerrors,_tokensItks,_tokensIusedAttrs,_tokensIusedFields,_tokensIusedLocals) =
(tokens_ _tokensOallfields _tokensOallnts _tokensOattrs _tokensOcon _tokensOfieldnames _tokensOnt )
in ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) |
9f01966b071397927bcb7d951499d0e2382d81176ce11c2fd6eaa3a32be64223 | alex-gutev/tridash | macros.lisp | macros.lisp
;;;;
;;;; Tridash Programming Language.
Copyright ( C ) 2019 - 2021
;;;;
;;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; This program is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;;; GNU General Public License for more details.
;;;;
You should have received a copy of the GNU General Public License
;;;; along with this program. If not, see </>.
;;;; User-Defined Macro Tests
(defpackage :tridash/test.macros
(:use :generic-cl
:alexandria
:anaphora
:arrows
:iterate
:optima
:named-readtables
:tridash.parser
:tridash.frontend
:fiveam
:tridash/test
:tridash/test.util)
(:shadowing-import-from :generic-cl
:emptyp
:multiply
:accumulate)
(:shadowing-import-from :fiveam :fail)
(:import-from :lol
:defmacro!
:lol-syntax)
(:import-from :tridash.frontend
:tridash->cl-function
:call-meta-node
:call-tridash-meta-node
:call-node
:thunk
:resolve
:resolve%
:tridash-fail
:fail-thunk
:+empty-list+
:group-rest-args
:check-arity
:correct-arity?%
:fail-arity-error
:+optional-argument+
:+rest-argument+))
(in-package :tridash/test.macros)
(in-readtable lol-syntax)
;;; Test Suite Definition
(def-suite macros
:description "Test user-defined Tridash macros."
:in frontend)
(in-suite macros)
Utilities
(defun functor (operator &rest arguments)
"Creates a `FUNCTOR-EXPRESSION' with operator OPERATOR and arguments
ARGUMENTS."
(functor-expression operator arguments))
(defun expression= (expected got)
"Checks that the CL expression GOT is equal to EXPECTED. Symbols in
EXPECTED, beginning with $, are replaced with the symbol in GOT
corresponding to the first occurrence."
(let ((aliases (make-hash-map)))
(flet ((equal? (got expected)
(match* (got expected)
(((type symbol) (type symbol))
(= got
(cond
((starts-with #\$ (symbol-name expected))
(ensure-get expected aliases got))
((starts-with #\! (symbol-name expected))
(id-symbol (subseq (symbol-name expected) 1)))
(t
expected))))
((_ _)
(= got expected)))))
(tree-equal got expected :test #'equal?))))
(defmacro with-external-meta-nodes ((&rest names) &body body)
"Creates `EXTERNAL-META-NODE's with names NAMES and binds to
variables with the same identifiers as the names upcased."
`(let ,(map #`(,(intern (string-upcase a1)) (make-instance 'external-meta-node :name (id-symbol ,a1))) names)
,@body))
(defmacro! with-core-nodes ((&rest names) &body body)
"Builds the core module and binds the node with names NAMES to
variables with the same identifiers as the names, upcased."
`(with-module-table ,g!modules
(build-core-module)
(with-nodes ,(map #`(,(intern (string-upcase a1)) ,a1) names) ,g!modules
,@body)))
(defmacro mock-meta-node ((&rest operands) expression)
"Creates a `META-NODE' which takes operands OPERANDS and has a value
function consisting of EXPRESSION. OPERANDS is a list of symbols
naming the dependency nodes. EXPRESSION is evaluated in an
environment where each symbol in OPERANDS is bound to the
`NODE-LINK' object corresponding to the operand, and the symbol
SELF is bound to the `META-NODE' object."
(flet ((make-operand (operand)
(match operand
((or (list 'optional symb value)
(list 'optional symb))
(list +optional-argument+ (make-instance 'node :name symb) value))
((list 'rest symb)
(list +rest-argument+ (make-instance 'node :name symb)))
(_ (make-instance 'node :name operand))))
(operand-node (operand)
(match operand
((list* 'optional symb _)
symb)
((list 'rest symb)
symb)
(_ operand))))
`(let ((self (make-instance 'final-meta-node
:name 'test-meta-node
:operands ',(map #'make-operand operands)))
,@(map #`(,a1 (node-link (make-instance 'node :name ',a1)))
(map #'operand-node operands)))
;; Create an empty `FLAT-NODE-TABLE' to mark meta-node as
;; already built
(setf (definition self) (make-instance 'flat-node-table :nodes (make-hash-set)))
(setf (value-function (context self nil))
,expression)
,@(map #`(setf (get ',a1 (operands (context self nil))) ,a1)
(map #'operand-node operands))
,@(map #`(setf (get ',a1 (dependencies self)) ,a1)
(map #'operand-node operands))
self)))
(defmacro test-compile-meta-node ((&rest operands) expression args body)
"Creates and compiles a `META-NODE' to a CL LAMBDA expression and
checks that it has arguments ARGS and body BODY, by EXPRESSION=.
OPERANDS and EXPRESSION correspond to the OPERANDS and EXPRESSION
arguments of MOCK-META-NODE.
ARGS (not evaluated) is the expected lambda-list of the function.
BODY is the expected body expression within the BLOCK, TAGBODY,
RETURN expression. The symbol $recur, occurring in BODY is
substituted with the TAGBODY tag for tail-recursive self
calls. BODY is evaluated in an environment in which the symbol SELF
is bound to the `META-NODE' object."
(flet ((lambda-args (lambda-list)
(->> (remove-if (rcurry #'memberp lambda-list-keywords) lambda-list)
(map #'ensure-car)
(map (compose #'gensym #'symbol-name)))))
`(let ((self (mock-meta-node ,operands ,expression)))
(is (expression=
`(lambda ,',args
(declare (ignorable ,@',(lambda-args args)))
,,body)
(tridash->cl-function self))))))
Tridash to CL Compilation Tests
(test compile-functor-expression
"Test compilation of functor expressions to CL."
(with-core-nodes ("if" "<" "-")
(test-compile-meta-node
(a b)
(functor if (functor < a b) (functor - b a) (functor - a b))
($a $b)
'(let nil
(!|if| (!< $a $b)
(thunk (!- $b $a))
(thunk (!- $a $b)))))))
(test compile-if-expression
"Test compilation of if expressions to CL."
(with-core-nodes ("<" "-")
(test-compile-meta-node
(a b)
(if-expression (functor < a b) (functor - b a) (functor - a b))
($a $b)
'(let nil
(!|if| (!< $a $b)
(thunk (!- $b $a))
(thunk (!- $a $b)))))))
(test compile-object-expression
"Test compilation of object expressions to CL."
(with-core-nodes ("+" "-")
(test-compile-meta-node
(x y)
(object-expression
`((sum ,(functor + x y))
(diff ,(functor - x y))))
($x $y)
'(let nil
(alist-hash-map
(list
(cons 'sum (thunk (!+ $x $y)))
(cons 'diff (thunk (!- $x $y)))))))))
(test compile-member-expression
"Test compilation of member expressions to CL."
(test-compile-meta-node
(object)
(member-expression
(member-expression object 'key1) 'key2)
($obj)
'(let nil
(!|member| (!|member| $obj 'key1) 'key2))))
(test compile-catch-expression
"Test compilation of catch expressions to CL."
(with-core-nodes ("/" "*")
(test-compile-meta-node
(a b)
(catch-expression
(functor / a b)
(functor * a b))
($a $b)
'(let nil
(!|catch| (!/ $a $b) (thunk (!* $a $b)))))))
(test compile-fail-expression
"Test compilation of fail expressions to CL."
(test-compile-meta-node
()
(fail-expression)
()
'(let nil
(!|fail|))))
(test compile-expression-block
"Test compilation of expression blocks, with reference count = 1, to CL."
(with-core-nodes ("+")
(test-compile-meta-node
(a)
(expression-block
(functor + a 1))
($a)
'(let nil
(!+ $a 1)))))
(test compile-expression-block-muliple-references
"Test compilation of expression blocks, with reference count > 1, to CL."
(with-core-nodes ("+")
(test-compile-meta-node
(a)
(let ((block (expression-block (functor + a 1) :count 2)))
(functor + block block))
($a)
'(let ($a+1)
(setf $a+1 (thunk (!+ $a 1)))
(!+ $a+1 $a+1)))))
(test compile-meta-node-call
"Test compilation of calls to other meta-nodes, to CL."
(with-core-nodes ("-")
(let ((meta-node (mock-meta-node (a) a)))
(test-compile-meta-node
(a)
(functor meta-node (functor - a))
($a)
`(let nil
(call-tridash-meta-node ,meta-node (list (!- $a))))))))
(test compile-higher-order-external-meta-node
"Test compilation of higher order external meta-node."
(with-core-nodes ("not")
(let ((apply (mock-meta-node (f x) (functor f x))))
(test-compile-meta-node
(x)
(functor apply (meta-node-ref not) x)
($x)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(1 . 1) (length $args))
(apply #'!|not| $args)
(fail-arity-error)))
$x)))))))
(test compile-higher-order-if-meta-node
"Test compilation of higher order if meta-node."
(with-core-nodes ("if")
(let ((apply (mock-meta-node (f x y z) (functor f x y z))))
(test-compile-meta-node
(x y z)
(functor apply (meta-node-ref if) x y z)
($x $y $z)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(2 . 3) (length $args))
(apply #'!|if| $args)
(fail-arity-error)))
$x $y $z)))))))
(test compile-higher-order-and-meta-node
"Test compilation of higher order `and` meta-node."
(with-core-nodes ("and")
(let ((apply (mock-meta-node (f x y) (functor f x y))))
(test-compile-meta-node
(x y)
(functor apply (meta-node-ref and) x y)
($x $y)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(2 . 2) (length $args))
(apply #'!|and| $args)
(fail-arity-error)))
$x $y)))))))
(test compile-higher-order-or-meta-node
"Test compilation of higher order `or` meta-node."
(with-core-nodes ("or")
(let ((apply (mock-meta-node (f x y) (functor f x y))))
(test-compile-meta-node
(x y)
(functor apply (meta-node-ref or) x y)
($x $y)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(2 . 2) (length $args))
(apply #'!|or| $args)
(fail-arity-error)))
$x $y)))))))
(test compile-higher-order-meta-node
"Test compilation of higher-order user defined meta-node."
(let ((apply (mock-meta-node (f x) (functor f x)))
(f (mock-meta-node (x) x)))
(test-compile-meta-node
(x)
(functor apply (meta-node-ref f) x)
($x)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(1 . 1) (length $args))
(destructuring-bind ($x2) $args
(call-tridash-meta-node ,f (list $x2)))
(fail-arity-error)))
$x))))))
(test compile-higher-order-meta-node-optional-arguments
"Test compilation of higher-order meta-node with optional arguments."
(let ((apply (mock-meta-node (f x) (functor f x)))
(f (mock-meta-node (x (optional y) (optional z)) x)))
(test-compile-meta-node
(x)
(functor apply (meta-node-ref f :optional (list 1 2)) x)
($x)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(1 . 3) (length $args))
(destructuring-bind ($x2 &optional ($y 1) ($z 2)) $args
(call-tridash-meta-node ,f (list $x2 $y $z)))
(fail-arity-error)))
$x))))))
(test compile-higher-order-meta-node-rest-arguments
"Test compilation of higher-order meta-node with rest arguments."
(let ((apply (mock-meta-node (f x) (functor f x)))
(f (mock-meta-node (x y (rest xs)) xs)))
(test-compile-meta-node
(x)
(functor apply (meta-node-ref f) x)
($x)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(2) (length $args))
(destructuring-bind ($x2 $y &rest $xs &aux ($rest (or $xs +empty-list+)))
$args
(call-tridash-meta-node ,f (list $x2 $y $rest)))
(fail-arity-error)))
$x))))))
(test compile-invoke-higher-order-node
"Test compilation of invoking value nodes."
(test-compile-meta-node
(f x y)
(functor f x y)
($f $x $y)
`(let nil
(call-node $f (list $x $y)))))
(test compile-literals
"Test compilation of literal values."
(with-core-nodes ("and")
(test-compile-meta-node
()
(functor and "hello" (functor and 1 (functor and 2.3 'symbol)))
()
'(let nil
(!|and| "hello"
(thunk
(!|and| 1
(thunk
(!|and| 2.3 'symbol)))))))))
(test compile-core-arithmetic
"Test compilation of core arithmetic meta-nodes."
(with-core-nodes ("/" "*" "+" "-")
(test-compile-meta-node
(a b c d)
(functor
/
(functor * (functor + a b) (functor - c d))
(functor - d))
($a $b $c $d)
'(let nil
(!/
(!* (!+ $a $b) (!- $c $d))
(!- $d))))))
(test compile-core-comparison-and-logical
"Test compilation of core comparison and logical meta-nodes."
(with-core-nodes ("not" "or" "and" "=" "!=" "<" "<=" ">" ">=")
(test-compile-meta-node
(x y)
(functor
not
(functor
or
(functor and (functor < x y) (functor = y x))
(functor
or
(functor <= x 10)
(functor
or
(functor > 1 y)
(functor
or
(functor >= 8 y)
(functor != x y))))))
($x $y)
'(let nil
(!|not|
(!|or|
(!|and| (!< $x $y) (thunk (!= $y $x)))
(thunk
(!|or|
(!<= $x 10)
(thunk
(!|or|
(!> 1 $y)
(thunk
(!|or|
(!>= 8 $y)
(thunk (!!= $x $y))))))))))))))
(test compile-core-type-checks
"Test compilation of core type checking meta-nodes."
(with-core-nodes ("or" "int?" "real?" "string?")
(test-compile-meta-node
(x y z)
(functor
or
(functor int? x)
(functor
or
(functor real? y)
(functor string? z)))
($x $y $z)
'(let nil
(!|or|
(!|int?| $x)
(thunk
(!|or|
(!|real?| $y)
(thunk (!|string?| $z)))))))))
;; The following tests verify that recursive calls in tail position are
;; compiled to CALL-TRIDASH-META-NODE wrapped in a THUNK (rather than an
;; eagerly evaluated direct call), for each construct that propagates
;; tail position: IF, expression blocks, OR/AND functors and catch
;; expressions.

(test compile-tail-recursive-if
  "Test compilation of if expression in recursive tail position."

  (with-core-nodes ("-" "*" "<")
    (test-compile-meta-node

     (n acc)
     (if-expression (functor < n 2)
                    acc
                    (functor self (functor - n 1) (functor * n acc)))

     ($n $acc)
     `(let nil
        (!|if| (!< $n 2)
               $acc
               (thunk
                (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))

(test compile-tail-recursive-if-functor
  "Test compilation of if functor in recursive tail position."

  (with-core-nodes ("if" "-" "*" "<")
    (test-compile-meta-node

     (n acc)
     (functor if
              (functor < n 2)
              acc
              (functor self (functor - n 1) (functor * n acc)))

     ($n $acc)
     `(let nil
        (!|if| (!< $n 2)
               $acc
               (thunk
                (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))

(test compile-tail-recursive-expression-block
  "Test compilation of expression blocks in recursive tail position."

  (with-core-nodes ("if" "-" "*" "<")
    (test-compile-meta-node

     (n acc)
     (functor if
              (functor < n 2)
              acc
              (expression-block
               (functor self (functor - n 1) (functor * n acc))))

     ($n $acc)
     `(let nil
        (!|if| (!< $n 2)
               $acc
               (thunk
                (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))

(test compile-tail-recursive-or-functor
  "Test compilation of `or` functor in recursive tail position."

  (with-core-nodes ("or" "=" "!=" "-")
    (test-compile-meta-node

     (n)
     (functor or (functor = n 0) (functor self (functor - n 1)))

     ($n)
     `(let nil
        (!|or| (!= $n 0)
               (thunk
                (call-tridash-meta-node ,self (list (!- $n 1)))))))))

(test compile-tail-recursive-and-functor
  "Test compilation of `and` functor in recursive tail position."

  (with-core-nodes ("and" "=" "!=" "-")
    (test-compile-meta-node

     (n)
     (functor and (functor = n 0) (functor self (functor - n 1)))

     ($n)
     `(let nil
        (!|and| (!= $n 0)
                (thunk
                 (call-tridash-meta-node ,self (list (!- $n 1)))))))))

(test compile-tail-recursive-catch-expression
  "Test compilation of catch expressions in recursive tail position."

  (with-core-nodes ("-" "+")
    (test-compile-meta-node

     (n)
     (catch-expression (functor self (functor + n 1))
                       (functor self (functor - n 1)))

     ($n)
     `(let nil
        (!|catch| (call-tridash-meta-node ,self (list (!+ $n 1)))
                  (thunk
                   (call-tridash-meta-node ,self (list (!- $n 1)))))))))
(test compile-meta-node-optional-arguments
  "Test compilation of meta-node with optional arguments."

  (with-core-nodes ("+")
    (test-compile-meta-node
     (n (optional d 1))
     (functor + n d)

     ($n &optional ($d 1))
     '(let nil
       (!|+| $n $d)))))

(test compile-meta-node-multiple-optional-arguments
  "Test compilation of meta-node with multiple optional arguments."

  (with-core-nodes ("+")
    (test-compile-meta-node
     (n (optional d 1) (optional e 2))
     (functor + n (functor + d e))

     ($n &optional ($d 1) ($e 2))
     '(let nil
       (!|+| $n (!|+| $d $e))))))

(test compile-meta-node-rest-argument
  "Test compilation of meta-node with rest argument."

  ;; A Tridash rest argument compiles to a CL &OPTIONAL parameter
  ;; defaulting to +EMPTY-LIST+, not to a CL &REST parameter.
  (with-core-nodes ("cons")
    (test-compile-meta-node
     (x (rest xs))
     (functor cons x xs)

     ($x &optional ($xs +empty-list+))
     '(let nil
       (!|cons| $x $xs)))))

(test compile-meta-node-optional-and-rest-arguments
  "Test compilation of meta-node with optional and rest arguments."

  (with-core-nodes ("cons")
    (test-compile-meta-node
     (x (optional y 2) (rest xs))
     (functor cons x (functor cons y xs))

     ($x &optional ($y 2) ($xs +empty-list+))
     '(let nil
       (!|cons| $x (thunk (!|cons| $y $xs)))))))

(test compile-cyclic-references
  "Test compilation of cyclic references."

  ;; A cyclic reference compiles to a LET-bound variable which is SETF'd
  ;; to a THUNK whose body refers back to the same variable.
  (with-core-nodes ("cons")
    (test-compile-meta-node
     (a b)
     (aprog1 (expression-block nil :count 2)
       (setf (expression-block-expression it)
             (functor cons a (functor cons b (cyclic-reference it)))))

     ($a $b)
     '(let ($block)
       (setf $block
             (thunk (!|cons| $a (thunk (!|cons| $b $block)))))
       $block))))

(test compile-error-usupported-external-meta-node
  "Test that compiling an unsupported external-meta-node results in an error."

  (with-external-meta-nodes ("not-a-function")
    (signals
        unsupported-meta-node-error
      (tridash->cl-function
       (mock-meta-node
        (arg)
        (functor not-a-function arg))))))
;;; Test Calling Tridash Meta-Nodes from CL
(test call-meta-node-single-expression
  "Test calling a single expression meta-node from CL."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "min(x,y) : case{x < y : x; y}")

    (with-nodes ((min "min")) modules
      (is (= 2 (call-meta-node min '(2 10))))
      (is (= 2 (call-meta-node min '(10 2))))
      (is (= -5.3 (call-meta-node min '(-5.3 7.6))))
      (is (= 1 (call-meta-node min '(1 1)))))))

(test call-meta-node-with-if-expression
  "Test calling a meta-node with if expressions from CL."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "f(cond, x) : if(cond, x, 0)")

    (with-nodes ((f "f")) modules
      (is (= 10 (call-meta-node f '(t 10))))
      (is (= 0 (call-meta-node f '(nil 5))))
      ;; A non-boolean condition signals a failure.
      (signals tridash-fail (call-meta-node f '(1 5))))))

(test call-meta-node-with-and-expression
  "Test calling a meta-node with `and` expressions from CL."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "f(cond, x) : cond and x")

    (with-nodes ((f "f")) modules
      (is-true (call-meta-node f '(t t)))
      (is (= nil (call-meta-node f '(nil t))))
      (is (= nil (call-meta-node f '(t nil))))
      (is (= nil (call-meta-node f '(nil nil)))))))

(test call-meta-node-with-or-expression
  "Test calling a meta-node with `or` expressions from CL."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "f(cond, x) : cond or x")

    (with-nodes ((f "f")) modules
      (is-true (call-meta-node f '(t t)))
      (is-true (call-meta-node f '(nil t)))
      (is-true (call-meta-node f '(t nil)))
      (is (= nil (call-meta-node f '(nil nil)))))))

(test call-meta-node-catch-fail-expression
  "Test calling a meta-node with multiple nodes and CATCH-FAIL expressions."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "min(x,y) : { x < y -> (x -> /context(self,c)); y -> /context(self,c) }")

    (with-nodes ((min "min")) modules
      ;; The result is a thunk here, hence the explicit RESOLVE.
      (is (= 2 (resolve (call-meta-node min '(2 10)))))
      (is (= 2 (resolve (call-meta-node min '(10 2)))))
      (is (= -5.3 (resolve (call-meta-node min '(-5.3 7.6)))))
      (is (= 1 (resolve (call-meta-node min '(1 1))))))))
(test call-meta-node-recursive
  "Test calling a recursive meta-node from CL."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "fact(n) : { case{n < 2 : 1; n * fact(n - 1)} }")

    (with-nodes ((fact "fact")) modules
      (is (= 6 (call-meta-node fact '(3))))
      (is (= 120 (call-meta-node fact '(5))))
      (is (= 1 (call-meta-node fact '(0)))))))

(test call-meta-node-tail-recursive
  "Test calling a tail-recursive meta-node from CL."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "fact(n) : { iter(n,acc) : case{n < 2 : acc; iter(n - 1, n * acc)}; iter(n, 1) }")

    (with-nodes ((fact "fact")) modules
      (is (= 6 (call-meta-node fact '(3))))
      (is (= 120 (call-meta-node fact '(5))))
      (is (= 1 (call-meta-node fact '(0)))))))

(test call-meta-node-with-meta-node-call
  "Test calling a meta-node which calls other meta-nodes."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "1-(n) : n - 1"
           "1+(n) : n + 1"
           "f(a, b) : 1-(a) * 1+(b)")

    (with-nodes ((f "f")) modules
      ;; f(a, b) = (a - 1) * (b + 1)
      (is (= 0 (call-meta-node f '(1 5))))
      (is (= 45 (call-meta-node f '(10 4))))
      (is (= 33 (call-meta-node f '(4 10)))))))

(test call-meta-node-nested-meta-nodes
  "Test calling a meta-node with nested meta-nodes."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "f(x, y, z) : { g(n) : n - sum; x + y -> sum; g(z) }")

    (with-nodes ((f "f")) modules
      ;; f(x, y, z) = z - (x + y), via nested meta-node g referencing
      ;; the outer node `sum`.
      (is (= 0 (call-meta-node f '(1 2 3))))
      (is (= 2 (call-meta-node f '(2 3 7)))))))
(test call-meta-node-optional-arguments-no-default
  "Test calling a meta-node with optional arguments without default values."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, fail-type?)"
           "inc(n, :(d)) : n + d"

           "f(x) : inc(x)"
           "g(x) : inc(x, 2)"
           ;; Omitting `d` should produce a No-Value% failure.
           "h(x) : fail-type?(inc(x), &(No-Value%))")

    (with-nodes ((f "f") (g "g") (h "h")) modules
      (signals tridash-fail (call-meta-node f (list 3)))
      (is (= 7 (call-meta-node g (list 5))))
      (is-true (call-meta-node h (list 2))))))
(test call-meta-node-optional-arguments-with-default
  ;; Fixed docstring: it previously read "without default values",
  ;; copied from the preceding test, but this test covers optional
  ;; arguments WITH default values (d : 1).
  "Test calling a meta-node with optional arguments with default values."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +)"
           "inc(n, d : 1) : n + d"

           "f(x) : inc(x)"
           "g(x) : inc(x, 2)")

    (with-nodes ((f "f") (g "g")) modules
      ;; `d` defaults to 1, so f(3) = 3 + 1 = 4.
      (is (= 4 (call-meta-node f (list 3))))
      ;; `d` given explicitly as 2, so g(5) = 5 + 2 = 7.
      (is (= 7 (call-meta-node g (list 5)))))))
(test call-meta-node-keyword-arguments
  "Test calling a meta-node with keyword arguments"

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +)"
           "add(a, b, c : 3, d : 4) : a + b + c + d"
           ;; Keyword arguments may be supplied out of order.
           "f(x, y) : add(x, d : 10, b : y)")

    (with-nodes ((f "f")) modules
      ;; f(x, y) = x + y + 3 + 10
      (is (= 16 (call-meta-node f '(1 2))))
      (is (= 45 (call-meta-node f '(15 17)))))))

(test call-meta-node-rest-argument
  "Test calling a meta-node with rest argument."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, and, =, Empty)"
           "check(..(xs)) : xs = Empty"

           "f(x) : x and check()"
           "g(x) : check(x)"
           "h(x) : check(x, 1, 2, 3)")

    (with-nodes ((f "f") (g "g") (h "h")) modules
      ;; With no extra arguments the rest list is Empty; otherwise not.
      (is-true (call-meta-node f '(t)))
      (is (= nil (call-meta-node g '(2))))
      (is (= nil (call-meta-node h '(2)))))))
;; Higher-order tests: meta-nodes passed as values to other meta-nodes,
;; covering optional arguments, default values, rest arguments, outer
;; node references, external meta-nodes and error cases.

(test call-higher-order-meta-node
  "Test calling meta-node with higher order meta-nodes."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, not)"
           "apply(f, x) : f(x)"
           "1+(n) : n + 1"

           "f(a) : apply(..(not), a)"
           "g(a) : apply(..(1+), a)")

    (with-nodes ((f "f") (g "g")) modules
      (is (= t (call-meta-node f '(nil))))
      (is (= nil (call-meta-node f '(t))))

      (is (= 2 (call-meta-node g '(1))))
      (is (= 4 (call-meta-node g '(3)))))))

(test call-higher-order-meta-node-optional-arguments
  "Test calling meta-node with higher-order meta-node with optional arguments."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, fail-type?)"
           "apply(f, x) : f(x)"
           "apply2(f, x, y) : f(x, y)"
           "1+(n, :(d)) : n + d"

           "f(a) : apply(1+, a)"
           "g(a, b) : apply2(1+, a, b)"
           "h(x) : fail-type?(apply(1+, x), &(No-Value%))")

    (with-nodes ((f "f") (g "g") (h "h")) modules
      ;; `d` has no default, so calling 1+ with one argument fails.
      (signals tridash-fail (call-meta-node f '(0)))

      (is (= 3 (call-meta-node g '(1 2))))
      (is (= 8 (call-meta-node g '(5 3))))

      (is-true (call-meta-node h '(1))))))

(test call-higher-order-meta-node-optional-argument-with-default
  "Test calling meta-node with higher order meta-node with optional argument default values."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +)"
           "apply(f, x) : f(x)"
           "apply2(f, x, y) : f(x, y)"
           "1+(n, d : 1) : n + d"

           "f(a) : apply(1+, a)"
           "g(a, b) : apply2(1+, a, b)")

    (with-nodes ((f "f") (g "g")) modules
      (is (= 1 (call-meta-node f '(0))))
      (is (= 2 (call-meta-node f '(1))))

      (is (= 3 (call-meta-node g '(1 2))))
      (is (= 8 (call-meta-node g '(5 3)))))))

(test call-higher-order-meta-node-rest-argument
  "Test calling meta-node with higher order meta-node with rest argument."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, cons)"
           "apply3(f, x, y, z) : f(x, y, z)"
           "apply(f, x) : f(x)"
           "l(x, ..(xs)) : cons(x + 1, xs)"

           "f(a, b, c) : apply3(l, a, b, c)"
           "g(x) : apply(l, x)")

    (with-nodes ((f "f") (g "g")) modules
      (is (= '(2 3 4) (call-meta-node f '(1 3 4))))
      (is (= '(2) (call-meta-node g '(1)))))))

(test call-higher-order-meta-node-rest-argument-empty
  "Test calling meta-node with higher order meta-node with empty rest argument."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, Empty, =)"
           "apply(f, x) : f(x)"
           "l(x, ..(xs)) : xs = Empty"

           "f(a) : apply(l, a)")

    (with-nodes ((f "f")) modules
      (is-true (bool-value (call-meta-node f '(1)))))))

(test call-higher-order-meta-node-optional-arguments-outer-nodes
  "Test calling higher order meta-node with optional arguments and outer node references."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +)"
           "apply(f, x) : f(x)"
           "test(a, x) : { f(y, d : 1) : y + d + x; apply(f, a) }")

    (with-nodes ((test "test")) modules
      ;; test(2, 3) = 2 + 1 + 3 = 6
      (is (= 6 (call-meta-node test '(2 3)))))))

(test call-higher-order-external-meta-node
  "Test calling meta-node with higher-order external meta-node."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, -)"
           "apply(f, x) : f(x)"
           "apply2(f, x, y) : f(x, y)"

           "f(a) : apply(-, a)"
           "g(a, b) : apply2(-, a, b)")

    (with-nodes ((f "f") (g "g")) modules
      ;; `-` with one argument is negation, with two it is subtraction.
      (is (= -1 (call-meta-node f '(1))))
      (is (= -2 (call-meta-node f '(2))))

      (is (= 1 (call-meta-node g '(3 2))))
      (is (= 2 (call-meta-node g '(5 3)))))))

(test call-higher-order-meta-node-error
  "Test error when calling a non-meta-node."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +)"
           "apply(f, x) : f(x)"
           ;; x+ references outer node x, so it cannot be used as a
           ;; higher-order function value.
           "x+(n) : n + ..(x)"
           "x"

           "f(a) : apply(..(x+), a)")

    (with-nodes ((f "f")) modules
      (signals semantic-error (call-meta-node f '(1))))))
(test call-primitive-function-subtract-and-negate
  "Test calling `-` meta-node with 2 arguments and 1 argument."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, -)"
           "sub(a, b) : a - b"
           "neg(x) : -(x)")

    (with-nodes ((sub "sub") (neg "neg")) modules
      (is (= 3 (call-meta-node sub '(5 2))))
      (is (= -5 (call-meta-node neg '(5)))))))

(test call-meta-node-object-expressions
  "Test calling meta-node with object expressions."

  (with-module-table modules
    (build "Person(first, last) : { first -> self.first; last -> self.last }"
           "get-first(p) : p.first"
           "get-last(p) : p.last")

    (with-nodes ((person "Person") (get-first "get-first") (get-last "get-last"))
        modules

      (let ((p (call-meta-node person '("John" "Doe"))))
        (is (= "John" (call-meta-node get-first (list p))))
        (is (= "Doe" (call-meta-node get-last (list p))))))))

(test call-meta-node-catch-fail-operand
  "Test catching failures in functor operand."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, !=)"
           ;; fails(x) is true iff evaluating x signals a failure:
           ;; `x != x` fails when x fails, which is caught by True.
           "fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }")

    (with-nodes ((fails "fails")) modules
      (is-false (bool-value (call-meta-node fails '(1))))
      (is-true
       (->> (thunk (error 'tridash-fail))
            list
            (call-meta-node fails))))))

(test call-meta-node-catch-fail-operator
  "Test catching failures in functor operator."

  ;; Test that failures in the operator of a functor are caught.

  (with-module-table modules
    (build-core-module)
    (build "/import(core, !=, >, -)"
           "neg(x) : -(x)"
           ;; getf(f, x) evaluates to f only when x > 0, otherwise fails.
           "getf(f, x) : { x > 0 -> (f -> self) }"
           "test(x) : fails((getf(neg, x))(x))"

           "fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }")

    (with-nodes ((test "test")) modules
      (is (= nil (call-meta-node test '(1))))
      (is-true (call-meta-node test '(-1))))))

(test call-meta-node-fail-types
  "Test failure types."

  (with-module-table modules
    ;; NOTE(review): BUILD-CORE-MODULE is called with an explicit
    ;; MODULES argument here, unlike the argument-less calls elsewhere
    ;; in this file -- presumably it accepts an optional module table;
    ;; confirm against its definition.
    (build-core-module modules)
    (build-source-file "./test/inputs/macros/failure-types.trd" modules)

    (with-nodes ((check-range "check-range")) modules
      (is (= "" (call-meta-node check-range '(2 1 3))))
      (is (= "Error: below minimum!" (call-meta-node check-range '(0 1 3))))
      (is (= "Error: above maximum!" (call-meta-node check-range '(10 2 7)))))))
(test call-meta-node-expression-block
  "Test calling meta-node with one expression-block."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +)"
           "f(x) : (x + 1) + (x + 1)")

    (with-nodes ((f "f")) modules
      ;; Fixed: the original used (IS-TRUE form expected), but IS-TRUE
      ;; treats its second argument as a reason argument, so the
      ;; expected value was never compared. Assert equality instead.
      ;; f(x) = (x + 1) + (x + 1) = 2 * (x + 1).
      (is (= 4 (call-meta-node f '(1))))
      (is (= 6 (call-meta-node f '(2)))))))
(test call-meta-node-expression-block-multiple-references
  "Test calling meta-node with expression-block with multiple references."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, *, +, -)"
           "f(x, y) : { x + 1 -> x1; y + 2 -> y2; (x1 + y2) * (x1 - y2) }")

    (with-nodes ((f "f")) modules
      ;; Fixed: the original used (IS-TRUE form expected), but IS-TRUE
      ;; treats its second argument as a reason argument, so the
      ;; expected value was never compared. Assert equality instead.
      ;; f(x, y) = ((x+1) + (y+2)) * ((x+1) - (y+2)):
      ;; f(3, 7) = 13 * -5 = -65; f(5, 2) = 10 * 2 = 20.
      (is (= -65 (call-meta-node f '(3 7))))
      (is (= 20 (call-meta-node f '(5 2)))))))
(test call-meta-node-cyclic-references
  "Test calling a meta-node with cyclic references."

  (with-module-table modules
    (build-core-module)
    (build-source-file "./test/inputs/macros/cyclic-references.trd" modules)

    (with-nodes ((f "f")) modules
      ;; NOTE(review): IS-TRUE treats its second argument as a reason
      ;; argument, so '(1 2 1 2 1) is never compared against the
      ;; result -- only truthiness is checked. Since f involves cyclic
      ;; references its result may be a lazy/cyclic structure that
      ;; cannot safely be compared with `=`; confirm against
      ;; cyclic-references.trd before strengthening this assertion.
      (is-true (call-meta-node f '(1 2)) '(1 2 1 2 1)))))
(test call-meta-node-type-error-arithmetic-functions
  "Test type errors in arithmetic functions."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, !=)"
           "1+(x) : fails(x + 1)"
           "fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }")

    (with-nodes ((1+ "1+")) modules
      ;; Adding 1 to a number succeeds; adding 1 to a string fails.
      (is (= nil (call-meta-node 1+ '(1))))
      (is-true (call-meta-node 1+ '("hello"))))))

(test call-meta-node-type-error-objects
  "Test type errors in objects."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, !=)"
           "test(x) : fails(x.key)"
           "fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }")

    (with-nodes ((test "test")) modules
      ;; Member access fails on a non-object and on an object without
      ;; the member; it succeeds when the "key" entry is present.
      (is-true (bool-value (call-meta-node test '(1))))
      (is-true (bool-value (call-meta-node test (list (make-hash-map)))))

      (is (= nil
             (->> (list (cons (id-symbol "key") 1))
                  alist-hash-map
                  list
                  (call-meta-node test)
                  bool-value))))))
;;; Test Actual Macros
(test macro-compile-time-computation
  "Test macro which performs computation at compile-time."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "square(x) : x * x"
           "/attribute(square, macro, 1)"

           ;; square(3) is evaluated at build time, leaving a * 9.
           "a * square(3) -> b")

    (test-not-nodes modules
                    '(("/in" "core" "*") "a" ("square" 3))
                    '("square" 3))

    (with-nodes ((a "a") (a*9 (("/in" "core" "*") "a" 9))
                 (b "b")
                 (* "*"))
        modules

      (has-value-function (a) a*9 `(,* ,a 9))
      (test-simple-binding a*9 b))))

(test macro-quoted-expression
  "Test macro which returns quoted expression."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "square(x) : list(/quote(*), x, x)"
           "/attribute(square, macro, 1)"

           "square(a) -> b")

    (test-not-nodes modules '("square" "a"))

    (with-nodes ((a "a") (b "b")
                 (a*a (("/in" "core" "*") "a" "a"))
                 (* "*"))
        modules

      (has-value-function (a) a*a `(,* ,a ,a))
      (test-simple-binding a*a b))))

(test macro-meta-node-reference
  "Test macro which returns expression with meta-node references."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           ;; & * produces a reference to the * meta-node itself.
           "square(x) : list(& *, x, x)"
           "/attribute(square, macro, 1)"

           "square(a) -> b")

    (test-not-nodes modules '("square" "a"))

    (with-nodes ((a "a") (b "b")
                 (a*a (("/in" "core" "*") "a" "a"))
                 (* "*"))
        modules

      (has-value-function (a) a*a `(,* ,a ,a))
      (test-simple-binding a*a b))))

(test macro-with-macros
  "Test expansion of macros in macro meta-nodes."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, ->, list, *)"
           ;; ' is itself a macro, used inside the square macro.
           "'(x) : list(/quote(/quote), x)"
           "/attribute(', macro, 1)"

           "square(x) : list('(*), x, x)"
           "/attribute(square, macro, 1)"

           "square(a) -> b")

    (test-not-nodes modules '("square" "a"))

    (with-nodes ((a "a") (b "b")
                 (a*a (("/in" "core" "*") "a" "a"))
                 (* "*"))
        modules

      (has-value-function (a) a*a `(,* ,a ,a))
      (test-simple-binding a*a b))))

(test macro-multiple-arguments
  "Test macros with multiple arguments."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, list, ->, if)"
           "'(x) : list(/quote(/quote), x)"
           "/attribute(', macro, 1)"

           ;; !- is registered as a binary infix operator macro which
           ;; expands to an if expression.
           "!-(a, b) : list('(if), a, b)"
           "/attribute(!-, macro, 1)"
           "/operator(!-, 25, left)"

           "a !- b -> out")

    (test-not-nodes modules '("!-" "a" "b"))

    (with-nodes ((a "a") (b "b") (out "out")
                 (a!-b (("/in" "builtin" "if") "a" "b"))
                 (if "if"))
        modules

      (has-value-function (a b) a!-b `(,if ,a ,b :none))
      (test-simple-binding a!-b out))))

(test macro-keyword-arguments
  "Test passing macro arguments by keyword"

  (with-module-table modules
    (build-core-module)
    (build "/import(core, list)"
           "f(x) : x"

           "call(operator, operand) : list(operator, operand)"
           "/attribute(call, macro, True)"

           ;; Keyword arguments may be given in any order.
           "call(operand : in1, operator : f) -> out1")

    (test-not-nodes modules '("call" (":" "operand" "in1") (":" "operator" "f")))

    (with-nodes ((in1 "in1") (out1 "out1")
                 (f "f")
                 (f-in1 ("f" "in1")))
        modules

      (has-value-function (in1) f-in1
                          `(,f ,in1))

      (test-simple-binding f-in1 out1))))
;; Arity checks on macro calls are performed before the macro body is
;; expanded, so the (sometimes deliberately sloppy) macro bodies below
;; never actually run in the error cases.

(test macro-arity-check-required-only
  "Test macro arity checks with required arguments only."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, *, list)"
           "square(x) : list(/quote(*), x, x)"
           "/attribute(square, macro, 1)")

    (signals arity-error (build "square(x, y) -> out"))))

(test macro-arity-check-optional-not-enough
  "Test macro optional argument arity checks with not enough arguments."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, list)"
           "add3(x, y, z : 1) : list(/quote(+), x, list(/quote(+), y, z))"
           "/attribute(add3, macro, 1)")

    (signals arity-error (build "add3(x)"))))

(test macro-arity-check-optional-too-many
  "Test macro optional argument arity checks with too many arguments."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, list)"
           "1+(n, d : 1) : list(/quote(+), x, d)"
           "/attribute(1+, macro, 1)")

    (signals arity-error (build "1+(x, y, z)"))))

(test macro-arity-check-rest-arguments
  "Test macro rest argument arity checks."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, cons, list)"
           "make-list(x, ..(xs)) : cons(/quote(list), cons(x, xs))"
           "/attribute(make-list, macro, 1)"

           "make-list(x, y, z) -> output"

           "/attribute(x, input, 1)"
           "/attribute(y, input, 1)"
           "/attribute(z, input, 1)")

    (with-nodes ((x "x") (y "y") (z "z")
                 (list "list")
                 (output "output"))
        (finish-build)

      (has-value-function
       (x y z)
       output
       `(,list ,(argument-list (list x y z)))))))

(test macro-arity-check-keyword-missing-required
  "Test macro keyword argument arity check with missing required argument"

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, list)"
           "1+(n, d : 1) : list(/quote(+), x, d)"
           "/attribute(1+, macro, 1)")

    (signals arity-error (build "1+(d : 2)"))))

(test macro-arity-check-keyword-unknown
  "Test macro keyword argument arity check with unknown keyword"

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +, list)"
           "1+(n, d : 1) : list(/quote(+), x, d)"
           "/attribute(1+, macro, 1)")

    (signals arity-error (build "1+(d : 2, n : 1, delta : 100)"))))

(test macro-rest-argument-outer-nodes
  "Test macros with rest arguments and outer nodes."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, cons, list)"
           ;; The macro body references outer node y, which is not
           ;; permitted in macros.
           "make-list(x, ..(xs)) : cons(/quote(list), cons(x, cons(y, xs)))"
           "/attribute(make-list, macro, 1)"

           "/attribute(a, input, 1)"
           "/attribute(b, input, 1)"
           "/attribute(c, input, 1)"
           "/attribute(y, input, 1)")

    (signals macro-outer-node-error (build "make-list(a, b, c) -> output"))))
(test macro-build-meta-node-multiple-times
  "Test building a meta-node multiple times when building macro."

  ;; fact is both used as an ordinary meta-node (fact(in)) and expanded
  ;; at build time via the eval-fact macro, so it is built twice.
  (with-module-table modules
    (build-core-module)
    (build "/import(core, if, -, +, *, <)"
           "fact(n) : { 1 -> start; iter(n, acc) : if(n < start, acc, iter(n - 1, acc * n)); iter(n,1) }"

           "eval-fact(n) : fact(n)"
           "/attribute(eval-fact, macro, 1)"

           "fact(in) + eval-fact(3) -> output"
           "/attribute(in, input, 1)")

    (with-nodes ((in "in") (output "output")
                 (fact "fact") (+ "+"))
        (finish-build)

      ;; eval-fact(3) was expanded at build time to the constant 6.
      (has-value-function (in) output
                          `(,+ (,fact ,in) 6))

      (with-nodes ((iter "iter") (n "n")) (definition fact)
        (has-value-function (n) fact
                            `(,iter ,n 1))))))

(test macro-error-compile-loop
  "Test error when compilation loop detected in macro compilation."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, list)"
           ;; test expands to an expression containing a call to itself,
           ;; producing an infinite macro-expansion loop.
           "test(x,y) : list(&(->), x, test(x,y))"
           "/attribute(test, macro, 1)")

    (with-nodes ((test "test")) modules
      (signals compile-meta-node-loop-error (call-meta-node test '(1 2))))))

(test macro-error-malformed-list
  "Test error when macro returns a malformed list."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           ;; cons(x, y) with non-list y yields a dotted (malformed) list.
           "mac(x, y) : cons(x, y)"
           "/attribute(mac, macro, 1)"

           "f(x) : x"
           "target-f(s, expr) : cons(s, head(tail(expr)))"
           "/attribute(f, target-transform, target-f)")

    (signals tridash-fail (build "mac(1, 2)"))
    (signals tridash-fail (build "a -> f(b)"))))

(test macro-error-return-empty-list-failure
  "Test error when macro returns empty list failure."

  (with-module-table modules
    (build-core-module)
    (build "/import(core)"
           "mac(x) : list(x, Empty!)"
           "/attribute(mac, macro, 1)"

           "f(x) : x"
           "target-f(s, expr) : list(s, Empty!)"
           "/attribute(f, target-transform, target-f)")

    (signals tridash-fail (build "mac(a)"))
    (signals tridash-fail (build "x -> f(y)"))))
;;; Test Target Node Transform Macros
;; These tests drive target-transform macros (macros which rewrite a
;; node appearing as the target of a binding) from source files under
;; ./test/inputs/macros/.

(test target-transform-single-argument
  "Test target transformation with single argument."

  (with-module-table modules
    (build-core-module)
    (build-source-file #p"./test/inputs/macros/target-transform-1.trd" modules)

    (with-nodes ((in "in")
                 (out "out")
                 (int "int"))
        (finish-build)

      (has-value-function (in) out `(,int ,in)))))

(test target-transform-multiple-arguments
  "Test target transformation with multiple arguments."

  (with-module-table modules
    (build-core-module)
    (build-source-file #p"./test/inputs/macros/target-transform-2.trd" modules)

    (with-nodes ((in "in") (a "a") (b "b")
                 (- "-"))
        (finish-build)

      (has-value-function (in a) b `(,- ,in ,a)))))

(test target-transform-arity-check-not-enough
  "Test arity checks in target transform with not enough arguments."

  (with-module-table modules
    (build-core-module)

    (signals arity-error
        (build-source-file #p"./test/inputs/macros/target-transform-3.trd" modules))))

(test target-transform-arity-check-too-many
  "Test arity checks in target transform with too many arguments."

  (with-module-table modules
    (build-core-module)

    (signals arity-error
        (build-source-file #p"./test/inputs/macros/target-transform-4.trd" modules))))

(test target-transform-arity-check-rest-argument
  "Test arity checks in target transform with rest arguments."

  (with-module-table modules
    (build-core-module)
    (build-source-file #p"./test/inputs/macros/target-transform-5.trd" modules)

    (with-nodes ((in "in") (a "a") (b "b")
                 (- "-"))
        (finish-build)

      (has-value-function (in a) b `(,- ,in ,a)))))

(test target-transform-arity-check-optional-and-rest
  "Test arity checks in target transform with optional and rest arguments."

  (with-module-table modules
    (build-core-module)
    (build-source-file #p"./test/inputs/macros/target-transform-6.trd" modules)

    (with-nodes ((in "in") (a "a") (b "b")
                 (- "-"))
        (finish-build)

      (has-value-function (in a) b `(,- ,in ,a)))))

(test target-transform-arity-check-optional-extra
  "Test arity checks in target transform with optional extra arguments."

  (with-module-table modules
    (build-core-module)
    (build-source-file #p"./test/inputs/macros/target-transform-7.trd" modules)

    (with-nodes ((in "in") (a "a") (b "b")
                 (- "-"))
        (finish-build)

      (has-value-function (in a) b `(,- ,in ,a)))))
;;; Test Attribute Processor Nodes
(test attribute-processor-meta-node
  "Test attribute processor with meta-node."

  ;; The processor meta-node (declared in the .trd input) computes the
  ;; value stored in the :matcher attribute of node f.
  (with-module-table modules
    (build-source-file #p"./test/inputs/macros/attribute-processor-1.trd" modules)

    (with-nodes ((f "f") (match-f "match-f"))
        modules

      (is (eq match-f (attribute :matcher f))))))

(test attribute-processor-external-meta-node
  "Test attribute processor with external meta-node"

  (with-module-table modules
    (build-source-file #p"./test/inputs/macros/attribute-processor-2.trd" modules)

    (with-nodes ((f "f") (match-f "match-f"))
        modules

      (is (eq match-f (attribute :matcher f))))))
| null | https://raw.githubusercontent.com/alex-gutev/tridash/c7dbb36efe32a14ad9c4484ed45b1000e2f7132e/test/macros.lisp | lisp |
Tridash Programming Language.
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
User-Defined Macro Tests
Test Suite Definition
Create an empty `FLAT-NODE-TABLE' to mark meta-node as
already built
Test that failures in the operator of a functor are caught.
Test Actual Macros
Test Attribute Processor Nodes | macros.lisp
Copyright ( C ) 2019 - 2021
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(defpackage :tridash/test.macros
(:use :generic-cl
:alexandria
:anaphora
:arrows
:iterate
:optima
:named-readtables
:tridash.parser
:tridash.frontend
:fiveam
:tridash/test
:tridash/test.util)
(:shadowing-import-from :generic-cl
:emptyp
:multiply
:accumulate)
(:shadowing-import-from :fiveam :fail)
(:import-from :lol
:defmacro!
:lol-syntax)
(:import-from :tridash.frontend
:tridash->cl-function
:call-meta-node
:call-tridash-meta-node
:call-node
:thunk
:resolve
:resolve%
:tridash-fail
:fail-thunk
:+empty-list+
:group-rest-args
:check-arity
:correct-arity?%
:fail-arity-error
:+optional-argument+
:+rest-argument+))
(in-package :tridash/test.macros)
(in-readtable lol-syntax)
(def-suite macros
:description "Test user-defined Tridash macros."
:in frontend)
(in-suite macros)
Utilities
(defun functor (operator &rest arguments)
"Creates a `FUNCTOR-EXPRESSION' with operator OPERATOR and arguments
ARGUMENTS."
(functor-expression operator arguments))
(defun expression= (expected got)
"Checks that the CL expression GOT is equal to EXPECTED. Symbols in
EXPECTED, beginning with $, are replaced with the symbol in GOT
corresponding to the first occurrence."
(let ((aliases (make-hash-map)))
(flet ((equal? (got expected)
(match* (got expected)
(((type symbol) (type symbol))
(= got
(cond
((starts-with #\$ (symbol-name expected))
(ensure-get expected aliases got))
((starts-with #\! (symbol-name expected))
(id-symbol (subseq (symbol-name expected) 1)))
(t
expected))))
((_ _)
(= got expected)))))
(tree-equal got expected :test #'equal?))))
(defmacro with-external-meta-nodes ((&rest names) &body body)
"Creates `EXTERNAL-META-NODE's with names NAMES and binds to
variables with the same identifiers as the names upcased."
`(let ,(map #`(,(intern (string-upcase a1)) (make-instance 'external-meta-node :name (id-symbol ,a1))) names)
,@body))
(defmacro! with-core-nodes ((&rest names) &body body)
"Builds the core module and binds the node with names NAMES to
variables with the same identifiers as the names, upcased."
`(with-module-table ,g!modules
(build-core-module)
(with-nodes ,(map #`(,(intern (string-upcase a1)) ,a1) names) ,g!modules
,@body)))
(defmacro mock-meta-node ((&rest operands) expression)
  "Creates a `META-NODE' which takes operands OPERANDS and has a value
function consisting of EXPRESSION. OPERANDS is a list of symbols
naming the dependency nodes; an operand may also be of the form
(OPTIONAL NAME [DEFAULT]) or (REST NAME), declaring an optional or
rest argument respectively. EXPRESSION is evaluated in an
environment where each symbol in OPERANDS is bound to the
`NODE-LINK' object corresponding to the operand, and the symbol
SELF is bound to the `META-NODE' object."
  (flet ((make-operand (operand)
           ;; Convert an operand spec to the representation stored in
           ;; the meta-node's OPERANDS list.
           (match operand
             ((or (list 'optional symb value)
                  (list 'optional symb))
              (list +optional-argument+ (make-instance 'node :name symb) value))
             ((list 'rest symb)
              (list +rest-argument+ (make-instance 'node :name symb)))
             (_ (make-instance 'node :name operand))))
         (operand-node (operand)
           ;; Extract the bare operand name from an operand spec.
           (match operand
             ((list* 'optional symb _)
              symb)
             ((list 'rest symb)
              symb)
             (_ operand))))
    `(let ((self (make-instance 'final-meta-node
                                :name 'test-meta-node
                                :operands ',(map #'make-operand operands)))
           ,@(map #`(,a1 (node-link (make-instance 'node :name ',a1)))
                  (map #'operand-node operands)))
       ;; Give the meta-node an (empty) definition so it is treated as
       ;; a fully built, non-external meta-node.
       (setf (definition self) (make-instance 'flat-node-table :nodes (make-hash-set)))
       (setf (value-function (context self nil))
             ,expression)
       ;; Register each operand both as a context operand and as a
       ;; dependency of the meta-node.
       ,@(map #`(setf (get ',a1 (operands (context self nil))) ,a1)
              (map #'operand-node operands))
       ,@(map #`(setf (get ',a1 (dependencies self)) ,a1)
              (map #'operand-node operands))
       self)))
(defmacro test-compile-meta-node ((&rest operands) expression args body)
  "Creates and compiles a `META-NODE' to a CL LAMBDA expression and
checks that it has arguments ARGS and body BODY, by EXPRESSION=.
OPERANDS and EXPRESSION correspond to the OPERANDS and EXPRESSION
arguments of MOCK-META-NODE.
ARGS (not evaluated) is the expected lambda-list of the function.
BODY is a form whose value is the expected body of the compiled
LAMBDA expression. It is evaluated in an environment in which the
symbol SELF is bound to the `META-NODE' object."
  (flet ((lambda-args (lambda-list)
           ;; Fresh uninterned copies of the argument names in
           ;; LAMBDA-LIST (lambda-list keywords removed). Since the
           ;; expected names begin with $, EXPRESSION= matches them as
           ;; aliases against the gensym'd names in the actual output.
           (->> (remove-if (rcurry #'memberp lambda-list-keywords) lambda-list)
                (map #'ensure-car)
                (map (compose #'gensym #'symbol-name)))))
    `(let ((self (mock-meta-node ,operands ,expression)))
       (is (expression=
            `(lambda ,',args
               (declare (ignorable ,@',(lambda-args args)))
               ,,body)
            (tridash->cl-function self))))))
;;; Tridash to CL Compilation Tests
(test compile-functor-expression
"Test compilation of functor expressions to CL."
(with-core-nodes ("if" "<" "-")
(test-compile-meta-node
(a b)
(functor if (functor < a b) (functor - b a) (functor - a b))
($a $b)
'(let nil
(!|if| (!< $a $b)
(thunk (!- $b $a))
(thunk (!- $a $b)))))))
(test compile-if-expression
"Test compilation of if expressions to CL."
(with-core-nodes ("<" "-")
(test-compile-meta-node
(a b)
(if-expression (functor < a b) (functor - b a) (functor - a b))
($a $b)
'(let nil
(!|if| (!< $a $b)
(thunk (!- $b $a))
(thunk (!- $a $b)))))))
(test compile-object-expression
"Test compilation of object expressions to CL."
(with-core-nodes ("+" "-")
(test-compile-meta-node
(x y)
(object-expression
`((sum ,(functor + x y))
(diff ,(functor - x y))))
($x $y)
'(let nil
(alist-hash-map
(list
(cons 'sum (thunk (!+ $x $y)))
(cons 'diff (thunk (!- $x $y)))))))))
(test compile-member-expression
"Test compilation of member expressions to CL."
(test-compile-meta-node
(object)
(member-expression
(member-expression object 'key1) 'key2)
($obj)
'(let nil
(!|member| (!|member| $obj 'key1) 'key2))))
(test compile-catch-expression
"Test compilation of catch expressions to CL."
(with-core-nodes ("/" "*")
(test-compile-meta-node
(a b)
(catch-expression
(functor / a b)
(functor * a b))
($a $b)
'(let nil
(!|catch| (!/ $a $b) (thunk (!* $a $b)))))))
(test compile-fail-expression
"Test compilation of fail expressions to CL."
(test-compile-meta-node
()
(fail-expression)
()
'(let nil
(!|fail|))))
(test compile-expression-block
"Test compilation of expression blocks, with reference count = 1, to CL."
(with-core-nodes ("+")
(test-compile-meta-node
(a)
(expression-block
(functor + a 1))
($a)
'(let nil
(!+ $a 1)))))
(test compile-expression-block-muliple-references
"Test compilation of expression blocks, with reference count > 1, to CL."
(with-core-nodes ("+")
(test-compile-meta-node
(a)
(let ((block (expression-block (functor + a 1) :count 2)))
(functor + block block))
($a)
'(let ($a+1)
(setf $a+1 (thunk (!+ $a 1)))
(!+ $a+1 $a+1)))))
(test compile-meta-node-call
"Test compilation of calls to other meta-nodes, to CL."
(with-core-nodes ("-")
(let ((meta-node (mock-meta-node (a) a)))
(test-compile-meta-node
(a)
(functor meta-node (functor - a))
($a)
`(let nil
(call-tridash-meta-node ,meta-node (list (!- $a))))))))
(test compile-higher-order-external-meta-node
"Test compilation of higher order external meta-node."
(with-core-nodes ("not")
(let ((apply (mock-meta-node (f x) (functor f x))))
(test-compile-meta-node
(x)
(functor apply (meta-node-ref not) x)
($x)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(1 . 1) (length $args))
(apply #'!|not| $args)
(fail-arity-error)))
$x)))))))
(test compile-higher-order-if-meta-node
"Test compilation of higher order if meta-node."
(with-core-nodes ("if")
(let ((apply (mock-meta-node (f x y z) (functor f x y z))))
(test-compile-meta-node
(x y z)
(functor apply (meta-node-ref if) x y z)
($x $y $z)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(2 . 3) (length $args))
(apply #'!|if| $args)
(fail-arity-error)))
$x $y $z)))))))
(test compile-higher-order-and-meta-node
"Test compilation of higher order `and` meta-node."
(with-core-nodes ("and")
(let ((apply (mock-meta-node (f x y) (functor f x y))))
(test-compile-meta-node
(x y)
(functor apply (meta-node-ref and) x y)
($x $y)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(2 . 2) (length $args))
(apply #'!|and| $args)
(fail-arity-error)))
$x $y)))))))
(test compile-higher-order-or-meta-node
"Test compilation of higher order `or` meta-node."
(with-core-nodes ("or")
(let ((apply (mock-meta-node (f x y) (functor f x y))))
(test-compile-meta-node
(x y)
(functor apply (meta-node-ref or) x y)
($x $y)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(2 . 2) (length $args))
(apply #'!|or| $args)
(fail-arity-error)))
$x $y)))))))
(test compile-higher-order-meta-node
"Test compilation of higher-order user defined meta-node."
(let ((apply (mock-meta-node (f x) (functor f x)))
(f (mock-meta-node (x) x)))
(test-compile-meta-node
(x)
(functor apply (meta-node-ref f) x)
($x)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(1 . 1) (length $args))
(destructuring-bind ($x2) $args
(call-tridash-meta-node ,f (list $x2)))
(fail-arity-error)))
$x))))))
(test compile-higher-order-meta-node-optional-arguments
"Test compilation of higher-order meta-node with optional arguments."
(let ((apply (mock-meta-node (f x) (functor f x)))
(f (mock-meta-node (x (optional y) (optional z)) x)))
(test-compile-meta-node
(x)
(functor apply (meta-node-ref f :optional (list 1 2)) x)
($x)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(1 . 3) (length $args))
(destructuring-bind ($x2 &optional ($y 1) ($z 2)) $args
(call-tridash-meta-node ,f (list $x2 $y $z)))
(fail-arity-error)))
$x))))))
(test compile-higher-order-meta-node-rest-arguments
"Test compilation of higher-order meta-node with rest arguments."
(let ((apply (mock-meta-node (f x) (functor f x)))
(f (mock-meta-node (x y (rest xs)) xs)))
(test-compile-meta-node
(x)
(functor apply (meta-node-ref f) x)
($x)
`(let nil
(call-tridash-meta-node
,apply
(list
#'(lambda (&rest $args)
(if (correct-arity?% '(2) (length $args))
(destructuring-bind ($x2 $y &rest $xs &aux ($rest (or $xs +empty-list+)))
$args
(call-tridash-meta-node ,f (list $x2 $y $rest)))
(fail-arity-error)))
$x))))))
(test compile-invoke-higher-order-node
"Test compilation of invoking value nodes."
(test-compile-meta-node
(f x y)
(functor f x y)
($f $x $y)
`(let nil
(call-node $f (list $x $y)))))
(test compile-literals
"Test compilation of literal values."
(with-core-nodes ("and")
(test-compile-meta-node
()
(functor and "hello" (functor and 1 (functor and 2.3 'symbol)))
()
'(let nil
(!|and| "hello"
(thunk
(!|and| 1
(thunk
(!|and| 2.3 'symbol)))))))))
(test compile-core-arithmetic
"Test compilation of core arithmetic meta-nodes."
(with-core-nodes ("/" "*" "+" "-")
(test-compile-meta-node
(a b c d)
(functor
/
(functor * (functor + a b) (functor - c d))
(functor - d))
($a $b $c $d)
'(let nil
(!/
(!* (!+ $a $b) (!- $c $d))
(!- $d))))))
(test compile-core-comparison-and-logical
"Test compilation of core comparison and logical meta-nodes."
(with-core-nodes ("not" "or" "and" "=" "!=" "<" "<=" ">" ">=")
(test-compile-meta-node
(x y)
(functor
not
(functor
or
(functor and (functor < x y) (functor = y x))
(functor
or
(functor <= x 10)
(functor
or
(functor > 1 y)
(functor
or
(functor >= 8 y)
(functor != x y))))))
($x $y)
'(let nil
(!|not|
(!|or|
(!|and| (!< $x $y) (thunk (!= $y $x)))
(thunk
(!|or|
(!<= $x 10)
(thunk
(!|or|
(!> 1 $y)
(thunk
(!|or|
(!>= 8 $y)
(thunk (!!= $x $y))))))))))))))
(test compile-core-type-checks
"Test compilation of core type checking meta-nodes."
(with-core-nodes ("or" "int?" "real?" "string?")
(test-compile-meta-node
(x y z)
(functor
or
(functor int? x)
(functor
or
(functor real? y)
(functor string? z)))
($x $y $z)
'(let nil
(!|or|
(!|int?| $x)
(thunk
(!|or|
(!|real?| $y)
(thunk (!|string?| $z)))))))))
(test compile-tail-recursive-if
"Test compilation of if expression in recursive tail position."
(with-core-nodes ("-" "*" "<")
(test-compile-meta-node
(n acc)
(if-expression (functor < n 2)
acc
(functor self (functor - n 1) (functor * n acc)))
($n $acc)
`(let nil
(!|if| (!< $n 2)
$acc
(thunk
(call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))
(test compile-tail-recursive-if-functor
"Test compilation of if functor in recursive tail position."
(with-core-nodes ("if" "-" "*" "<")
(test-compile-meta-node
(n acc)
(functor if
(functor < n 2)
acc
(functor self (functor - n 1) (functor * n acc)))
($n $acc)
`(let nil
(!|if| (!< $n 2)
$acc
(thunk
(call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))
(test compile-tail-recursive-expression-block
"Test compilation of expression blocks in recursive tail position."
(with-core-nodes ("if" "-" "*" "<")
(test-compile-meta-node
(n acc)
(functor if
(functor < n 2)
acc
(expression-block
(functor self (functor - n 1) (functor * n acc))))
($n $acc)
`(let nil
(!|if| (!< $n 2)
$acc
(thunk
(call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))
(test compile-tail-recursive-or-functor
"Test compilation of `or` functor in recursive tail position."
(with-core-nodes ("or" "=" "!=" "-")
(test-compile-meta-node
(n)
(functor or (functor = n 0) (functor self (functor - n 1)))
($n)
`(let nil
(!|or| (!= $n 0)
(thunk
(call-tridash-meta-node ,self (list (!- $n 1)))))))))
(test compile-tail-recursive-and-functor
"Test compilation of `and` functor in recursive tail position."
(with-core-nodes ("and" "=" "!=" "-")
(test-compile-meta-node
(n)
(functor and (functor = n 0) (functor self (functor - n 1)))
($n)
`(let nil
(!|and| (!= $n 0)
(thunk
(call-tridash-meta-node ,self (list (!- $n 1)))))))))
(test compile-tail-recursive-catch-expression
"Test compilation of catch expressions in recursive tail position."
(with-core-nodes ("-" "+")
(test-compile-meta-node
(n)
(catch-expression (functor self (functor + n 1))
(functor self (functor - n 1)))
($n)
`(let nil
(!|catch| (call-tridash-meta-node ,self (list (!+ $n 1)))
(thunk
(call-tridash-meta-node ,self (list (!- $n 1)))))))))
(test compile-meta-node-optional-arguments
"Test compilation of meta-node with optional arguments."
(with-core-nodes ("+")
(test-compile-meta-node
(n (optional d 1))
(functor + n d)
($n &optional ($d 1))
'(let nil
(!|+| $n $d)))))
(test compile-meta-node-multiple-optional-arguments
"Test compilation of meta-node with multiple optional arguments."
(with-core-nodes ("+")
(test-compile-meta-node
(n (optional d 1) (optional e 2))
(functor + n (functor + d e))
($n &optional ($d 1) ($e 2))
'(let nil
(!|+| $n (!|+| $d $e))))))
(test compile-meta-node-rest-argument
"Test compilation of meta-node with rest argument."
(with-core-nodes ("cons")
(test-compile-meta-node
(x (rest xs))
(functor cons x xs)
($x &optional ($xs +empty-list+))
'(let nil
(!|cons| $x $xs)))))
(test compile-meta-node-optional-and-rest-arguments
"Test compilation of meta-node with optional and rest arguments."
(with-core-nodes ("cons")
(test-compile-meta-node
(x (optional y 2) (rest xs))
(functor cons x (functor cons y xs))
($x &optional ($y 2) ($xs +empty-list+))
'(let nil
(!|cons| $x (thunk (!|cons| $y $xs)))))))
(test compile-cyclic-references
"Test compilation of cyclic references."
(with-core-nodes ("cons")
(test-compile-meta-node
(a b)
(aprog1 (expression-block nil :count 2)
(setf (expression-block-expression it)
(functor cons a (functor cons b (cyclic-reference it)))))
($a $b)
'(let ($block)
(setf $block
(thunk (!|cons| $a (thunk (!|cons| $b $block)))))
$block))))
(test compile-error-usupported-external-meta-node
"Test that compiling an unsupported external-meta-node results in an error."
(with-external-meta-nodes ("not-a-function")
(signals
unsupported-meta-node-error
(tridash->cl-function
(mock-meta-node
(arg)
(functor not-a-function arg))))))
;;; Test Calling Tridash Meta-Nodes from CL
(test call-meta-node-single-expression
"Test calling a single expression meta-node from CL."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"min(x,y) : case{x < y : x; y}")
(with-nodes ((min "min")) modules
(is (= 2 (call-meta-node min '(2 10))))
(is (= 2 (call-meta-node min '(10 2))))
(is (= -5.3 (call-meta-node min '(-5.3 7.6))))
(is (= 1 (call-meta-node min '(1 1)))))))
(test call-meta-node-with-if-expression
"Test calling a meta-node with if expressions from CL."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"f(cond, x) : if(cond, x, 0)")
(with-nodes ((f "f")) modules
(is (= 10 (call-meta-node f '(t 10))))
(is (= 0 (call-meta-node f '(nil 5))))
(signals tridash-fail (call-meta-node f '(1 5))))))
(test call-meta-node-with-and-expression
"Test calling a meta-node with `and` expressions from CL."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"f(cond, x) : cond and x")
(with-nodes ((f "f")) modules
(is-true (call-meta-node f '(t t)))
(is (= nil (call-meta-node f '(nil t))))
(is (= nil (call-meta-node f '(t nil))))
(is (= nil (call-meta-node f '(nil nil)))))))
(test call-meta-node-with-or-expression
"Test calling a meta-node with `or` expressions from CL."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"f(cond, x) : cond or x")
(with-nodes ((f "f")) modules
(is-true (call-meta-node f '(t t)))
(is-true (call-meta-node f '(nil t)))
(is-true (call-meta-node f '(t nil)))
(is (= nil (call-meta-node f '(nil nil)))))))
(test call-meta-node-catch-fail-expression
"Test calling a meta-node with multiple nodes and CATCH-FAIL expressions."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"min(x,y) : { x < y -> (x -> /context(self,c)); y -> /context(self,c) }")
(with-nodes ((min "min")) modules
(is (= 2 (resolve (call-meta-node min '(2 10)))))
(is (= 2 (resolve (call-meta-node min '(10 2)))))
(is (= -5.3 (resolve (call-meta-node min '(-5.3 7.6)))))
(is (= 1 (resolve (call-meta-node min '(1 1))))))))
(test call-meta-node-recursive
"Test calling a recursive meta-node from CL."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"fact(n) : { case{n < 2 : 1; n * fact(n - 1)} }")
(with-nodes ((fact "fact")) modules
(is (= 6 (call-meta-node fact '(3))))
(is (= 120 (call-meta-node fact '(5))))
(is (= 1 (call-meta-node fact '(0)))))))
(test call-meta-node-tail-recursive
"Test calling a tail-recursive meta-node from CL."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"fact(n) : { iter(n,acc) : case{n < 2 : acc; iter(n - 1, n * acc)}; iter(n, 1) }")
(with-nodes ((fact "fact")) modules
(is (= 6 (call-meta-node fact '(3))))
(is (= 120 (call-meta-node fact '(5))))
(is (= 1 (call-meta-node fact '(0)))))))
(test call-meta-node-with-meta-node-call
"Test calling a meta-node which calls other meta-nodes."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"1-(n) : n - 1"
"1+(n) : n + 1"
"f(a, b) : 1-(a) * 1+(b)")
(with-nodes ((f "f")) modules
(is (= 0 (call-meta-node f '(1 5))))
(is (= 45 (call-meta-node f '(10 4))))
(is (= 33 (call-meta-node f '(4 10)))))))
(test call-meta-node-nested-meta-nodes
"Test calling a meta-node with nested meta-nodes."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"f(x, y, z) : { g(n) : n - sum; x + y -> sum; g(z) }")
(with-nodes ((f "f")) modules
(is (= 0 (call-meta-node f '(1 2 3))))
(is (= 2 (call-meta-node f '(2 3 7)))))))
(test call-meta-node-optional-arguments-no-default
"Test calling a meta-node with optional arguments without default values."
(with-module-table modules
(build-core-module)
(build "/import(core, +, fail-type?)"
"inc(n, :(d)) : n + d"
"f(x) : inc(x)"
"g(x) : inc(x, 2)"
"h(x) : fail-type?(inc(x), &(No-Value%))")
(with-nodes ((f "f") (g "g") (h "h")) modules
(signals tridash-fail (call-meta-node f (list 3)))
(is (= 7 (call-meta-node g (list 5))))
(is-true (call-meta-node h (list 2))))))
(test call-meta-node-optional-arguments-with-default
"Test calling a meta-node with optional arguments without default values."
(with-module-table modules
(build-core-module)
(build "/import(core, +)"
"inc(n, d : 1) : n + d"
"f(x) : inc(x)"
"g(x) : inc(x, 2)")
(with-nodes ((f "f") (g "g")) modules
(is (= 4 (call-meta-node f (list 3))))
(is (= 7 (call-meta-node g (list 5)))))))
(test call-meta-node-keyword-arguments
"Test calling a meta-node with keyword arguments"
(with-module-table modules
(build-core-module)
(build "/import(core, +)"
"add(a, b, c : 3, d : 4) : a + b + c + d"
"f(x, y) : add(x, d : 10, b : y)")
(with-nodes ((f "f")) modules
(is (= 16 (call-meta-node f '(1 2))))
(is (= 45 (call-meta-node f '(15 17)))))))
(test call-meta-node-rest-argument
"Test calling a meta-node with rest argument."
(with-module-table modules
(build-core-module)
(build "/import(core, and, =, Empty)"
"check(..(xs)) : xs = Empty"
"f(x) : x and check()"
"g(x) : check(x)"
"h(x) : check(x, 1, 2, 3)")
(with-nodes ((f "f") (g "g") (h "h")) modules
(is-true (call-meta-node f '(t)))
(is (= nil (call-meta-node g '(2))))
(is (= nil (call-meta-node h '(2)))))))
(test call-higher-order-meta-node
"Test calling meta-node with higher order meta-nodes."
(with-module-table modules
(build-core-module)
(build "/import(core, +, not)"
"apply(f, x) : f(x)"
"1+(n) : n + 1"
"f(a) : apply(..(not), a)"
"g(a) : apply(..(1+), a)")
(with-nodes ((f "f") (g "g")) modules
(is (= t (call-meta-node f '(nil))))
(is (= nil (call-meta-node f '(t))))
(is (= 2 (call-meta-node g '(1))))
(is (= 4 (call-meta-node g '(3)))))))
(test call-higher-order-meta-node-optional-arguments
"Test calling meta-node with higher-order meta-node with optional arguments."
(with-module-table modules
(build-core-module)
(build "/import(core, +, fail-type?)"
"apply(f, x) : f(x)"
"apply2(f, x, y) : f(x, y)"
"1+(n, :(d)) : n + d"
"f(a) : apply(1+, a)"
"g(a, b) : apply2(1+, a, b)"
"h(x) : fail-type?(apply(1+, x), &(No-Value%))")
(with-nodes ((f "f") (g "g") (h "h")) modules
(signals tridash-fail (call-meta-node f '(0)))
(is (= 3 (call-meta-node g '(1 2))))
(is (= 8 (call-meta-node g '(5 3))))
(is-true (call-meta-node h '(1))))))
(test call-higher-order-meta-node-optional-argument-with-default
"Test calling meta-node with higher order meta-node with optional argument default values."
(with-module-table modules
(build-core-module)
(build "/import(core, +)"
"apply(f, x) : f(x)"
"apply2(f, x, y) : f(x, y)"
"1+(n, d : 1) : n + d"
"f(a) : apply(1+, a)"
"g(a, b) : apply2(1+, a, b)")
(with-nodes ((f "f") (g "g")) modules
(is (= 1 (call-meta-node f '(0))))
(is (= 2 (call-meta-node f '(1))))
(is (= 3 (call-meta-node g '(1 2))))
(is (= 8 (call-meta-node g '(5 3)))))))
(test call-higher-order-meta-node-rest-argument
"Test calling meta-node with higher order meta-node with rest argument."
(with-module-table modules
(build-core-module)
(build "/import(core, +, cons)"
"apply3(f, x, y, z) : f(x, y, z)"
"apply(f, x) : f(x)"
"l(x, ..(xs)) : cons(x + 1, xs)"
"f(a, b, c) : apply3(l, a, b, c)"
"g(x) : apply(l, x)")
(with-nodes ((f "f") (g "g")) modules
(is (= '(2 3 4) (call-meta-node f '(1 3 4))))
(is (= '(2) (call-meta-node g '(1)))))))
(test call-higher-order-meta-node-rest-argument-empty
"Test calling meta-node with higher order meta-node with empty rest argument."
(with-module-table modules
(build-core-module)
(build "/import(core, Empty, =)"
"apply(f, x) : f(x)"
"l(x, ..(xs)) : xs = Empty"
"f(a) : apply(l, a)")
(with-nodes ((f "f")) modules
(is-true (bool-value (call-meta-node f '(1)))))))
(test call-higher-order-meta-node-optional-arguments-outer-nodes
"Test calling higher order meta-node with optional arguments and outer node references."
(with-module-table modules
(build-core-module)
(build "/import(core, +)"
"apply(f, x) : f(x)"
"test(a, x) : { f(y, d : 1) : y + d + x; apply(f, a) }")
(with-nodes ((test "test")) modules
(is (= 6 (call-meta-node test '(2 3)))))))
(test call-higher-order-external-meta-node
"Test calling meta-node with higher-order external meta-node."
(with-module-table modules
(build-core-module)
(build "/import(core, -)"
"apply(f, x) : f(x)"
"apply2(f, x, y) : f(x, y)"
"f(a) : apply(-, a)"
"g(a, b) : apply2(-, a, b)")
(with-nodes ((f "f") (g "g")) modules
(is (= -1 (call-meta-node f '(1))))
(is (= -2 (call-meta-node f '(2))))
(is (= 1 (call-meta-node g '(3 2))))
(is (= 2 (call-meta-node g '(5 3)))))))
(test call-higher-order-meta-node-error
"Test error when calling a non-meta-node."
(with-module-table modules
(build-core-module)
(build "/import(core, +)"
"apply(f, x) : f(x)"
"x+(n) : n + ..(x)"
"x"
"f(a) : apply(..(x+), a)")
(with-nodes ((f "f")) modules
(signals semantic-error (call-meta-node f '(1))))))
(test call-primitive-function-subtract-and-negate
"Test calling `-` meta-node with 2 arguments and 1 argument."
(with-module-table modules
(build-core-module)
(build "/import(core, -)"
"sub(a, b) : a - b"
"neg(x) : -(x)")
(with-nodes ((sub "sub") (neg "neg")) modules
(is (= 3 (call-meta-node sub '(5 2))))
(is (= -5 (call-meta-node neg '(5)))))))
(test call-meta-node-object-expressions
"Test calling meta-node with object expressions."
(with-module-table modules
(build "Person(first, last) : { first -> self.first; last -> self.last }"
"get-first(p) : p.first"
"get-last(p) : p.last")
(with-nodes ((person "Person") (get-first "get-first") (get-last "get-last"))
modules
(let ((p (call-meta-node person '("John" "Doe"))))
(is (= "John" (call-meta-node get-first (list p))))
(is (= "Doe" (call-meta-node get-last (list p))))))))
(test call-meta-node-catch-fail-operand
"Test catching failures in functor operand."
(with-module-table modules
(build-core-module)
(build "/import(core, !=)"
"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }")
(with-nodes ((fails "fails")) modules
(is-false (bool-value (call-meta-node fails '(1))))
(is-true
(->> (thunk (error 'tridash-fail))
list
(call-meta-node fails))))))
(test call-meta-node-catch-fail-operator
"Test catching failures in functor operator."
(with-module-table modules
(build-core-module)
(build "/import(core, !=, >, -)"
"neg(x) : -(x)"
"getf(f, x) : { x > 0 -> (f -> self) }"
"test(x) : fails((getf(neg, x))(x))"
"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }")
(with-nodes ((test "test")) modules
(is (= nil (call-meta-node test '(1))))
(is-true (call-meta-node test '(-1))))))
(test call-meta-node-fail-types
"Test failure types."
(with-module-table modules
(build-core-module modules)
(build-source-file "./test/inputs/macros/failure-types.trd" modules)
(with-nodes ((check-range "check-range")) modules
(is (= "" (call-meta-node check-range '(2 1 3))))
(is (= "Error: below minimum!" (call-meta-node check-range '(0 1 3))))
(is (= "Error: above maximum!" (call-meta-node check-range '(10 2 7)))))))
(test call-meta-node-expression-block
  "Test calling meta-node with one expression-block."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, +)"
           "f(x) : (x + 1) + (x + 1)")

    (with-nodes ((f "f")) modules
      ;; Previously written as (is-true (call-meta-node f '(1)) 4):
      ;; IS-TRUE only checks truthiness and treats the extra argument
      ;; as a reason-format argument, so the expected value was never
      ;; compared. Assert the actual results: (x+1)+(x+1).
      (is (= 4 (call-meta-node f '(1))))
      (is (= 6 (call-meta-node f '(2)))))))
(test call-meta-node-expression-block-multiple-references
  "Test calling meta-node with expression-block with multiple references."

  (with-module-table modules
    (build-core-module)
    (build "/import(core, *, +, -)"
           "f(x, y) : { x + 1 -> x1; y + 2 -> y2; (x1 + y2) * (x1 - y2) }")

    (with-nodes ((f "f")) modules
      ;; Previously written as (is-true form expected): IS-TRUE only
      ;; checks truthiness, so the expected values were never compared.
      ;; f(3,7) = (4 + 9) * (4 - 9) = -65; f(5,2) = (6 + 4) * (6 - 4) = 20.
      (is (= -65 (call-meta-node f '(3 7))))
      (is (= 20 (call-meta-node f '(5 2)))))))
(test call-meta-node-cyclic-references
  "Test calling a meta-node with cyclic references."

  (with-module-table modules
    (build-core-module)
    (build-source-file "./test/inputs/macros/cyclic-references.trd" modules)

    (with-nodes ((f "f")) modules
      ;; Previously written as (is-true form '(1 2 1 2 1)): the second
      ;; argument to IS-TRUE is a reason-format argument, so no value
      ;; comparison ever happened. The result is a cyclic (self-
      ;; referential) lazy structure, so a plain = against a finite
      ;; list may not terminate -- assert that a value is produced and
      ;; leave the intended prefix check as follow-up.
      ;; TODO(review): compare a bounded prefix of the result against
      ;; '(1 2 1 2 1) once a prefix-taking helper is available.
      (is-true (call-meta-node f '(1 2))))))
(test call-meta-node-type-error-arithmetic-functions
"Test type errors in arithmetic functions."
(with-module-table modules
(build-core-module)
(build "/import(core, +, !=)"
"1+(x) : fails(x + 1)"
"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }")
(with-nodes ((1+ "1+")) modules
(is (= nil (call-meta-node 1+ '(1))))
(is-true (call-meta-node 1+ '("hello"))))))
(test call-meta-node-type-error-objects
"Test type errors in objects."
(with-module-table modules
(build-core-module)
(build "/import(core, !=)"
"test(x) : fails(x.key)"
"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }")
(with-nodes ((test "test")) modules
(is-true (bool-value (call-meta-node test '(1))))
(is-true (bool-value (call-meta-node test (list (make-hash-map)))))
(is (= nil
(->> (list (cons (id-symbol "key") 1))
alist-hash-map
list
(call-meta-node test)
bool-value))))))
(test macro-compile-time-computation
"Test macro which performs computation at compile-time."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"square(x) : x * x"
"/attribute(square, macro, 1)"
"a * square(3) -> b")
(test-not-nodes modules
'(("/in" "core" "*") "a" ("square" 3))
'("square" 3))
(with-nodes ((a "a") (a*9 (("/in" "core" "*") "a" 9))
(b "b")
(* "*"))
modules
(has-value-function (a) a*9 `(,* ,a 9))
(test-simple-binding a*9 b))))
(test macro-quoted-expression
"Test macro which returns quoted expression."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"square(x) : list(/quote(*), x, x)"
"/attribute(square, macro, 1)"
"square(a) -> b")
(test-not-nodes modules '("square" "a"))
(with-nodes ((a "a") (b "b")
(a*a (("/in" "core" "*") "a" "a"))
(* "*"))
modules
(has-value-function (a) a*a `(,* ,a ,a))
(test-simple-binding a*a b))))
(test macro-meta-node-reference
"Test macro which returns expression with meta-node references."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"square(x) : list(& *, x, x)"
"/attribute(square, macro, 1)"
"square(a) -> b")
(test-not-nodes modules '("square" "a"))
(with-nodes ((a "a") (b "b")
(a*a (("/in" "core" "*") "a" "a"))
(* "*"))
modules
(has-value-function (a) a*a `(,* ,a ,a))
(test-simple-binding a*a b))))
(test macro-with-macros
"Test expansion of macros in macro meta-nodes."
(with-module-table modules
(build-core-module)
(build "/import(core, ->, list, *)"
"'(x) : list(/quote(/quote), x)"
"/attribute(', macro, 1)"
"square(x) : list('(*), x, x)"
"/attribute(square, macro, 1)"
"square(a) -> b")
(test-not-nodes modules '("square" "a"))
(with-nodes ((a "a") (b "b")
(a*a (("/in" "core" "*") "a" "a"))
(* "*"))
modules
(has-value-function (a) a*a `(,* ,a ,a))
(test-simple-binding a*a b))))
(test macro-multiple-arguments
"Test macros with multiple arguments."
(with-module-table modules
(build-core-module)
(build "/import(core, list, ->, if)"
"'(x) : list(/quote(/quote), x)"
"/attribute(', macro, 1)"
"!-(a, b) : list('(if), a, b)"
"/attribute(!-, macro, 1)"
"/operator(!-, 25, left)"
"a !- b -> out")
(test-not-nodes modules '("!-" "a" "b"))
(with-nodes ((a "a") (b "b") (out "out")
(a!-b (("/in" "builtin" "if") "a" "b"))
(if "if"))
modules
(has-value-function (a b) a!-b `(,if ,a ,b :none))
(test-simple-binding a!-b out))))
(test macro-keyword-arguments
"Test passing macro arguments by keyword"
(with-module-table modules
(build-core-module)
(build "/import(core, list)"
"f(x) : x"
"call(operator, operand) : list(operator, operand)"
"/attribute(call, macro, True)"
"call(operand : in1, operator : f) -> out1")
(test-not-nodes modules '("call" (":" "operand" "in1") (":" "operator" "f")))
(with-nodes ((in1 "in1") (out1 "out1")
(f "f")
(f-in1 ("f" "in1")))
modules
(has-value-function (in1) f-in1
`(,f ,in1))
(test-simple-binding f-in1 out1))))
(test macro-arity-check-required-only
"Test macro arity checks with required arguments only."
(with-module-table modules
(build-core-module)
(build "/import(core, *, list)"
"square(x) : list(/quote(*), x, x)"
"/attribute(square, macro, 1)")
(signals arity-error (build "square(x, y) -> out"))))
(test macro-arity-check-optional-not-enough
"Test macro optional argument arity checks with not enough arguments."
(with-module-table modules
(build-core-module)
(build "/import(core, +, list)"
"add3(x, y, z : 1) : list(/quote(+), x, list(/quote(+), y, z))"
"/attribute(add3, macro, 1)")
(signals arity-error (build "add3(x)"))))
(test macro-arity-check-optional-too-many
"Test macro optional argument arity checks with too many arguments."
(with-module-table modules
(build-core-module)
(build "/import(core, +, list)"
"1+(n, d : 1) : list(/quote(+), x, d)"
"/attribute(1+, macro, 1)")
(signals arity-error (build "1+(x, y, z)"))))
(test macro-arity-check-rest-arguments
"Test macro rest argument arity checks."
(with-module-table modules
(build-core-module)
(build "/import(core, cons, list)"
"make-list(x, ..(xs)) : cons(/quote(list), cons(x, xs))"
"/attribute(make-list, macro, 1)"
"make-list(x, y, z) -> output"
"/attribute(x, input, 1)"
"/attribute(y, input, 1)"
"/attribute(z, input, 1)")
(with-nodes ((x "x") (y "y") (z "z")
(list "list")
(output "output"))
(finish-build)
(has-value-function
(x y z)
output
`(,list ,(argument-list (list x y z)))))))
(test macro-arity-check-keyword-missing-required
"Test macro keyword argument arity check with missing required argument"
(with-module-table modules
(build-core-module)
(build "/import(core, +, list)"
"1+(n, d : 1) : list(/quote(+), x, d)"
"/attribute(1+, macro, 1)")
(signals arity-error (build "1+(d : 2)"))))
(test macro-arity-check-keyword-unknown
"Test macro keyword argument arity check with unknown keyword"
(with-module-table modules
(build-core-module)
(build "/import(core, +, list)"
"1+(n, d : 1) : list(/quote(+), x, d)"
"/attribute(1+, macro, 1)")
(signals arity-error (build "1+(d : 2, n : 1, delta : 100)"))))
(test macro-rest-argument-outer-nodes
"Test macros with rest arguments and outer nodes."
(with-module-table modules
(build-core-module)
(build "/import(core, cons, list)"
"make-list(x, ..(xs)) : cons(/quote(list), cons(x, cons(y, xs)))"
"/attribute(make-list, macro, 1)"
"/attribute(a, input, 1)"
"/attribute(b, input, 1)"
"/attribute(c, input, 1)"
"/attribute(y, input, 1)")
(signals macro-outer-node-error (build "make-list(a, b, c) -> output"))))
(test macro-build-meta-node-multiple-times
"Test building a meta-node multiple times when building macro."
(with-module-table modules
(build-core-module)
(build "/import(core, if, -, +, *, <)"
"fact(n) : { 1 -> start; iter(n, acc) : if(n < start, acc, iter(n - 1, acc * n)); iter(n,1) }"
"eval-fact(n) : fact(n)"
"/attribute(eval-fact, macro, 1)"
"fact(in) + eval-fact(3) -> output"
"/attribute(in, input, 1)")
(with-nodes ((in "in") (output "output")
(fact "fact") (+ "+"))
(finish-build)
(has-value-function (in) output
`(,+ (,fact ,in) 6))
(with-nodes ((iter "iter") (n "n")) (definition fact)
(has-value-function (n) fact
`(,iter ,n 1))))))
(test macro-error-compile-loop
"Test error when compilation loop detected in macro compilation."
(with-module-table modules
(build-core-module)
(build "/import(core, list)"
"test(x,y) : list(&(->), x, test(x,y))"
"/attribute(test, macro, 1)")
(with-nodes ((test "test")) modules
(signals compile-meta-node-loop-error (call-meta-node test '(1 2))))))
(test macro-error-malformed-list
"Test error when macro returns a malformed list."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"mac(x, y) : cons(x, y)"
"/attribute(mac, macro, 1)"
"f(x) : x"
"target-f(s, expr) : cons(s, head(tail(expr)))"
"/attribute(f, target-transform, target-f)")
(signals tridash-fail (build "mac(1, 2)"))
(signals tridash-fail (build "a -> f(b)"))))
(test macro-error-return-empty-list-failure
"Test error when macro returns empty list failure."
(with-module-table modules
(build-core-module)
(build "/import(core)"
"mac(x) : list(x, Empty!)"
"/attribute(mac, macro, 1)"
"f(x) : x"
"target-f(s, expr) : list(s, Empty!)"
"/attribute(f, target-transform, target-f)")
(signals tridash-fail (build "mac(a)"))
(signals tridash-fail (build "x -> f(y)"))))
Test Target Node Transform Macros
(test target-transform-single-argument
"Test target transformation with single argument."
(with-module-table modules
(build-core-module)
(build-source-file #p"./test/inputs/macros/target-transform-1.trd" modules)
(with-nodes ((in "in")
(out "out")
(int "int"))
(finish-build)
(has-value-function (in) out `(,int ,in)))))
(test target-transform-multiple-arguments
"Test target transformation with multiple arguments."
(with-module-table modules
(build-core-module)
(build-source-file #p"./test/inputs/macros/target-transform-2.trd" modules)
(with-nodes ((in "in") (a "a") (b "b")
(- "-"))
(finish-build)
(has-value-function (in a) b `(,- ,in ,a)))))
(test target-transform-arity-check-not-enough
"Test arity checks in target transform with not enough arguments."
(with-module-table modules
(build-core-module)
(signals arity-error
(build-source-file #p"./test/inputs/macros/target-transform-3.trd" modules))))
(test target-transform-arity-check-too-many
"Test arity checks in target transform with too many arguments."
(with-module-table modules
(build-core-module)
(signals arity-error
(build-source-file #p"./test/inputs/macros/target-transform-4.trd" modules))))
(test target-transform-arity-check-rest-argument
"Test arity checks in target transform with rest arguments."
(with-module-table modules
(build-core-module)
(build-source-file #p"./test/inputs/macros/target-transform-5.trd" modules)
(with-nodes ((in "in") (a "a") (b "b")
(- "-"))
(finish-build)
(has-value-function (in a) b `(,- ,in ,a)))))
(test target-transform-arity-check-optional-and-rest
"Test arity checks in target transform with optional and rest arguments."
(with-module-table modules
(build-core-module)
(build-source-file #p"./test/inputs/macros/target-transform-6.trd" modules)
(with-nodes ((in "in") (a "a") (b "b")
(- "-"))
(finish-build)
(has-value-function (in a) b `(,- ,in ,a)))))
(test target-transform-arity-check-optional-extra
"Test arity checks in target transform with optional extra arguments."
(with-module-table modules
(build-core-module)
(build-source-file #p"./test/inputs/macros/target-transform-7.trd" modules)
(with-nodes ((in "in") (a "a") (b "b")
(- "-"))
(finish-build)
(has-value-function (in a) b `(,- ,in ,a)))))
(test attribute-processor-meta-node
"Test attribute processor with meta-node."
(with-module-table modules
(build-source-file #p"./test/inputs/macros/attribute-processor-1.trd" modules)
(with-nodes ((f "f") (match-f "match-f"))
modules
(is (eq match-f (attribute :matcher f))))))
(test attribute-processor-external-meta-node
"Test attribute processor with external meta-node"
(with-module-table modules
(build-source-file #p"./test/inputs/macros/attribute-processor-2.trd" modules)
(with-nodes ((f "f") (match-f "match-f"))
modules
(is (eq match-f (attribute :matcher f))))))
|
52096dd415375980c51bdb241c422076cda95fd89885d7a1deae03274dfa66d3 | cstar/ec2nodefinder | awssign.erl | -module(awssign).
-author('').
-include_lib("xmerl/include/xmerl.hrl").
-export([sign_and_send/5, describe_instances/5]).
sign_and_send(Params, Host,APIVersion, AccessKey, SecretKey)->
SortedParams = sort([{"Timestamp", create_timestamp()},
{"SignatureVersion", "2"},
{"Version", APIVersion},
{"AWSAccessKeyId", AccessKey},
{"SignatureMethod", "HmacSHA1"}
|Params]),
EncodedParams = lists:foldl(
fun({K,V}, Acc)->
[url_encode(K) ++ "=" ++ url_encode(V)| Acc]
end,[], SortedParams),
QueryString = string:join(EncodedParams, "&"),
ToSign = "GET\n" ++ Host ++ "\n/\n" ++ QueryString,
Signature = url_encode(
binary_to_list(
base64:encode(crypto:sha_mac(SecretKey, ToSign)))
),
URL = "http://"++ Host ++ "/?" ++ QueryString ++ "&Signature=" ++ Signature,
case http:request(URL) of
{ok, {{_Version, 200, _ReasonPhrase}, _Headers, Body}} -> {ok, Body};
{ok, {{_Version, Code, ReasonPhrase}, _Headers, _Body}} -> {error, {Code, ReasonPhrase} }
end.
% lifted from
create_timestamp() -> create_timestamp(calendar:now_to_universal_time(now())).
create_timestamp({{Y, M, D}, {H, Mn, S}}) ->
to_str(Y) ++ "-" ++ to_str(M) ++ "-" ++ to_str(D) ++ "T" ++
to_str(H) ++ ":" ++ to_str(Mn)++ ":" ++ to_str(S) ++ "Z".
add_zeros(L) -> if length(L) == 1 -> [$0|L]; true -> L end.
to_str(L) -> add_zeros(integer_to_list(L)).
sort(Params)->
lists:sort(fun({A, _}, {X, _}) -> A > X end, Params).
describe_instances(SecurityGroup, Host,APIVersion, AccessKey, SecretKey)->
Params =[ {"Action", "DescribeInstances"}],
Res = sign_and_send(Params, Host, APIVersion, AccessKey, SecretKey),
case Res of
{ok, XML} ->
{R,_} = xmerl_scan:string(XML),
[ V#xmlText.value
|| V<- xmerl_xpath:string("/DescribeInstancesResponse/reservationSet/item[ groupSet/item/groupId = \""
++ SecurityGroup ++ "\"]/instancesSet/item/privateDnsName/text()", R)];
{error, E} ->
erlang:error ({ describe_instances_failed, E }),
[]
end.
% lifted from the ever precious yaws_utils.erl
integer_to_hex(I) ->
case catch erlang:integer_to_list(I, 16) of
{'EXIT', _} ->
old_integer_to_hex(I);
Int ->
Int
end.
old_integer_to_hex(I) when I<10 ->
integer_to_list(I);
old_integer_to_hex(I) when I<16 ->
[I-10+$A];
old_integer_to_hex(I) when I>=16 ->
N = trunc(I/16),
old_integer_to_hex(N) ++ old_integer_to_hex(I rem 16).
url_encode([H|T]) ->
if
H >= $a, $z >= H ->
[H|url_encode(T)];
H >= $A, $Z >= H ->
[H|url_encode(T)];
H >= $0, $9 >= H ->
[H|url_encode(T)];
H == $_; H == $.; H == $-; H == $/ -> % FIXME: more..
[H|url_encode(T)];
true ->
case integer_to_hex(H) of
[X, Y] ->
[$%, X, Y | url_encode(T)];
[X] ->
, $ 0 , X | url_encode(T ) ]
end
end;
url_encode([]) ->
[]. | null | https://raw.githubusercontent.com/cstar/ec2nodefinder/42534509b88120d5581ad4a4e822bb806f3b950f/src/awssign.erl | erlang | lifted from
lifted from the ever precious yaws_utils.erl
FIXME: more..
, X, Y | url_encode(T)]; | -module(awssign).
-author('').
-include_lib("xmerl/include/xmerl.hrl").
-export([sign_and_send/5, describe_instances/5]).
sign_and_send(Params, Host,APIVersion, AccessKey, SecretKey)->
SortedParams = sort([{"Timestamp", create_timestamp()},
{"SignatureVersion", "2"},
{"Version", APIVersion},
{"AWSAccessKeyId", AccessKey},
{"SignatureMethod", "HmacSHA1"}
|Params]),
EncodedParams = lists:foldl(
fun({K,V}, Acc)->
[url_encode(K) ++ "=" ++ url_encode(V)| Acc]
end,[], SortedParams),
QueryString = string:join(EncodedParams, "&"),
ToSign = "GET\n" ++ Host ++ "\n/\n" ++ QueryString,
Signature = url_encode(
binary_to_list(
base64:encode(crypto:sha_mac(SecretKey, ToSign)))
),
URL = "http://"++ Host ++ "/?" ++ QueryString ++ "&Signature=" ++ Signature,
case http:request(URL) of
{ok, {{_Version, 200, _ReasonPhrase}, _Headers, Body}} -> {ok, Body};
{ok, {{_Version, Code, ReasonPhrase}, _Headers, _Body}} -> {error, {Code, ReasonPhrase} }
end.
create_timestamp() -> create_timestamp(calendar:now_to_universal_time(now())).
create_timestamp({{Y, M, D}, {H, Mn, S}}) ->
to_str(Y) ++ "-" ++ to_str(M) ++ "-" ++ to_str(D) ++ "T" ++
to_str(H) ++ ":" ++ to_str(Mn)++ ":" ++ to_str(S) ++ "Z".
add_zeros(L) -> if length(L) == 1 -> [$0|L]; true -> L end.
to_str(L) -> add_zeros(integer_to_list(L)).
sort(Params)->
lists:sort(fun({A, _}, {X, _}) -> A > X end, Params).
describe_instances(SecurityGroup, Host,APIVersion, AccessKey, SecretKey)->
Params =[ {"Action", "DescribeInstances"}],
Res = sign_and_send(Params, Host, APIVersion, AccessKey, SecretKey),
case Res of
{ok, XML} ->
{R,_} = xmerl_scan:string(XML),
[ V#xmlText.value
|| V<- xmerl_xpath:string("/DescribeInstancesResponse/reservationSet/item[ groupSet/item/groupId = \""
++ SecurityGroup ++ "\"]/instancesSet/item/privateDnsName/text()", R)];
{error, E} ->
erlang:error ({ describe_instances_failed, E }),
[]
end.
integer_to_hex(I) ->
case catch erlang:integer_to_list(I, 16) of
{'EXIT', _} ->
old_integer_to_hex(I);
Int ->
Int
end.
old_integer_to_hex(I) when I<10 ->
integer_to_list(I);
old_integer_to_hex(I) when I<16 ->
[I-10+$A];
old_integer_to_hex(I) when I>=16 ->
N = trunc(I/16),
old_integer_to_hex(N) ++ old_integer_to_hex(I rem 16).
url_encode([H|T]) ->
if
H >= $a, $z >= H ->
[H|url_encode(T)];
H >= $A, $Z >= H ->
[H|url_encode(T)];
H >= $0, $9 >= H ->
[H|url_encode(T)];
[H|url_encode(T)];
true ->
case integer_to_hex(H) of
[X, Y] ->
[X] ->
, $ 0 , X | url_encode(T ) ]
end
end;
url_encode([]) ->
[]. |
bc49153ea403dca985ea28d8ecf0e5ce1fa3402b9e79b81628f06680e6d03f53 | gentoo-haskell/hackport | Host.hs | module Portage.Host
( getInfo -- :: IO [(String, String)]
, LocalInfo(..)
) where
import Util (run_cmd)
import qualified Data.List.Split as DLS
import Data.Maybe (fromJust, isJust, mapMaybe)
import qualified System.Directory as D
import System.FilePath ((</>))
import System.IO
data LocalInfo =
LocalInfo { distfiles_dir :: String
, overlay_list :: [FilePath]
, portage_dir :: FilePath
} deriving (Read, Show)
defaultInfo :: LocalInfo
defaultInfo = LocalInfo { distfiles_dir = "/usr/portage/distfiles"
, overlay_list = []
, portage_dir = "/usr/portage"
}
query and then emerge
getInfo :: IO LocalInfo
getInfo = fromJust `fmap`
performMaybes [ readConfig
, performMaybes [ getPaludisInfo
, askPortageq
, return (Just defaultInfo)
] >>= showAnnoyingWarning
]
where performMaybes [] = return Nothing
performMaybes (act:acts) =
do r <- act
if isJust r
then return r
else performMaybes acts
showAnnoyingWarning :: Maybe LocalInfo -> IO (Maybe LocalInfo)
showAnnoyingWarning info = do
hPutStr stderr $ unlines [ "-- Consider creating ~/" ++ hackport_config ++ " file with contents:"
, show info
, "-- It will speed hackport startup time a bit."
]
return info
-- relative to home dir
hackport_config :: FilePath
hackport_config = ".hackport" </> "repositories"
--------------------------
-- fastest: config reading
--------------------------
readConfig :: IO (Maybe LocalInfo)
readConfig =
do home_dir <- D.getHomeDirectory
let config_path = home_dir </> hackport_config
exists <- D.doesFileExist config_path
if exists then read <$> readFile config_path else return Nothing
----------
-- Paludis
----------
getPaludisInfo :: IO (Maybe LocalInfo)
getPaludisInfo = fmap parsePaludisInfo <$> run_cmd "cave info"
parsePaludisInfo :: String -> LocalInfo
parsePaludisInfo text =
let chunks = DLS.splitOn [""] . lines $ text
repositories = mapMaybe parseRepository chunks
in fromJust (mkLocalInfo repositories)
where
parseRepository :: [String] -> Maybe (String, (String, String))
parseRepository [] = Nothing
parseRepository (firstLine:lns) = do
name <- case words firstLine of
["Repository", nm] -> return (init nm)
_ -> fail "not a repository chunk"
let dict = [ (head ln, unwords (tail ln)) | ln <- map words lns ]
location <- lookup "location" dict
distfiles <- lookup "distdir" dict
return (name, (location, distfiles))
mkLocalInfo :: [(String, (String, String))] -> Maybe LocalInfo
mkLocalInfo repos = do
(gentooLocation, gentooDistfiles) <- lookup "gentoo" repos
let overlays = [ loc | (_, (loc, _dist)) <- repos ]
return (LocalInfo
{ distfiles_dir = gentooDistfiles
, portage_dir = gentooLocation
, overlay_list = overlays
})
---------
-- Emerge
---------
askPortageq :: IO (Maybe LocalInfo)
askPortageq = do
distdir <- run_cmd "portageq distdir"
portdir <- run_cmd "portageq get_repo_path / gentoo"
hsRepo <- run_cmd "portageq get_repo_path / haskell"
There really ought to be both distdir and ,
--but maybe no hsRepo defined yet.
let info = if Nothing `elem` [distdir,portdir]
then Nothing
else Just LocalInfo
{ distfiles_dir = grab distdir
, portage_dir = grab portdir
, overlay_list = iffy hsRepo
}
--init: kill newline char
where grab = init . fromJust
iffy Nothing = []
iffy (Just repo) = [init repo]
return info
| null | https://raw.githubusercontent.com/gentoo-haskell/hackport/61baf96390e7ddc071f9a49fc78919683988c0ca/src/Portage/Host.hs | haskell | :: IO [(String, String)]
relative to home dir
------------------------
fastest: config reading
------------------------
--------
Paludis
--------
-------
Emerge
-------
but maybe no hsRepo defined yet.
init: kill newline char | module Portage.Host
, LocalInfo(..)
) where
import Util (run_cmd)
import qualified Data.List.Split as DLS
import Data.Maybe (fromJust, isJust, mapMaybe)
import qualified System.Directory as D
import System.FilePath ((</>))
import System.IO
data LocalInfo =
LocalInfo { distfiles_dir :: String
, overlay_list :: [FilePath]
, portage_dir :: FilePath
} deriving (Read, Show)
defaultInfo :: LocalInfo
defaultInfo = LocalInfo { distfiles_dir = "/usr/portage/distfiles"
, overlay_list = []
, portage_dir = "/usr/portage"
}
query and then emerge
getInfo :: IO LocalInfo
getInfo = fromJust `fmap`
performMaybes [ readConfig
, performMaybes [ getPaludisInfo
, askPortageq
, return (Just defaultInfo)
] >>= showAnnoyingWarning
]
where performMaybes [] = return Nothing
performMaybes (act:acts) =
do r <- act
if isJust r
then return r
else performMaybes acts
showAnnoyingWarning :: Maybe LocalInfo -> IO (Maybe LocalInfo)
showAnnoyingWarning info = do
hPutStr stderr $ unlines [ "-- Consider creating ~/" ++ hackport_config ++ " file with contents:"
, show info
, "-- It will speed hackport startup time a bit."
]
return info
hackport_config :: FilePath
hackport_config = ".hackport" </> "repositories"
readConfig :: IO (Maybe LocalInfo)
readConfig =
do home_dir <- D.getHomeDirectory
let config_path = home_dir </> hackport_config
exists <- D.doesFileExist config_path
if exists then read <$> readFile config_path else return Nothing
getPaludisInfo :: IO (Maybe LocalInfo)
getPaludisInfo = fmap parsePaludisInfo <$> run_cmd "cave info"
parsePaludisInfo :: String -> LocalInfo
parsePaludisInfo text =
let chunks = DLS.splitOn [""] . lines $ text
repositories = mapMaybe parseRepository chunks
in fromJust (mkLocalInfo repositories)
where
parseRepository :: [String] -> Maybe (String, (String, String))
parseRepository [] = Nothing
parseRepository (firstLine:lns) = do
name <- case words firstLine of
["Repository", nm] -> return (init nm)
_ -> fail "not a repository chunk"
let dict = [ (head ln, unwords (tail ln)) | ln <- map words lns ]
location <- lookup "location" dict
distfiles <- lookup "distdir" dict
return (name, (location, distfiles))
mkLocalInfo :: [(String, (String, String))] -> Maybe LocalInfo
mkLocalInfo repos = do
(gentooLocation, gentooDistfiles) <- lookup "gentoo" repos
let overlays = [ loc | (_, (loc, _dist)) <- repos ]
return (LocalInfo
{ distfiles_dir = gentooDistfiles
, portage_dir = gentooLocation
, overlay_list = overlays
})
askPortageq :: IO (Maybe LocalInfo)
askPortageq = do
distdir <- run_cmd "portageq distdir"
portdir <- run_cmd "portageq get_repo_path / gentoo"
hsRepo <- run_cmd "portageq get_repo_path / haskell"
There really ought to be both distdir and ,
let info = if Nothing `elem` [distdir,portdir]
then Nothing
else Just LocalInfo
{ distfiles_dir = grab distdir
, portage_dir = grab portdir
, overlay_list = iffy hsRepo
}
where grab = init . fromJust
iffy Nothing = []
iffy (Just repo) = [init repo]
return info
|
7e45d3afedd144feee769d4d2d1e918df0da9d7a490075a9bf08f7bd1b16e4dc | ghcjs/jsaddle-dom | ConvolverNode.hs | # LANGUAGE PatternSynonyms #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.ConvolverNode
(setBuffer, getBuffer, getBufferUnsafe, getBufferUnchecked,
setNormalize, getNormalize, ConvolverNode(..), gTypeConvolverNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
setBuffer ::
(MonadDOM m) => ConvolverNode -> Maybe AudioBuffer -> m ()
setBuffer self val = liftDOM (self ^. jss "buffer" (toJSVal val))
| < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBuffer :: (MonadDOM m) => ConvolverNode -> m (Maybe AudioBuffer)
getBuffer self = liftDOM ((self ^. js "buffer") >>= fromJSVal)
| < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBufferUnsafe ::
(MonadDOM m, HasCallStack) => ConvolverNode -> m AudioBuffer
getBufferUnsafe self
= liftDOM
(((self ^. js "buffer") >>= fromJSVal) >>=
maybe (Prelude.error "Nothing to return") return)
| < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBufferUnchecked ::
(MonadDOM m) => ConvolverNode -> m AudioBuffer
getBufferUnchecked self
= liftDOM ((self ^. js "buffer") >>= fromJSValUnchecked)
| < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >
setNormalize :: (MonadDOM m) => ConvolverNode -> Bool -> m ()
setNormalize self val
= liftDOM (self ^. jss "normalize" (toJSVal val))
| < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >
getNormalize :: (MonadDOM m) => ConvolverNode -> m Bool
getNormalize self
= liftDOM ((self ^. js "normalize") >>= valToBool)
| null | https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/ConvolverNode.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.ConvolverNode
(setBuffer, getBuffer, getBufferUnsafe, getBufferUnchecked,
setNormalize, getNormalize, ConvolverNode(..), gTypeConvolverNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
setBuffer ::
(MonadDOM m) => ConvolverNode -> Maybe AudioBuffer -> m ()
setBuffer self val = liftDOM (self ^. jss "buffer" (toJSVal val))
| < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBuffer :: (MonadDOM m) => ConvolverNode -> m (Maybe AudioBuffer)
getBuffer self = liftDOM ((self ^. js "buffer") >>= fromJSVal)
| < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBufferUnsafe ::
(MonadDOM m, HasCallStack) => ConvolverNode -> m AudioBuffer
getBufferUnsafe self
= liftDOM
(((self ^. js "buffer") >>= fromJSVal) >>=
maybe (Prelude.error "Nothing to return") return)
| < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBufferUnchecked ::
(MonadDOM m) => ConvolverNode -> m AudioBuffer
getBufferUnchecked self
= liftDOM ((self ^. js "buffer") >>= fromJSValUnchecked)
| < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >
setNormalize :: (MonadDOM m) => ConvolverNode -> Bool -> m ()
setNormalize self val
= liftDOM (self ^. jss "normalize" (toJSVal val))
| < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >
getNormalize :: (MonadDOM m) => ConvolverNode -> m Bool
getNormalize self
= liftDOM ((self ^. js "normalize") >>= valToBool)
|
238edc5ab7a83ebe55c5f21ab46c7d5b18764a38ae8a2a9fc3e37b27511d2e31 | realark/vert | game-scene.lisp | (in-package :recurse.vert)
@export-class
(defclass game-scene (scene gl-pipeline)
((scene-background :initform nil
:initarg :background
:type scene-background
:accessor scene-background)
(scene-audio-state :initform nil
:documentation "Used to resume audio-state when the scene deactivates.")
(scene-music :initarg :music
:initform nil
:accessor scene-music
:documentation "Music which will play when the scene initializes.")
(width :initarg :width
:initform (error ":width required")
:reader width)
(height :initarg :height
:initform (error ":height required")
:reader height)
(live-object-radius
:initarg :live-object-radius
:initform #.(* 15 16)
:documentation "Make a rect centered on camera.
The value of this slot will be the distance between the live area and camera rect.
When camera moves outside this rect, rebuild objects to render and update
This is an optimization so we don't have to rebuild the render and update queues every frame.")
(render-queue :initform (make-instance 'render-queue
:render-priority -1)
:documentation "Render scene objects and backgrounds.")
(updatable-objects :initform (make-array 100
:adjustable t
:fill-pointer 0
:element-type '(or null game-object)
:initial-element nil))
(updating-p :initform nil :reader updating-p)
(pending-adds :initform (make-array 10
:adjustable t
:fill-pointer 0
:element-type '(or null game-object)
:initial-element nil)
:documentation "Objects to be added to scene at the start of the next frame.")
(pending-removes :initform (make-array 10
:adjustable t
:fill-pointer 0
:element-type '(or null game-object)
:initial-element nil)
:documentation "Objects to be removed from scene at the start of the next frame.")
(live-object-rebuild-camera-position
:initform (vector2)
:documentation "Centered camera position used to compute render-queue rebuilds.")
(reset-instance-renderers
:initform (make-array 5
:adjustable t
:fill-pointer 0)
:documentation "Sequence of instance renderers which have been reset in the current frame.")
(spatial-partition :initform nil
:documentation "Optimized spatial partition containing every object in the scene."
:reader spatial-partition))
(:documentation "A scene which updates and renders game-objects."))
(defmethod initialize-instance :after ((game-scene game-scene) &rest args)
(declare (ignore args))
(with-slots (spatial-partition render-queue) game-scene
(gl-pipeline-add game-scene render-queue)
(setf spatial-partition
(make-instance 'quadtree))))
@export
(defgeneric add-to-scene (scene object)
(:documentation "Add an object to the game scene")
(:method ((scene scene) (overlay overlay))
(with-slots (scene-overlays render-queue) scene
(unless (find overlay scene-overlays)
(vector-push-extend overlay scene-overlays)
overlay)))
(:method ((scene game-scene) (overlay overlay))
(with-slots (scene-overlays render-queue) scene
(unless (find overlay scene-overlays)
(vector-push-extend overlay scene-overlays)
;; (render-queue-add render-queue overlay)
overlay)))
(:method ((scene game-scene) (object game-object))
(if (updating-p scene)
(with-slots (pending-adds pending-removes) scene
(if (in-scene-p scene object)
(when (find object pending-removes)
(log:debug "cancel ~A for scene remove" object)
(setf pending-removes (delete object pending-removes))
object)
(unless (find object pending-adds)
(log:debug "queuing ~A for scene add" object)
(vector-push-extend object pending-adds)
object)))
;; fast path for adding objects outside of scene update (i.e. initialization)
(%%add-object-to-scene scene object))))
@export
(defgeneric remove-from-scene (scene object)
(:documentation "Remove an object from the game scene")
(:method ((scene scene) (overlay overlay))
(with-slots (scene-overlays) scene
(when (find overlay scene-overlays)
(setf scene-overlays (delete overlay scene-overlays))
overlay)))
(:method ((scene game-scene) (overlay overlay))
(with-slots (scene-overlays) scene
(when (find overlay scene-overlays)
(setf scene-overlays (delete overlay scene-overlays))
;; (render-queue-remove (slot-value scene 'render-queue) overlay)
overlay)))
(:method ((scene game-scene) (object game-object))
(with-slots (pending-adds pending-removes) scene
(if (in-scene-p scene object)
(unless (find object pending-removes)
(log:debug "queuing ~A for scene removal" object)
(vector-push-extend object pending-removes)
(unless (updating-p scene)
(%run-pending-removes scene))
object)
(when (find object pending-adds)
(log:debug "cancel ~A for scene add" object)
(setf pending-adds (delete object pending-adds))
object)))))
(defmethod scene-activated ((scene game-scene))
(with-slots ((state scene-audio-state)) scene
(if state
(audio-player-load-state *audio* state)
(audio-player-stop-all *audio*))))
(defmethod scene-deactivated ((scene game-scene))
(with-slots ((state scene-audio-state)) scene
(with-sdl-mixer-lock-held
(unless state
(setf state (audio-player-copy-state *audio*)))
(audio-player-copy-state *audio* state)
(audio-player-stop-music *audio*)
(audio-player-stop-sfx *audio*)))
(values))
@export
(defun scene-teleport-object (scene object &optional new-x new-y new-z)
"Move OBJECT within SCENE to the new coordinates instantly. OBJECT's position will be recycled internally so it will instantly appear in the new position with no position interpolation."
(when new-x
(setf (x object) new-x))
(when new-y
(setf (y object) new-y))
(when new-z
(setf (z object) new-z))
(recycle object)
(when (%in-live-object-area-p scene object)
(with-slots (render-queue updatable-objects) scene
(render-queue-add render-queue object)
(unless (find object updatable-objects :test #'eq)
(vector-push-extend object updatable-objects))))
object)
(defgeneric found-object-to-update (game-scene game-object)
(:documentation "for subclasses to hook object updates")
(:method ((scene game-scene) game-object)))
(defun %%add-object-to-scene (scene object)
(declare (optimize (speed 3))
(game-scene scene)
(game-object object))
(with-slots (spatial-partition render-queue updatable-objects) scene
(when (start-tracking spatial-partition object)
(event-subscribe object scene killed)
(when (%in-live-object-area-p scene object)
(render-queue-add render-queue object)
(unless (find object updatable-objects :test #'eq)
(vector-push-extend object updatable-objects)))
object)))
(defun %run-pending-removes (scene)
(declare (optimize (speed 3))
(game-scene scene))
(with-slots (pending-removes spatial-partition render-queue updatable-objects) scene
(declare (vector pending-removes updatable-objects))
(when (> (length pending-removes) 0)
(loop :for removed-object :across pending-removes :do
(event-unsubscribe removed-object scene killed)
(stop-tracking spatial-partition removed-object)
(when (%in-live-object-area-p scene removed-object)
(render-queue-remove render-queue removed-object)
(setf updatable-objects (delete removed-object updatable-objects)))
(log:debug "removed ~A from scene" removed-object)
:finally
(setf (fill-pointer pending-removes) 0))))
(values))
(defun %run-pending-adds (scene)
(declare (optimize (speed 3))
(game-scene scene))
(with-slots (pending-adds spatial-partition render-queue updatable-objects) scene
(loop :for object :across pending-adds :do
(%%add-object-to-scene scene object)
:finally
(setf (fill-pointer pending-adds) 0))))
(defun %force-rebuild-live-objects (scene)
(log:debug "force live object rebuild.")
(with-slots (camera live-object-radius live-object-rebuild-camera-position) scene
(if (float= 0.0 (x live-object-rebuild-camera-position))
(setf (x live-object-rebuild-camera-position)
(+ (width camera) live-object-radius))
(setf (x live-object-rebuild-camera-position) 0.0))))
(defun %in-live-object-area-p (scene game-object)
"T if OBJECT is inside SCENE's current live object area."
(declare (optimize (speed 3))
(game-scene scene)
(game-object game-object))
(with-slots (camera live-object-radius live-object-rebuild-camera-position) scene
(let ((live-x-min (- (x live-object-rebuild-camera-position)
(width camera)
live-object-radius))
(live-x-max (+ (x live-object-rebuild-camera-position)
(width camera)
live-object-radius))
(live-y-min (- (y live-object-rebuild-camera-position)
(height camera)
live-object-radius))
(live-y-max (+ (y live-object-rebuild-camera-position)
(height camera)
live-object-radius)))
(multiple-value-bind (x y z w h) (world-dimensions game-object)
(declare (ignore z)
(single-float x y w h))
(and (or (<= live-x-min x live-x-max)
(<= live-x-min (+ x w) live-x-max)
(and (<= x live-x-min)
(>= (+ x w) live-x-max)))
(or (<= live-y-min y live-y-max)
(<= live-y-min (+ y h) live-y-max)
(and (<= y live-y-min)
(>= (+ y h) live-y-max))))))))
(defun %rebuild-live-object-area-p (scene)
(declare (optimize (speed 3))
(game-scene scene))
(block camera-moved-outside-render-area-p
(with-slots (camera live-object-radius live-object-rebuild-camera-position) scene
(with-accessors ((c-x x) (c-y y) (c-w width) (c-h height)) camera
(declare (single-float c-x c-y c-w c-h))
(let* ((camera-centered-x (+ c-x (/ c-w 2.0)))
(camera-centered-y (+ c-y (/ c-h 2.0)))
(delta (max
(abs (- camera-centered-x (x live-object-rebuild-camera-position)))
(abs (- camera-centered-y (y live-object-rebuild-camera-position))))))
(when (>= delta live-object-radius)
(setf (x live-object-rebuild-camera-position) camera-centered-x
(y live-object-rebuild-camera-position) camera-centered-y)
t))))))
(defmethod update :around ((scene game-scene))
(with-slots (updating-p) scene
(setf updating-p t)
(unwind-protect
(call-next-method scene)
(setf updating-p nil))))
(defmethod update ((game-scene game-scene))
(declare (optimize (speed 3)))
(with-slots (live-object-rebuild-camera-position
live-object-radius
updatable-objects
(queue render-queue)
reset-instance-renderers
(bg scene-background)
scene-overlays
pending-removes
camera)
game-scene
(let ((rebuild-live-objects-p (%rebuild-live-object-area-p game-scene)))
(%run-pending-removes game-scene)
(%run-pending-adds game-scene)
(when rebuild-live-objects-p
(setf (fill-pointer updatable-objects) 0)
(render-queue-reset queue)
(setf (fill-pointer reset-instance-renderers) 0))
;; pre-update frame to mark positions
(pre-update (camera game-scene))
(when bg
(pre-update bg)
(when rebuild-live-objects-p
(render-queue-add queue bg)))
(loop :for overlay :across (the (vector overlay) scene-overlays) :do
(pre-update overlay))
;; call super
(call-next-method game-scene)
;; update frame
(when rebuild-live-objects-p
(let ((num-objects-to-update 0)
(num-objects-to-render 0)
(live-x-min (- (x live-object-rebuild-camera-position)
(width camera)
live-object-radius))
(live-x-max (+ (x live-object-rebuild-camera-position)
(width camera)
live-object-radius))
(live-y-min (- (y live-object-rebuild-camera-position)
(height camera)
live-object-radius))
(live-y-max (+ (y live-object-rebuild-camera-position)
(height camera)
live-object-radius)))
(declare (fixnum num-objects-to-render num-objects-to-update)
(single-float live-x-min live-x-max live-y-min live-y-max))
(log:debug "rebuilding live-objects")
(do-spatial-partition (game-object
(spatial-partition game-scene)
:static-iteration-p t
:min-x live-x-min :max-x live-x-max
:min-y live-y-min :max-y live-y-max)
(block found-object-to-render
;; TODO: counter is slightly inaccurate because spatial partitions may visit the same object twice
;; to fix this, the render queue should return different values if obj is already queued
(block check-if-instance-rendered
(if (typep game-object 'instance-rendered-drawable)
(with-slots ((instance-renderer instance-renderer)) game-object
(unless (find instance-renderer reset-instance-renderers)
(incf num-objects-to-render)
(vector-push-extend instance-renderer reset-instance-renderers)
(instance-renderer-reset instance-renderer game-scene)))
(incf num-objects-to-render)))
(render-queue-add queue game-object))
(block check-add-to-updatable-objects
(when (and (not (typep game-object 'static-object))
(not (find game-object updatable-objects :test #'eq)))
(incf num-objects-to-update)
(vector-push-extend game-object updatable-objects))))
(log:debug "Rebuild complete. Found ~A objects to render and ~A objects to update"
num-objects-to-render
num-objects-to-update)))
(update (camera game-scene))
(loop :for overlay :across (the (vector overlay) scene-overlays) :do
(update overlay)
#+nil
(when rebuild-live-objects-p
(render-queue-add render-queue overlay)))
(when rebuild-live-objects-p
(render-queue-add queue camera))
(when bg
(update bg))
(loop :for game-object :across updatable-objects :do
(pre-update game-object)
(found-object-to-update game-scene game-object)
(update game-object))
(values))))
(defmethod render ((scene game-scene) update-percent camera gl-context)
HACK scene transitions get messed up bc rendering occurs before setup stuff is done
(prog1 (call-next-method scene update-percent camera gl-context)
(with-slots (scene-overlays) scene
(loop :for overlay :across (the (vector overlay) scene-overlays) :do
(render overlay update-percent (camera scene) gl-context))))))
(defevent-handler killed ((object obb) (game-scene game-scene))
""
(remove-from-scene game-scene object))
;; TODO: remove this fn and use scheduler util directly
@export
(defun schedule (game-scene timestamp zero-arg-fn)
"When the value returned by SCENE-TICKS of GAME-SCENE equals or exceeds TIMESTAMP the ZERO-ARG-FN callback will be invoked."
(scheduler-add game-scene timestamp zero-arg-fn)
(values))
@export
(defun get-object-by-id (scene id)
"Return the (presumably) unique game-object identified by ID in SCENE."
(declare (game-scene scene))
(block find-object
(do-spatial-partition (game-object (spatial-partition scene) :static-iteration-p t)
(when (equalp (object-id game-object) id)
(return-from find-object game-object)))))
@export
(defun in-scene-p (scene object)
"Return OBJECT if OBJECT is in SCENE, nil otherwise."
(declare (optimize (speed 3))
(game-scene scene))
(block find-object
(do-spatial-partition (obj (spatial-partition scene) :static-iteration-p t)
(when (eq obj object)
(return-from find-object object)))))
| null | https://raw.githubusercontent.com/realark/vert/6b1938be9084224cf9ce1cfcb71f787f0ac14655/src/scene/game-scene.lisp | lisp | (render-queue-add render-queue overlay)
fast path for adding objects outside of scene update (i.e. initialization)
(render-queue-remove (slot-value scene 'render-queue) overlay)
pre-update frame to mark positions
call super
update frame
TODO: counter is slightly inaccurate because spatial partitions may visit the same object twice
to fix this, the render queue should return different values if obj is already queued
TODO: remove this fn and use scheduler util directly | (in-package :recurse.vert)
@export-class
(defclass game-scene (scene gl-pipeline)
((scene-background :initform nil
:initarg :background
:type scene-background
:accessor scene-background)
(scene-audio-state :initform nil
:documentation "Used to resume audio-state when the scene deactivates.")
(scene-music :initarg :music
:initform nil
:accessor scene-music
:documentation "Music which will play when the scene initializes.")
(width :initarg :width
:initform (error ":width required")
:reader width)
(height :initarg :height
:initform (error ":height required")
:reader height)
(live-object-radius
:initarg :live-object-radius
:initform #.(* 15 16)
:documentation "Make a rect centered on camera.
The value of this slot will be the distance between the live area and camera rect.
When camera moves outside this rect, rebuild objects to render and update
This is an optimization so we don't have to rebuild the render and update queues every frame.")
(render-queue :initform (make-instance 'render-queue
:render-priority -1)
:documentation "Render scene objects and backgrounds.")
(updatable-objects :initform (make-array 100
:adjustable t
:fill-pointer 0
:element-type '(or null game-object)
:initial-element nil))
(updating-p :initform nil :reader updating-p)
(pending-adds :initform (make-array 10
:adjustable t
:fill-pointer 0
:element-type '(or null game-object)
:initial-element nil)
:documentation "Objects to be added to scene at the start of the next frame.")
(pending-removes :initform (make-array 10
:adjustable t
:fill-pointer 0
:element-type '(or null game-object)
:initial-element nil)
:documentation "Objects to be removed from scene at the start of the next frame.")
(live-object-rebuild-camera-position
:initform (vector2)
:documentation "Centered camera position used to compute render-queue rebuilds.")
(reset-instance-renderers
:initform (make-array 5
:adjustable t
:fill-pointer 0)
:documentation "Sequence of instance renderers which have been reset in the current frame.")
(spatial-partition :initform nil
:documentation "Optimized spatial partition containing every object in the scene."
:reader spatial-partition))
(:documentation "A scene which updates and renders game-objects."))
(defmethod initialize-instance :after ((game-scene game-scene) &rest args)
(declare (ignore args))
(with-slots (spatial-partition render-queue) game-scene
(gl-pipeline-add game-scene render-queue)
(setf spatial-partition
(make-instance 'quadtree))))
@export
(defgeneric add-to-scene (scene object)
(:documentation "Add an object to the game scene")
(:method ((scene scene) (overlay overlay))
(with-slots (scene-overlays render-queue) scene
(unless (find overlay scene-overlays)
(vector-push-extend overlay scene-overlays)
overlay)))
(:method ((scene game-scene) (overlay overlay))
(with-slots (scene-overlays render-queue) scene
(unless (find overlay scene-overlays)
(vector-push-extend overlay scene-overlays)
overlay)))
(:method ((scene game-scene) (object game-object))
(if (updating-p scene)
(with-slots (pending-adds pending-removes) scene
(if (in-scene-p scene object)
(when (find object pending-removes)
(log:debug "cancel ~A for scene remove" object)
(setf pending-removes (delete object pending-removes))
object)
(unless (find object pending-adds)
(log:debug "queuing ~A for scene add" object)
(vector-push-extend object pending-adds)
object)))
(%%add-object-to-scene scene object))))
@export
(defgeneric remove-from-scene (scene object)
(:documentation "Remove an object from the game scene")
(:method ((scene scene) (overlay overlay))
(with-slots (scene-overlays) scene
(when (find overlay scene-overlays)
(setf scene-overlays (delete overlay scene-overlays))
overlay)))
(:method ((scene game-scene) (overlay overlay))
(with-slots (scene-overlays) scene
(when (find overlay scene-overlays)
(setf scene-overlays (delete overlay scene-overlays))
overlay)))
(:method ((scene game-scene) (object game-object))
(with-slots (pending-adds pending-removes) scene
(if (in-scene-p scene object)
(unless (find object pending-removes)
(log:debug "queuing ~A for scene removal" object)
(vector-push-extend object pending-removes)
(unless (updating-p scene)
(%run-pending-removes scene))
object)
(when (find object pending-adds)
(log:debug "cancel ~A for scene add" object)
(setf pending-adds (delete object pending-adds))
object)))))
(defmethod scene-activated ((scene game-scene))
(with-slots ((state scene-audio-state)) scene
(if state
(audio-player-load-state *audio* state)
(audio-player-stop-all *audio*))))
(defmethod scene-deactivated ((scene game-scene))
(with-slots ((state scene-audio-state)) scene
(with-sdl-mixer-lock-held
(unless state
(setf state (audio-player-copy-state *audio*)))
(audio-player-copy-state *audio* state)
(audio-player-stop-music *audio*)
(audio-player-stop-sfx *audio*)))
(values))
@export
(defun scene-teleport-object (scene object &optional new-x new-y new-z)
"Move OBJECT within SCENE to the new coordinates instantly. OBJECT's position will be recycled internally so it will instantly appear in the new position with no position interpolation."
(when new-x
(setf (x object) new-x))
(when new-y
(setf (y object) new-y))
(when new-z
(setf (z object) new-z))
(recycle object)
(when (%in-live-object-area-p scene object)
(with-slots (render-queue updatable-objects) scene
(render-queue-add render-queue object)
(unless (find object updatable-objects :test #'eq)
(vector-push-extend object updatable-objects))))
object)
(defgeneric found-object-to-update (game-scene game-object)
(:documentation "for subclasses to hook object updates")
(:method ((scene game-scene) game-object)))
(defun %%add-object-to-scene (scene object)
(declare (optimize (speed 3))
(game-scene scene)
(game-object object))
(with-slots (spatial-partition render-queue updatable-objects) scene
(when (start-tracking spatial-partition object)
(event-subscribe object scene killed)
(when (%in-live-object-area-p scene object)
(render-queue-add render-queue object)
(unless (find object updatable-objects :test #'eq)
(vector-push-extend object updatable-objects)))
object)))
(defun %run-pending-removes (scene)
(declare (optimize (speed 3))
(game-scene scene))
(with-slots (pending-removes spatial-partition render-queue updatable-objects) scene
(declare (vector pending-removes updatable-objects))
(when (> (length pending-removes) 0)
(loop :for removed-object :across pending-removes :do
(event-unsubscribe removed-object scene killed)
(stop-tracking spatial-partition removed-object)
(when (%in-live-object-area-p scene removed-object)
(render-queue-remove render-queue removed-object)
(setf updatable-objects (delete removed-object updatable-objects)))
(log:debug "removed ~A from scene" removed-object)
:finally
(setf (fill-pointer pending-removes) 0))))
(values))
(defun %run-pending-adds (scene)
(declare (optimize (speed 3))
(game-scene scene))
(with-slots (pending-adds spatial-partition render-queue updatable-objects) scene
(loop :for object :across pending-adds :do
(%%add-object-to-scene scene object)
:finally
(setf (fill-pointer pending-adds) 0))))
(defun %force-rebuild-live-objects (scene)
(log:debug "force live object rebuild.")
(with-slots (camera live-object-radius live-object-rebuild-camera-position) scene
(if (float= 0.0 (x live-object-rebuild-camera-position))
(setf (x live-object-rebuild-camera-position)
(+ (width camera) live-object-radius))
(setf (x live-object-rebuild-camera-position) 0.0))))
(defun %in-live-object-area-p (scene game-object)
"T if OBJECT is inside SCENE's current live object area."
(declare (optimize (speed 3))
(game-scene scene)
(game-object game-object))
(with-slots (camera live-object-radius live-object-rebuild-camera-position) scene
(let ((live-x-min (- (x live-object-rebuild-camera-position)
(width camera)
live-object-radius))
(live-x-max (+ (x live-object-rebuild-camera-position)
(width camera)
live-object-radius))
(live-y-min (- (y live-object-rebuild-camera-position)
(height camera)
live-object-radius))
(live-y-max (+ (y live-object-rebuild-camera-position)
(height camera)
live-object-radius)))
(multiple-value-bind (x y z w h) (world-dimensions game-object)
(declare (ignore z)
(single-float x y w h))
(and (or (<= live-x-min x live-x-max)
(<= live-x-min (+ x w) live-x-max)
(and (<= x live-x-min)
(>= (+ x w) live-x-max)))
(or (<= live-y-min y live-y-max)
(<= live-y-min (+ y h) live-y-max)
(and (<= y live-y-min)
(>= (+ y h) live-y-max))))))))
(defun %rebuild-live-object-area-p (scene)
(declare (optimize (speed 3))
(game-scene scene))
(block camera-moved-outside-render-area-p
(with-slots (camera live-object-radius live-object-rebuild-camera-position) scene
(with-accessors ((c-x x) (c-y y) (c-w width) (c-h height)) camera
(declare (single-float c-x c-y c-w c-h))
(let* ((camera-centered-x (+ c-x (/ c-w 2.0)))
(camera-centered-y (+ c-y (/ c-h 2.0)))
(delta (max
(abs (- camera-centered-x (x live-object-rebuild-camera-position)))
(abs (- camera-centered-y (y live-object-rebuild-camera-position))))))
(when (>= delta live-object-radius)
(setf (x live-object-rebuild-camera-position) camera-centered-x
(y live-object-rebuild-camera-position) camera-centered-y)
t))))))
(defmethod update :around ((scene game-scene))
(with-slots (updating-p) scene
(setf updating-p t)
(unwind-protect
(call-next-method scene)
(setf updating-p nil))))
(defmethod update ((game-scene game-scene))
(declare (optimize (speed 3)))
(with-slots (live-object-rebuild-camera-position
live-object-radius
updatable-objects
(queue render-queue)
reset-instance-renderers
(bg scene-background)
scene-overlays
pending-removes
camera)
game-scene
(let ((rebuild-live-objects-p (%rebuild-live-object-area-p game-scene)))
(%run-pending-removes game-scene)
(%run-pending-adds game-scene)
(when rebuild-live-objects-p
(setf (fill-pointer updatable-objects) 0)
(render-queue-reset queue)
(setf (fill-pointer reset-instance-renderers) 0))
(pre-update (camera game-scene))
(when bg
(pre-update bg)
(when rebuild-live-objects-p
(render-queue-add queue bg)))
(loop :for overlay :across (the (vector overlay) scene-overlays) :do
(pre-update overlay))
(call-next-method game-scene)
(when rebuild-live-objects-p
(let ((num-objects-to-update 0)
(num-objects-to-render 0)
(live-x-min (- (x live-object-rebuild-camera-position)
(width camera)
live-object-radius))
(live-x-max (+ (x live-object-rebuild-camera-position)
(width camera)
live-object-radius))
(live-y-min (- (y live-object-rebuild-camera-position)
(height camera)
live-object-radius))
(live-y-max (+ (y live-object-rebuild-camera-position)
(height camera)
live-object-radius)))
(declare (fixnum num-objects-to-render num-objects-to-update)
(single-float live-x-min live-x-max live-y-min live-y-max))
(log:debug "rebuilding live-objects")
(do-spatial-partition (game-object
(spatial-partition game-scene)
:static-iteration-p t
:min-x live-x-min :max-x live-x-max
:min-y live-y-min :max-y live-y-max)
(block found-object-to-render
(block check-if-instance-rendered
(if (typep game-object 'instance-rendered-drawable)
(with-slots ((instance-renderer instance-renderer)) game-object
(unless (find instance-renderer reset-instance-renderers)
(incf num-objects-to-render)
(vector-push-extend instance-renderer reset-instance-renderers)
(instance-renderer-reset instance-renderer game-scene)))
(incf num-objects-to-render)))
(render-queue-add queue game-object))
(block check-add-to-updatable-objects
(when (and (not (typep game-object 'static-object))
(not (find game-object updatable-objects :test #'eq)))
(incf num-objects-to-update)
(vector-push-extend game-object updatable-objects))))
(log:debug "Rebuild complete. Found ~A objects to render and ~A objects to update"
num-objects-to-render
num-objects-to-update)))
(update (camera game-scene))
(loop :for overlay :across (the (vector overlay) scene-overlays) :do
(update overlay)
#+nil
(when rebuild-live-objects-p
(render-queue-add render-queue overlay)))
(when rebuild-live-objects-p
(render-queue-add queue camera))
(when bg
(update bg))
(loop :for game-object :across updatable-objects :do
(pre-update game-object)
(found-object-to-update game-scene game-object)
(update game-object))
(values))))
(defmethod render ((scene game-scene) update-percent camera gl-context)
HACK scene transitions get messed up bc rendering occurs before setup stuff is done
(prog1 (call-next-method scene update-percent camera gl-context)
(with-slots (scene-overlays) scene
(loop :for overlay :across (the (vector overlay) scene-overlays) :do
(render overlay update-percent (camera scene) gl-context))))))
(defevent-handler killed ((object obb) (game-scene game-scene))
""
(remove-from-scene game-scene object))
@export
(defun schedule (game-scene timestamp zero-arg-fn)
"When the value returned by SCENE-TICKS of GAME-SCENE equals or exceeds TIMESTAMP the ZERO-ARG-FN callback will be invoked."
(scheduler-add game-scene timestamp zero-arg-fn)
(values))
@export
(defun get-object-by-id (scene id)
"Return the (presumably) unique game-object identified by ID in SCENE."
(declare (game-scene scene))
(block find-object
(do-spatial-partition (game-object (spatial-partition scene) :static-iteration-p t)
(when (equalp (object-id game-object) id)
(return-from find-object game-object)))))
@export
(defun in-scene-p (scene object)
"Return OBJECT if OBJECT is in SCENE, nil otherwise."
(declare (optimize (speed 3))
(game-scene scene))
(block find-object
(do-spatial-partition (obj (spatial-partition scene) :static-iteration-p t)
(when (eq obj object)
(return-from find-object object)))))
|
16f37d0f61a1dd10973c1d70eae652e9e4e695b83cb8aeb9e0189cb2bb4bd772 | ndmitchell/catch | Blur.hs |
module RegExp.Blur(blur) where
import RegExp.Type
-- the output of blur must be a finite set
-- assuming a finite input
blur :: (Eq a, Show a) => RegExp a -> RegExp a
blur x = f x
where
f (RegKleene x) = regKleene (f x)
f (RegUnion x) = regUnion (map f x)
f (RegConcat x) = regConcat (g x)
f x = x
g (a : RegKleene b : c) | a == b = g (RegKleene b : a : c)
g (RegKleene a : b : c : d : e) | a == b && b == c && c == d =
g (RegKleene a : c : d : e)
g (a:b:c:d) | a == b && b == c =
g (RegKleene a : b : c : d)
g (x:xs) = x : g xs
g [] = []
| null | https://raw.githubusercontent.com/ndmitchell/catch/5d834416a27b4df3f7ce7830c4757d4505aaf96e/src/RegExp/Blur.hs | haskell | the output of blur must be a finite set
assuming a finite input |
module RegExp.Blur(blur) where
import RegExp.Type
blur :: (Eq a, Show a) => RegExp a -> RegExp a
blur x = f x
where
f (RegKleene x) = regKleene (f x)
f (RegUnion x) = regUnion (map f x)
f (RegConcat x) = regConcat (g x)
f x = x
g (a : RegKleene b : c) | a == b = g (RegKleene b : a : c)
g (RegKleene a : b : c : d : e) | a == b && b == c && c == d =
g (RegKleene a : c : d : e)
g (a:b:c:d) | a == b && b == c =
g (RegKleene a : b : c : d)
g (x:xs) = x : g xs
g [] = []
|
6f38c355955756cad424182fdb05b92e563b0b1feff4286292687e994838d2c9 | ropas/sparrow | instrumentedMem.ml | (***********************************************************************)
(* *)
Copyright ( c ) 2007 - present .
Programming Research Laboratory ( ROPAS ) , Seoul National University .
(* All rights reserved. *)
(* *)
This software is distributed under the term of the BSD license .
(* See the LICENSE file for details. *)
(* *)
(***********************************************************************)
module type S =
sig
include MapDom.CPO
module Access : Access.S with type Loc.t = A.t and type PowLoc.t = PowA.t
val init_access : unit -> unit
val return_access : unit -> Access.info
end
module Make (Mem : MapDom.CPO) =
struct
include Mem
module Loc = A
module Val = B
module Access = Access.Make(Mem)
let access = ref Access.Info.empty
let access_mode = ref false
let init_access : unit -> unit
= fun () -> access_mode := true; access := Access.Info.empty; ()
let return_access : unit -> Access.info
= fun () -> access_mode := false; !access
let add k v m =
(if !access_mode then
access := Access.Info.add Access.Info.def k !access);
add k v m
let weak_add k v m =
(if !access_mode then
access := Access.Info.add Access.Info.all k !access);
weak_add k v m
let find : A.t -> t -> B.t
= fun k m ->
(if !access_mode && not (eq m bot) then
access := Access.Info.add Access.Info.use k !access);
find k m
end
| null | https://raw.githubusercontent.com/ropas/sparrow/3ec055b8c87b5c8340ef3ed6cde34f5835865b31/src/domain/instrumentedMem.ml | ocaml | *********************************************************************
All rights reserved.
See the LICENSE file for details.
********************************************************************* | Copyright ( c ) 2007 - present .
Programming Research Laboratory ( ROPAS ) , Seoul National University .
This software is distributed under the term of the BSD license .
module type S =
sig
include MapDom.CPO
module Access : Access.S with type Loc.t = A.t and type PowLoc.t = PowA.t
val init_access : unit -> unit
val return_access : unit -> Access.info
end
module Make (Mem : MapDom.CPO) =
struct
include Mem
module Loc = A
module Val = B
module Access = Access.Make(Mem)
let access = ref Access.Info.empty
let access_mode = ref false
let init_access : unit -> unit
= fun () -> access_mode := true; access := Access.Info.empty; ()
let return_access : unit -> Access.info
= fun () -> access_mode := false; !access
let add k v m =
(if !access_mode then
access := Access.Info.add Access.Info.def k !access);
add k v m
let weak_add k v m =
(if !access_mode then
access := Access.Info.add Access.Info.all k !access);
weak_add k v m
let find : A.t -> t -> B.t
= fun k m ->
(if !access_mode && not (eq m bot) then
access := Access.Info.add Access.Info.use k !access);
find k m
end
|
02dd07a645cc9e530baec9c86afa0826065c0687915f626790bb9d08de33a90d | apache/couchdb-mochiweb | mochiweb_websocket_tests.erl | -module(mochiweb_websocket_tests).
-author('').
The MIT License ( MIT )
Copyright ( c ) 2012 Zadane.pl sp .
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
-include_lib("eunit/include/eunit.hrl").
make_handshake_for_correct_client_test() ->
%% Hybi handshake
Req1 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
mochiweb_headers:make([{"Sec-WebSocket-Key",
"Xn3fdKyc3qEXPuj2A3O+ZA=="}])),
{Version1, {HttpCode1, Headers1, _}} =
mochiweb_websocket:make_handshake(Req1),
?assertEqual(hybi, Version1),
?assertEqual(101, HttpCode1),
?assertEqual("Upgrade",
(proplists:get_value("Connection", Headers1))),
?assertEqual(<<"BIFTHkJk4r5t8kuud82tZJaQsCE=">>,
(proplists:get_value("Sec-Websocket-Accept",
Headers1))),
handshake
{Version2, {HttpCode2, Headers2, Body2}} =
mochiweb_websocket:hixie_handshake("ws://", "localhost",
"/", "33j284 9 z63 e 9 7",
"TF'3|6D12659H 7 70",
<<175, 181, 191, 215, 128, 195, 144,
120>>,
"null"),
?assertEqual(hixie, Version2),
?assertEqual(101, HttpCode2),
?assertEqual("null",
(proplists:get_value("Sec-WebSocket-Origin",
Headers2))),
?assertEqual("ws/",
(proplists:get_value("Sec-WebSocket-Location",
Headers2))),
?assertEqual(<<230, 144, 237, 94, 84, 214, 41, 69, 244,
150, 134, 167, 221, 103, 239, 246>>,
Body2).
hybi_frames_decode_test() ->
?assertEqual([{1, <<"foo">>}],
(mochiweb_websocket:parse_hybi_frames(nil,
<<129, 131, 118, 21, 153,
58, 16, 122, 246>>,
[]))),
?assertEqual([{1, <<"foo">>}, {1, <<"bar">>}],
(mochiweb_websocket:parse_hybi_frames(nil,
<<129, 131, 1, 225, 201,
42, 103, 142, 166, 129,
131, 93, 222, 214, 66,
63, 191, 164>>,
[]))).
hixie_frames_decode_test() ->
?assertEqual([],
(mochiweb_websocket:parse_hixie_frames(<<>>, []))),
?assertEqual([<<"foo">>],
(mochiweb_websocket:parse_hixie_frames(<<0, 102, 111,
111, 255>>,
[]))),
?assertEqual([<<"foo">>, <<"bar">>],
(mochiweb_websocket:parse_hixie_frames(<<0, 102, 111,
111, 255, 0, 98, 97,
114, 255>>,
[]))).
end_to_end_test_factory(ServerTransport) ->
mochiweb_test_util:with_server(ServerTransport,
fun end_to_end_server/1,
fun (Transport, Port) ->
end_to_end_client(mochiweb_test_util:sock_fun(Transport,
Port))
end).
end_to_end_server(Req) ->
?assertEqual("Upgrade",
(mochiweb_request:get_header_value("connection", Req))),
?assertEqual("websocket",
(mochiweb_request:get_header_value("upgrade", Req))),
{ReentryWs, _ReplyChannel} =
mochiweb_websocket:upgrade_connection(Req,
fun end_to_end_ws_loop/3),
ReentryWs(ok).
end_to_end_ws_loop(Payload, State, ReplyChannel) ->
Echo server
lists:foreach(ReplyChannel, Payload),
State.
end_to_end_client(S) ->
%% Key and Accept per
UpgradeReq = string:join(["GET / HTTP/1.1",
"Host: localhost", "Upgrade: websocket",
"Connection: Upgrade",
"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==", "",
""],
"\r\n"),
ok = S({send, UpgradeReq}),
{ok, {http_response, {1, 1}, 101, _}} = S(recv),
read_expected_headers(S,
[{'Upgrade', "websocket"}, {'Connection', "Upgrade"},
{'Content-Length', "0"},
{"Sec-Websocket-Accept",
"s3pPLMBiTxaQ9kYGzzhZRbK+xOo="}]),
The first message sent over telegraph :)
SmallMessage = <<"What hath God wrought?">>,
ok = S({send,
<<1:1, %% Fin
0:1, %% Rsv1
0:1, %% Rsv2
0:1, %% Rsv3
Opcode , 1 = text frame
1:1, %% Mask on
Length , < 125 case
0:32, %% Mask (trivial)
SmallMessage/binary>>}),
{ok, WsFrames} = S(recv),
<<1:1, %% Fin
0:1, %% Rsv1
0:1, %% Rsv2
0:1, %% Rsv3
1:4, %% Opcode, text frame (all mochiweb supports for now)
MsgSize:8, %% Expecting small size
SmallMessage/binary>> =
WsFrames,
?assertEqual(MsgSize, (byte_size(SmallMessage))),
ok.
read_expected_headers(S, D) ->
Headers = mochiweb_test_util:read_server_headers(S),
lists:foreach(fun ({K, V}) ->
?assertEqual(V,
(mochiweb_headers:get_value(K, Headers)))
end,
D).
end_to_end_http_test() ->
end_to_end_test_factory(plain).
end_to_end_https_test() -> end_to_end_test_factory(ssl).
| null | https://raw.githubusercontent.com/apache/couchdb-mochiweb/fce80ef5e2c105405a39d3cdf4615f21e0d1d734/test/mochiweb_websocket_tests.erl | erlang | Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Hybi handshake
Key and Accept per
Fin
Rsv1
Rsv2
Rsv3
Mask on
Mask (trivial)
Fin
Rsv1
Rsv2
Rsv3
Opcode, text frame (all mochiweb supports for now)
Expecting small size | -module(mochiweb_websocket_tests).
-author('').
The MIT License ( MIT )
Copyright ( c ) 2012 Zadane.pl sp .
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-include_lib("eunit/include/eunit.hrl").
make_handshake_for_correct_client_test() ->
Req1 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
mochiweb_headers:make([{"Sec-WebSocket-Key",
"Xn3fdKyc3qEXPuj2A3O+ZA=="}])),
{Version1, {HttpCode1, Headers1, _}} =
mochiweb_websocket:make_handshake(Req1),
?assertEqual(hybi, Version1),
?assertEqual(101, HttpCode1),
?assertEqual("Upgrade",
(proplists:get_value("Connection", Headers1))),
?assertEqual(<<"BIFTHkJk4r5t8kuud82tZJaQsCE=">>,
(proplists:get_value("Sec-Websocket-Accept",
Headers1))),
handshake
{Version2, {HttpCode2, Headers2, Body2}} =
mochiweb_websocket:hixie_handshake("ws://", "localhost",
"/", "33j284 9 z63 e 9 7",
"TF'3|6D12659H 7 70",
<<175, 181, 191, 215, 128, 195, 144,
120>>,
"null"),
?assertEqual(hixie, Version2),
?assertEqual(101, HttpCode2),
?assertEqual("null",
(proplists:get_value("Sec-WebSocket-Origin",
Headers2))),
?assertEqual("ws/",
(proplists:get_value("Sec-WebSocket-Location",
Headers2))),
?assertEqual(<<230, 144, 237, 94, 84, 214, 41, 69, 244,
150, 134, 167, 221, 103, 239, 246>>,
Body2).
hybi_frames_decode_test() ->
?assertEqual([{1, <<"foo">>}],
(mochiweb_websocket:parse_hybi_frames(nil,
<<129, 131, 118, 21, 153,
58, 16, 122, 246>>,
[]))),
?assertEqual([{1, <<"foo">>}, {1, <<"bar">>}],
(mochiweb_websocket:parse_hybi_frames(nil,
<<129, 131, 1, 225, 201,
42, 103, 142, 166, 129,
131, 93, 222, 214, 66,
63, 191, 164>>,
[]))).
hixie_frames_decode_test() ->
?assertEqual([],
(mochiweb_websocket:parse_hixie_frames(<<>>, []))),
?assertEqual([<<"foo">>],
(mochiweb_websocket:parse_hixie_frames(<<0, 102, 111,
111, 255>>,
[]))),
?assertEqual([<<"foo">>, <<"bar">>],
(mochiweb_websocket:parse_hixie_frames(<<0, 102, 111,
111, 255, 0, 98, 97,
114, 255>>,
[]))).
end_to_end_test_factory(ServerTransport) ->
mochiweb_test_util:with_server(ServerTransport,
fun end_to_end_server/1,
fun (Transport, Port) ->
end_to_end_client(mochiweb_test_util:sock_fun(Transport,
Port))
end).
end_to_end_server(Req) ->
?assertEqual("Upgrade",
(mochiweb_request:get_header_value("connection", Req))),
?assertEqual("websocket",
(mochiweb_request:get_header_value("upgrade", Req))),
{ReentryWs, _ReplyChannel} =
mochiweb_websocket:upgrade_connection(Req,
fun end_to_end_ws_loop/3),
ReentryWs(ok).
end_to_end_ws_loop(Payload, State, ReplyChannel) ->
Echo server
lists:foreach(ReplyChannel, Payload),
State.
end_to_end_client(S) ->
UpgradeReq = string:join(["GET / HTTP/1.1",
"Host: localhost", "Upgrade: websocket",
"Connection: Upgrade",
"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==", "",
""],
"\r\n"),
ok = S({send, UpgradeReq}),
{ok, {http_response, {1, 1}, 101, _}} = S(recv),
read_expected_headers(S,
[{'Upgrade', "websocket"}, {'Connection', "Upgrade"},
{'Content-Length', "0"},
{"Sec-Websocket-Accept",
"s3pPLMBiTxaQ9kYGzzhZRbK+xOo="}]),
The first message sent over telegraph :)
SmallMessage = <<"What hath God wrought?">>,
ok = S({send,
Opcode , 1 = text frame
Length , < 125 case
SmallMessage/binary>>}),
{ok, WsFrames} = S(recv),
SmallMessage/binary>> =
WsFrames,
?assertEqual(MsgSize, (byte_size(SmallMessage))),
ok.
read_expected_headers(S, D) ->
Headers = mochiweb_test_util:read_server_headers(S),
lists:foreach(fun ({K, V}) ->
?assertEqual(V,
(mochiweb_headers:get_value(K, Headers)))
end,
D).
end_to_end_http_test() ->
end_to_end_test_factory(plain).
end_to_end_https_test() -> end_to_end_test_factory(ssl).
|
ad0c2e6c0fa094fe033c197f80346df8bac985220b3467aa53920dafb8550612 | naoto-ogawa/h-xproto-mysql | NodeSession.hs | |
module : Database . MySQLX.NodeSession
description : Session management
copyright : ( c ) , 2017
license : MIT
maintainer :
stability : experimental
portability :
Session ( a.k.a . Connection )
module : Database.MySQLX.NodeSession
description : Session management
copyright : (c) naoto ogawa, 2017
license : MIT
maintainer :
stability : experimental
portability :
Session (a.k.a. Connection)
-}
# LANGUAGE RecordWildCards #
module DataBase.MySQLX.NodeSession
(
-- * Message
Message
-- * Session Infomation
, NodeSessionInfo(..)
, defaultNodeSesssionInfo
-- * Node Session
, NodeSession(clientId, auth_data)
-- * Session Management
, openNodeSession
, closeNodeSession
-- * Transaction
, begenTrxNodeSession
, commitNodeSession
, rollbackNodeSession
-- * Expectation
, sendExpectNoError
, sendExpectUnset
, sendExpectClose
--
, readMessagesR
, writeMessageR
, repeatreadMessagesR
-- * Helper functions
, isSocketConnected
-- * Internal Use Only
, readMsgLengthR
, readAllMsgR
) where
-- general, standard library
import qualified Data.Binary as BIN
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Int as I
import Data.Typeable (TypeRep, Typeable, typeRep, typeOf)
import qualified Data.Word as W
import Network.Socket hiding (recv)
import Network.Socket.ByteString (send, sendAll, recv)
import Control.Exception.Safe (Exception, MonadThrow, SomeException, throwM)
import Control.Monad
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class
-- protocol buffer library
import qualified Text.ProtocolBuffers as PB
import qualified Text.ProtocolBuffers.Basic as PBB
import qualified Text.ProtocolBuffers.Header as PBH
import qualified Text.ProtocolBuffers.TextMessage as PBT
import qualified Text.ProtocolBuffers.WireMessage as PBW
import qualified Text.ProtocolBuffers.Reflections as PBR
-- generated library
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Error as PE
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Frame as PFr
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.AuthenticateContinue as PAC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Ok as POk
-- my library
import DataBase.MySQLX.Exception
import DataBase.MySQLX.Model
import DataBase.MySQLX.Util
-- -----------------------------------------------------------------------------
--
-- -----------------------------------------------------------------------------
-- | Node Session Object
data NodeSession = NodeSession
{ _socket :: Socket -- ^ socket
, clientId :: W.Word64 -- ^ client id given by MySQL Server
^ auth_data given by MySQL Server
} deriving Show
-- | Infomation Object of Node Session
data NodeSessionInfo = NodeSessionInfo
{ host :: HostName -- ^ host name
, port :: PortNumber -- ^ port nummber
, database :: String -- ^ database name
, user :: String -- ^ user
, password :: String -- ^ password
, charset :: String -- ^ charset
} deriving Show
-- | Default NodeSessionInfo
--
* host : 127.0.0.1
* port : 33600
-- * database : ""
-- * user : "root"
-- * password : ""
-- * charset : ""
--
defaultNodeSesssionInfo :: NodeSessionInfo
defaultNodeSesssionInfo = NodeSessionInfo "127.0.0.1" 33060 "" "root" "" ""
-- | A wire message: (message type tag, raw payload bytes).
type Message = (Int, B.ByteString)
-- -----------------------------------------------------------------------------
-- Session Management
-- -----------------------------------------------------------------------------
-- | Open a node session: connect a socket, negotiate capabilities,
--   authenticate, and return a session carrying the server-assigned
--   client id.  Throws 'XProtocolError' when the server answers the
--   authentication with an Error message.
openNodeSession :: (MonadIO m, MonadThrow m)
  => NodeSessionInfo  -- ^ NodeSessionInfo
  -> m NodeSession    -- ^ NodeSession
openNodeSession sessionInfo = do
  socket <- _client (host sessionInfo) (port sessionInfo)
  let session = NodeSession socket (fromIntegral 0) BL.empty
  _caps <- runReaderT _negociate session
  (t, msg):xs <- runReaderT (_auth sessionInfo) session
  case t of
    _ | t == s_notice -> do
          frm <- getFrame msg
          case PFr.payload frm of
            Just payloadBytes -> do
              changed <- getSessionStateChanged $ BL.toStrict payloadBytes
              -- debug changed
              -- parse (and thereby validate) the trailing AuthenticateOk
              ok <- mkAuthenticateOk $ snd $ head xs
              -- debug ok
              cid <- getClientId changed
              -- debug $ "NodeSession is opend; clientId =" ++ (show cid)
              return session {clientId = cid}
            Nothing -> throwM $ XProtocolException "Payload is Nothing"
      | t == s_error -> do
          err <- getError msg
          throwM $ XProtocolError err
    _ -> error $ "message type unknown, =" ++ show t
-- | Close a node session: send the Close message, wait for the server's
--   Ok, then close the underlying socket.
closeNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m ()
closeNodeSession nodeSess = do
  runReaderT (sendClose >> recieveOk) nodeSess
  liftIO . close $ _socket nodeSess
  -- debug "NodeSession is closed."
  return ()
-- | Open a TCP stream socket to the given host and port.
_client :: (MonadIO m) => HostName -> PortNumber -> m Socket
_client hostName portNum = liftIO $ withSocketsDo $ do
  candidates <- getAddrInfo Nothing (Just hostName) (Just $ show portNum)
  let chosen = head candidates
  sock <- socket (addrFamily chosen) Stream defaultProtocol
  connect sock (addrAddress chosen)
  return sock
-- | Perform the authentication handshake: start, pick up the salt,
--   continue with the credentials, then return the server's replies.
_auth :: (MonadIO m, MonadThrow m) => NodeSessionInfo -> ReaderT NodeSession m [Message]
_auth NodeSessionInfo{..} = do
  sendAuthenticateStart user
  salt <- recieveSalt
  sendAutenticateContinue database user password salt
  readMessagesR
-- | Send a CapabilitiesGet message to the server.
sendCapabilitiesGet :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m ()
sendCapabilitiesGet = writeMessageR mkCapabilitiesGet
-- | Capability negotiation step: request capabilities and return the
--   server's replies, throwing 'XProtocolError' if the first reply is
--   an Error message.
_negociate :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m [Message]
_negociate = do
  sendCapabilitiesGet
  replies@((msgType, payload):_) <- readMessagesR
  if msgType /= s_error
    then return replies
    else do
      err <- getError payload
      throwM $ XProtocolError err
-- | Send an AuthenticateStart message carrying the user name.
sendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m ()
sendAuthenticateStart = writeMessageR . mkAuthenticateStart
-- | Send an AuthenticateContinue message with database, user, password and salt.
--   NOTE(review): parameters arrive as database/user/password/salt but
--   'mkAuthenticateContinue' is applied as database user salt password --
--   confirm the builder's expected argument order.
--   (Name keeps the historical \"Autenticate\" spelling.)
sendAutenticateContinue :: (MonadIO m) => String -> String -> String -> B.ByteString -> ReaderT NodeSession m ()
sendAutenticateContinue database user password salt = writeMessageR $ mkAuthenticateContinue database user salt password
-- | Send a Close message to the server (session shutdown request).
sendClose :: (MonadIO m) => ReaderT NodeSession m ()
sendClose = writeMessageR mkClose
-- | Retrieve the authentication salt carried in the server's
--   AuthenticateContinue message, as a strict ByteString.
recieveSalt :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m B.ByteString
recieveSalt = BL.toStrict . PAC.auth_data <$> getAuthenticateContinueR
-- | Receive an Ok message from the server.
recieveOk :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok
recieveOk = getOkR
-- | Send a NoError expectation message to the server
--   (subsequent pipelined messages are skipped on error).
sendExpectNoError :: (MonadIO m) => ReaderT NodeSession m ()
sendExpectNoError = writeMessageR mkExpectNoError
-- | Send an Unset expectation message to the server.
sendExpectUnset :: (MonadIO m) => ReaderT NodeSession m ()
sendExpectUnset = writeMessageR mkExpectUnset
{-
interfaces as follows:
openNodeSession = do
  sendAuthenticateStart username                     (throw NetworkException)      :: aaa -> session -> param1  -> ()
  salt <- recieveSalt                                (throw NetworkException)      :: bbb -> session -> ByteString
  sendAuthenticateContinue schema user salt password (throw NetworkException)      :: ccc -> session -> param{ } -> ()
  reciveAuthenticateOK                               (throw AuthenticateException) :: ddd -> session -> ()
-}
-- {- [C]->[S] -} -- putMsg sock $ getAuthMsg "root"
--
-- {- [S]->[C] -}
-- x <- parse2AuthenticateContinue sock
-- let salt = S.toStrict $ PAC.auth_data x
-- print salt
--
-- {- [C]->[S] -}
-- putMsg sock $ getAutCont "world_x" "root" salt (B8.pack "root")
--
-- {- [S]->[C] -}
-- frame <- parse2Frame sock
-- getSessionStateChanged frame
-- parse2AuthenticateOK sock
--
-- Using NodeSession and making ReaderT
--
-- | Serialize a protobuf message and write it to the session's socket,
--   framed as: 4-byte little-endian length (payload + 1 type byte),
--   1-byte message type, then the payload.
writeMessage :: (PBT.TextMsg msg
                ,PBR.ReflectDescriptor msg
                ,PBW.Wire msg
                ,Show msg
                ,Typeable msg
                ,MonadIO m ) => NodeSession -> msg -> m ()
writeMessage NodeSession{..} msg =
    liftIO $ sendAll _socket frame
  where
    frame      = BL.toStrict $ putMessageLengthLE (payloadLen + 1) `BL.append` typeByte `BL.append` payload
    payload    = PBW.messagePut msg
    payloadLen = fromIntegral $ PBW.messageSize msg
    typeByte   = putMessageType $ fromIntegral $ getClientMsgTypeNo msg
-- | Send a Close expectation message to the server.
sendExpectClose :: (MonadIO m) => ReaderT NodeSession m ()
sendExpectClose = ask >>= liftIO . writeExpectClose
-- Write an Expect-close frame: 4-byte LE length, 1-byte type, payload.
-- NOTE(review): the type byte is hard-coded to 25 and the length to 0
-- while the payload is the serialized 'mkClose' message; this only
-- frames correctly if that payload is empty -- TODO confirm against the
-- X-protocol Expect.Close definition.
writeExpectClose NodeSession{..} = do
  liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes)
  where
    bytes = PBW.messagePut mkClose
    len = fromIntegral 0
    ty = putMessageType $ fromIntegral 25
-- | Write a protobuf message to the current node session.
writeMessageR :: (PBT.TextMsg msg
                 ,PBR.ReflectDescriptor msg
                 ,PBW.Wire msg
                 ,Show msg
                 ,Typeable msg
                 ,MonadIO m ) => msg -> ReaderT NodeSession m ()
writeMessageR msg = ask >>= \nodeSess -> liftIO (writeMessage nodeSess msg)
-- | Read one message and decode its payload as an Error.
getErrorR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PE.Error
getErrorR = readOneMessageR >>= getError . snd

-- | Read one message and decode its payload as a Frame.
getFrameR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PFr.Frame
getFrameR = readOneMessageR >>= getFrame . snd

-- | Read one message and decode its payload as an AuthenticateContinue.
getAuthenticateContinueR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PAC.AuthenticateContinue
getAuthenticateContinueR = readOneMessageR >>= getAuthenticateContinue . snd

-- | Read one message and decode its payload as an Ok.
getOkR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok
getOkR = readOneMessageR >>= getOk . snd
-- | Read one message from the session and decode its payload as the
--   protobuf type demanded by the caller's context.
getOneMessageR :: (MonadIO m
                  ,MonadThrow m
                  ,PBW.Wire a
                  ,PBR.ReflectDescriptor a
                  ,PBT.TextMsg a
                  ,Typeable a) => ReaderT NodeSession m a
getOneMessageR = do
  nodeSess <- ask
  (_, payload) <- liftIO $ readOneMessage nodeSess
  getMessage payload
-- | Read one length-prefixed batch of messages from the session's socket:
--   the 4-byte LE length of the first message, then every chained message.
readMessages :: (MonadIO m) => NodeSession -> m [Message]
readMessages NodeSession{..} = do
  len <- runReaderT readMsgLengthR _socket
  -- debug $ "1st length = " ++ (show $ getIntFromLE len)
  runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket
-- | Read one batch of messages and classify it: 'Left' when any message
--   in the batch is a server Error, 'Right' otherwise.  Both sides carry
--   the full batch.
readMessagesEither :: (MonadIO m) => NodeSession -> m (Either [Message] [Message])
readMessagesEither NodeSession{..} = do
  len <- runReaderT readMsgLengthR _socket
  -- debug $ "1st length = " ++ (show $ getIntFromLE len)
  ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket
  if hasError ret
    then return $ Left ret   -- Error
    else return $ Right ret  -- Success
  where hasError = any (\(t, _) -> t == s_error)
-- | Retrieve one batch of messages from the node session
--   (Reader-monad variant of 'readMessages').
readMessagesR :: (MonadIO m) => ReaderT NodeSession m [Message]
readMessagesR = ask >>= liftIO . readMessages
-- | Read @num@ batches of messages from the node session, splitting them
--   into successes and errors.  When the first argument is True
--   (Expectation NoError), reading stops at the first error batch and the
--   accumulated successes plus that error batch are returned; note that
--   previously accumulated errors are discarded in that case.
repeatreadMessagesR :: (MonadIO m)
  => Bool                    -- ^ True : Expectation No Error, False : Otherwise
  -> Int                     -- ^ The number of sending messages.
  -> ([Message], [Message])  -- ^ Initial empty value, which should be ([], [])
  -> ReaderT NodeSession m ([Message], [Message])  -- ^ fst : Success messages, snd : Error messages
repeatreadMessagesR noError num acc = do
  if num == 0
    then return acc
    else do
      nodeSess <- ask
      r <- readMessagesEither nodeSess
      case r of
        Left m  -> if noError
                     then return (fst acc , m )
                     else repeatreadMessagesR noError (num-1) (fst acc , snd acc ++ m)
        Right m -> repeatreadMessagesR noError (num-1) ((fst acc) ++ m , snd acc )
-- | Read a single message from the given session's socket.
readOneMessage :: (MonadIO m) => NodeSession -> m Message
readOneMessage NodeSession{..} = runReaderT readOneMsgR _socket

-- | Reader-monad variant of 'readOneMessage'.
readOneMessageR :: (MonadIO m) => ReaderT NodeSession m Message
readOneMessageR = ask >>= liftIO . readOneMessage

-- | Read exactly @n@ messages from the given session's socket.
readNMessage :: (MonadIO m) => Int -> NodeSession -> m [Message]
readNMessage n NodeSession{..} = runReaderT (readNMsgR n) _socket

-- | Reader-monad variant of 'readNMessage'.
readNMessageR :: (MonadIO m) => Int -> ReaderT NodeSession m [Message]
readNMessageR n = ask >>= liftIO . readNMessage n
--
-- Using Socket
--
-- | Read up to @len@ bytes from the socket in the Reader environment.
--   NOTE(review): 'recv' may return fewer than @len@ bytes; callers in
--   this module appear to assume a full read -- TODO confirm for large
--   messages crossing TCP segment boundaries.
readSocketR :: (MonadIO m) => Int -> ReaderT Socket m B.ByteString
readSocketR len = ask >>= (\x -> liftIO $ recv x len)

-- | Read the 4-byte little-endian message length header.
readMsgLengthR :: (MonadIO m) => ReaderT Socket m B.ByteString
readMsgLengthR = readSocketR 4

-- | Read the 1-byte message type tag.
readMsgTypeR :: (MonadIO m) => ReaderT Socket m B.ByteString
readMsgTypeR = readSocketR 1
-- | Read the next message body of @len@ bytes plus, when present, the
--   4-byte length header of the message chained after it.
--   Returns (message bytes, next length header); the header part is
--   empty when the stream ended exactly at this message.
--   NOTE(review): assumes 'recv' returns either exactly @len@ bytes
--   (last message) or @len + 4@; a short read would mis-split -- TODO confirm.
readNextMsgR :: (MonadIO m) => Int -> ReaderT Socket m (B.ByteString, B.ByteString)
readNextMsgR len = do
  bytes <- readSocketR (len + 4)
  return $ if B.length bytes == len
             then
               (bytes, B.empty)
             else
               B.splitAt len bytes
-- | Read a single framed message: 4-byte LE length, 1-byte type, then
--   the payload.  The length field counts the type byte, hence the -1.
readOneMsgR :: (MonadIO m) => ReaderT Socket m Message
readOneMsgR = do
  l <- readMsgLengthR
  t <- readMsgTypeR
  m <- readSocketR $ fromIntegral $ (getIntFromLE l) -1
  return (byte2Int t, m)
-- | Read exactly @n@ framed messages from the socket.
readNMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message]
readNMsgR n = replicateM n readOneMsgR
-- | Read a whole chain of messages: @len@ is the framed length of the
--   first message (type byte included, hence the -1 below).  Recurses
--   using the 4-byte length header that 'readNextMsgR' peels off after
--   each body, until the stream ends or SQL_STMT_EXECUTE_OK arrives.
readAllMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message]
readAllMsgR len = do
  t <- readMsgTypeR
  let t' = byte2Int t
  if t' == s_sql_stmt_execute_ok
    then
      -- SQL_STMT_EXECUTE_OK is the last message and has no data.
      return [(s_sql_stmt_execute_ok, B.empty)]
    else do
      -- debug $ "type=" ++ (show $ byte2Int t) ++ ", reading len=" ++ (show (len-1 `max` 0)) ++ ", plus 4 byte"
      (msg, nextLen) <- readNextMsgR (len-1)
      if B.null nextLen
        then
          return [(t', msg)]
        else do
          msgs <- readAllMsgR $ fromIntegral $ getIntFromLE nextLen
          return $ (t', msg) : msgs
-- | Begin a transaction; returns the server-reported rows-affected count.
--   (Name keeps the historical \"begen\" spelling for API compatibility.)
begenTrxNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64
begenTrxNodeSession = doSimpleSessionStateChangeStmt "begin"

-- | Commit a transaction; returns the server-reported rows-affected count.
commitNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64
commitNodeSession = doSimpleSessionStateChangeStmt "commit"

-- | Rollback a transaction; returns the server-reported rows-affected count.
rollbackNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64
rollbackNodeSession = doSimpleSessionStateChangeStmt "rollback"
--
-- helper
--
-- | Execute a simple SQL statement that only changes session state
--   (begin\/commit\/rollback) and return the rows-affected count carried
--   in the SessionStateChanged notice.  Throws 'XProtocolError' when the
--   first reply is an Error message.
doSimpleSessionStateChangeStmt :: (MonadIO m, MonadThrow m) => String -> NodeSession -> m W.Word64
doSimpleSessionStateChangeStmt sql nodeSess = do
  -- debug $ "session state change statement : " ++ sql
  runReaderT (writeMessageR (mkStmtExecuteSql sql [])) nodeSess
  ret@((msgType, payload):_) <- runReaderT readMessagesR nodeSess
  if msgType == s_error
    then do
      err <- getError payload
      throwM $ XProtocolError err
    else do
      -- pick the first notice frame; 'head' is partial, but a
      -- state-changing statement always yields one on success
      frm <- (getFrame . snd) $ head $ filter (\(t, _) -> t == s_notice) ret
      ssc <- getPayloadSessionStateChanged frm
      getRowsAffected ssc
-- | Check whether the session's underlying raw socket is connected.
isSocketConnected :: NodeSession -> IO Bool
isSocketConnected = isConnected . _socket
{-
naming rule

  Application Data <-- recv <-- [Protocol Buffer Object] <-- get <-- [Byte Data] <-- read  <-- [Socket]
  Application Data --> send --> [Protocol Buffer Object] --> put --> [Byte Data] --> write --> [Socket]

  mkFoo --> [Protocol Buffer Object]

(a) client -> server message implementation pattern
  1) make a pure function from some params to a PB object  ==> hidden
  2) lift the above function into the Reader monad         ==> open package
  ex)
    mkAuthenticateStart
      |
      V
    sendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m ()
    sendAuthenticateStart = writeMessageR . mkAuthenticateStart

(b) server -> client message implementation pattern
  1) make a pure function from ByteString to a PB object   ==> hidden
     ex) getAuthenticateContinue :: B.ByteString -> PAC.AuthenticateContinue
         getAuthenticateContinue' = getMessage
  2) lift the above function into the Reader monad
  3) make a function returning concrete data, not Protocol Buffer objects  ==> open
     ex) recieveSalt :: (MonadIO m) => ReaderT NodeSession m B.ByteString

(c) client -> server -> client message implementation
  1) combine (a) and (b) so that we get a turn-around function between client and server.
-}
| null | https://raw.githubusercontent.com/naoto-ogawa/h-xproto-mysql/1eacd6486c99b849016bf088788cb8d8b166f964/src/DataBase/MySQLX/NodeSession.hs | haskell | * Message
* Session Infomation
* Node Session
* Session Management
* Transaction
* Expectation
* Helper functions
* Internal Use Only
general, standard library
protocol buffer library
generated library
my library
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| Node Session Object
^ socket
^ client id given by MySQL Server
| Infomation Object of Node Session
^ host name
^ port nummber
^ database name
^ user
^ password
^ charset
| Default NodeSessionInfo
* database : ""
* user : "root"
* password : ""
* charset : ""
| a message (type, payload)
-----------------------------------------------------------------------------
Session Management
-----------------------------------------------------------------------------
| Open node session.
^ NodeSessionInfo
^ NodeSession
debug "success"
debug changed
debug ok
debug $ "NodeSession is opend; clientId =" ++ (show id)
| Close node session.
| Make a socket for session.
| Send Close message to the server.
| Retreive a salt given by the server.
| Send NoError expectation message to the server.
| Send Unset expectation message to the server.
{- [C]->[S] -} -- putMsg sock $ getAuthMsg "root"
{- [S]->[C] -}
x <- parse2AuthenticateContinue sock
print salt
{- [C]->[S] -}
putMsg sock $ getAutCont "world_x" "root" salt (B8.pack "root")
{- [S]->[C] -}
frame <- parse2Frame sock
getSessionStateChanged frame
parse2AuthenticateOK sock
liftIO $ putStrLn $ PBT.messagePutText msg
| write a message.
Error
| retrieve messages from Node session.
| retrieve messages from Node session.
^ True : Expectation No Error , False : Otherwise
^ The number of sending messages.
^ Initial empty value, whichi should be ([], [])
Using Socket
debug $ (show msg) ++ " , next length of reading chunk byte is " ++ (show $ if B.null len then 0 else getIntFromLE len)
| Begin a transaction.
| Commit a transaction.
| Rollback a transaction.
helper
debug $ "session state change statement : " ++ sql
[Message]
Frame
| check a raw socket connectin.
recv < -- [ Protocol Buffer Object ] < -- get < -- [ Byte Data ] < -- read < -- [ Socket ]
> send -- > [ Protocol Buffer Object ] -- > put -- > [ Byte Data ] -- > write -- > [ Socket ]
> [ Protocol Buffer Object ]
> open package
recv <-- [Protocol Buffer Object] <-- get <-- [Byte Data] <-- read <-- [Socket]
> send --> [Protocol Buffer Object] --> put --> [Byte Data] --> write --> [Socket]
> [Protocol Buffer Object]
> open package | |
module : Database . MySQLX.NodeSession
description : Session management
copyright : ( c ) , 2017
license : MIT
maintainer :
stability : experimental
portability :
Session ( a.k.a . Connection )
module : Database.MySQLX.NodeSession
description : Session management
copyright : (c) naoto ogawa, 2017
license : MIT
maintainer :
stability : experimental
portability :
Session (a.k.a. Connection)
-}
# LANGUAGE RecordWildCards #
module DataBase.MySQLX.NodeSession
(
Message
, NodeSessionInfo(..)
, defaultNodeSesssionInfo
, NodeSession(clientId, auth_data)
, openNodeSession
, closeNodeSession
, begenTrxNodeSession
, commitNodeSession
, rollbackNodeSession
, sendExpectNoError
, sendExpectUnset
, sendExpectClose
, readMessagesR
, writeMessageR
, repeatreadMessagesR
, isSocketConnected
, readMsgLengthR
, readAllMsgR
) where
import qualified Data.Binary as BIN
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Int as I
import Data.Typeable (TypeRep, Typeable, typeRep, typeOf)
import qualified Data.Word as W
import Network.Socket hiding (recv)
import Network.Socket.ByteString (send, sendAll, recv)
import Control.Exception.Safe (Exception, MonadThrow, SomeException, throwM)
import Control.Monad
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class
import qualified Text.ProtocolBuffers as PB
import qualified Text.ProtocolBuffers.Basic as PBB
import qualified Text.ProtocolBuffers.Header as PBH
import qualified Text.ProtocolBuffers.TextMessage as PBT
import qualified Text.ProtocolBuffers.WireMessage as PBW
import qualified Text.ProtocolBuffers.Reflections as PBR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Error as PE
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Frame as PFr
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.AuthenticateContinue as PAC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Ok as POk
import DataBase.MySQLX.Exception
import DataBase.MySQLX.Model
import DataBase.MySQLX.Util
data NodeSession = NodeSession
^ auth_data given by MySQL Server
} deriving Show
data NodeSessionInfo = NodeSessionInfo
} deriving Show
* host : 127.0.0.1
* port : 33600
defaultNodeSesssionInfo :: NodeSessionInfo
defaultNodeSesssionInfo = NodeSessionInfo "127.0.0.1" 33060 "" "root" "" ""
type Message = (Int, B.ByteString)
openNodeSession :: (MonadIO m, MonadThrow m)
openNodeSession sessionInfo = do
socket <- _client (host sessionInfo) (port sessionInfo)
let session = NodeSession socket (fromIntegral 0) BL.empty
x <- runReaderT _negociate session
(t, msg):xs <- runReaderT (_auth sessionInfo) session
case t of
TODO
frm <- getFrame msg
case PFr.payload frm of
Just x -> do
changed <- getSessionStateChanged $ BL.toStrict x
ok <- mkAuthenticateOk $ snd $ head xs
id <- getClientId changed
return session {clientId = id}
Nothing -> throwM $ XProtocolException "Payload is Nothing"
TODO
err <- getError msg
throwM $ XProtocolError err
_ -> error $ "message type unknown, =" ++ show t
closeNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m ()
closeNodeSession nodeSess = do
runReaderT (sendClose >> recieveOk) nodeSess
liftIO . close $ _socket nodeSess
debug " NodeSession is closed . "
return ()
_client :: (MonadIO m) => HostName -> PortNumber -> m Socket
_client host port = liftIO $ withSocketsDo $ do
addrInfo <- getAddrInfo Nothing (Just host) (Just $ show port)
let serverAddr = head addrInfo
sock <- socket (addrFamily serverAddr) Stream defaultProtocol
connect sock (addrAddress serverAddr)
return sock
_auth :: (MonadIO m, MonadThrow m) => NodeSessionInfo -> ReaderT NodeSession m [Message]
_auth NodeSessionInfo{..} = do
sendAuthenticateStart user
salt <- recieveSalt
sendAutenticateContinue database user password salt
msgs <- readMessagesR
return msgs
sendCapabilitiesGet :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m ()
sendCapabilitiesGet = writeMessageR mkCapabilitiesGet
_negociate :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m [Message]
_negociate = do
sendCapabilitiesGet
ret@(x:xs) <- readMessagesR
if fst x == s_error
then do
msg <- getError $ snd x
throwM $ XProtocolError msg
else do
return ret
sendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m ()
sendAuthenticateStart = writeMessageR . mkAuthenticateStart
sendAutenticateContinue :: (MonadIO m) => String -> String -> String -> B.ByteString -> ReaderT NodeSession m ()
sendAutenticateContinue database user password salt = writeMessageR $ mkAuthenticateContinue database user salt password
sendClose :: (MonadIO m) => ReaderT NodeSession m ()
sendClose = writeMessageR mkClose
recieveSalt :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m B.ByteString
recieveSalt = do
msg <- getAuthenticateContinueR
return $ BL.toStrict $ PAC.auth_data msg
recieveOk :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok
recieveOk = getOkR
sendExpectNoError :: (MonadIO m) => ReaderT NodeSession m ()
sendExpectNoError = writeMessageR mkExpectNoError
sendExpectUnset :: (MonadIO m) => ReaderT NodeSession m ()
sendExpectUnset = writeMessageR mkExpectUnset
interfaces as follows :
openNodeSession = do
sendAuthenticateStart username ( throw NetworkException ) : : aaa - > session - > param1 - > ( )
salt < - recieveSalt ( throw ) : : bbb - > session - > ByteString
sendAuthenticateContinue schema user salt password ( throw NetworkException ) : : - > session - > param { } - > ( )
reciveAuthenticateOK ( throw AuthenticateException ) : : ddd - > session - > ( )
interfaces as follows:
openNodeSession = do
sendAuthenticateStart username (throw NetworkException) :: aaa -> session -> param1 -> ()
salt <- recieveSalt (throw NetworkException) :: bbb -> session -> ByteString
sendAuthenticateContinue schema user salt password (throw NetworkException) :: ccc -> session -> param{ } -> ()
reciveAuthenticateOK (throw AuthenticateException) :: ddd -> session -> ()
-}
let salt = S.toStrict $ PAC.auth_data x
Using NodeSession and making ReaderT
writeMessage :: (PBT.TextMsg msg
,PBR.ReflectDescriptor msg
,PBW.Wire msg
,Show msg
,Typeable msg
,MonadIO m ) => NodeSession -> msg -> m ()
writeMessage NodeSession{..} msg = do
liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes)
where
bytes = PBW.messagePut msg
len = fromIntegral $ PBW.messageSize msg
ty = putMessageType $ fromIntegral $ getClientMsgTypeNo msg
sendExpectClose :: (MonadIO m) => ReaderT NodeSession m ()
sendExpectClose = do
nodeSess <- ask
liftIO $ writeExpectClose nodeSess
writeExpectClose NodeSession{..} = do
liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes)
where
bytes = PBW.messagePut mkClose
len = fromIntegral 0
ty = putMessageType $ fromIntegral 25
writeMessageR :: (PBT.TextMsg msg
,PBR.ReflectDescriptor msg
,PBW.Wire msg
,Show msg
,Typeable msg
,MonadIO m ) => msg -> ReaderT NodeSession m ()
writeMessageR msg = do
session <- ask
liftIO $ writeMessage session msg
getErrorR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PE.Error
getErrorR = readOneMessageR >>= \(_, msg) -> getError msg
getFrameR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PFr.Frame
getFrameR = readOneMessageR >>= \(_, msg) -> getFrame msg
getAuthenticateContinueR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PAC.AuthenticateContinue
getAuthenticateContinueR = readOneMessageR >>= \(_, msg) -> getAuthenticateContinue msg
getOkR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok
getOkR = readOneMessageR >>= \(_, msg) -> getOk msg
getOneMessageR :: (MonadIO m
,MonadThrow m
,PBW.Wire a
,PBR.ReflectDescriptor a
,PBT.TextMsg a
,Typeable a) => ReaderT NodeSession m a
getOneMessageR = do
session <- ask
(_, msg) <- liftIO $ readOneMessage session
getMessage msg
readMessages :: (MonadIO m) => NodeSession -> m [Message]
readMessages NodeSession{..} = do
len <- runReaderT readMsgLengthR _socket
debug $ " 1st length = " + + ( show $ getIntFromLE len )
ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket
return ret
readMessagesEither :: (MonadIO m) => NodeSession -> m (Either [Message] [Message])
readMessagesEither NodeSession{..} = do
len <- runReaderT readMsgLengthR _socket
debug $ " 1st length = " + + ( show $ getIntFromLE len )
ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket
if hasError ret
Success
where hasError r = length (filterError r) >= 1
filterError xs = filter (\(t,m) -> t == s_error) xs
readMessagesR :: (MonadIO m) => ReaderT NodeSession m [Message]
readMessagesR = ask >>= liftIO . readMessages
repeatreadMessagesR :: (MonadIO m)
^ fst : Success messages , snd : Error messages
repeatreadMessagesR noError num acc = do
if num == 0
then return acc
else do
nodeSess <- ask
r <- readMessagesEither nodeSess
case r of
Left m -> if noError
then return (fst acc , m )
else repeatreadMessagesR noError (num-1) (fst acc , snd acc ++ m)
Right m -> repeatreadMessagesR noError (num-1) ((fst acc) ++ m , snd acc )
readOneMessage :: (MonadIO m) => NodeSession -> m Message
readOneMessage NodeSession{..} = runReaderT readOneMsgR _socket
readOneMessageR :: (MonadIO m) => ReaderT NodeSession m Message
readOneMessageR = ask >>= liftIO . readOneMessage
readNMessage :: (MonadIO m) => Int -> NodeSession -> m [Message]
readNMessage n NodeSession{..} = runReaderT (readNMsgR n) _socket
readNMessageR :: (MonadIO m) => Int -> ReaderT NodeSession m [Message]
readNMessageR n = ask >>= liftIO . readNMessage n
readSocketR :: (MonadIO m) => Int -> ReaderT Socket m B.ByteString
readSocketR len = ask >>= (\x -> liftIO $ recv x len)
readMsgLengthR :: (MonadIO m) => ReaderT Socket m B.ByteString
readMsgLengthR = readSocketR 4
readMsgTypeR :: (MonadIO m) => ReaderT Socket m B.ByteString
readMsgTypeR = readSocketR 1
readNextMsgR :: (MonadIO m) => Int -> ReaderT Socket m (B.ByteString, B.ByteString)
readNextMsgR len = do
bytes <- readSocketR (len + 4)
return $ if B.length bytes == len
then
(bytes, B.empty)
else
B.splitAt len bytes
readOneMsgR :: (MonadIO m) => ReaderT Socket m Message
readOneMsgR = do
l <- readMsgLengthR
t <- readMsgTypeR
m <- readSocketR $ fromIntegral $ (getIntFromLE l) -1
return (byte2Int t, m)
readNMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message]
readNMsgR n = sequence $ take n . repeat $ readOneMsgR
-- | Read every message of one server response, given @len@, the decoded
--   length field of the first pending message.  Recurses until the
--   trailing length field returned by 'readNextMsgR' is empty.
-- NOTE(review): this dump looks mangled -- the bare lines starting
-- "SQL_STMT_EXECUTE_OK is the last message ..." and "debug $ ..." were
-- presumably "--"-comments whose markers were stripped (and a "then"
-- keyword seems lost with them); kept byte-for-byte.  TODO confirm
-- against the original repository.
readAllMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message]
readAllMsgR len = do
    t <- readMsgTypeR
    let t' = byte2Int t
    if t' == s_sql_stmt_execute_ok
        SQL_STMT_EXECUTE_OK is the last message and has no data .
        return [(s_sql_stmt_execute_ok, B.empty)]
    else do
        debug $ " type= " + + ( show $ byte2Int t ) + + " , reading len= " + + ( show ( len-1 ` max ` 0 ) ) + + " , plus 4 byte "
        -- NOTE: the inner "len" shadows the parameter; it holds the NEXT
        -- message's raw length bytes, not this message's length.
        (msg, len) <- readNextMsgR (len-1)
        if B.null len
            then
                return [(t', msg)]
            else do
                msgs <- readAllMsgR $ fromIntegral $ getIntFromLE len
                return $ (t', msg): msgs
-- | Open a transaction on the session and return the rows-affected count.
--   (The "begen" spelling is a historic typo kept for API compatibility.)
begenTrxNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64
begenTrxNodeSession = doSimpleSessionStateChangeStmt "begin"

-- | Commit the current transaction; returns the rows-affected count.
commitNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64
commitNodeSession = doSimpleSessionStateChangeStmt "commit"

-- | Roll back the current transaction; returns the rows-affected count.
rollbackNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64
rollbackNodeSession = doSimpleSessionStateChangeStmt "rollback"
-- | Execute a parameterless state-changing SQL statement ("begin",
--   "commit", "rollback"); on an error frame, decode it and throw
--   'XProtocolError', otherwise return the rows-affected count from the
--   session-state-changed notice.
-- NOTE(review): as printed, "x" and "frm" below are unbound -- the dump
-- has apparently dropped the lines that read the server reply.  Kept
-- byte-for-byte; confirm against the original repository.
doSimpleSessionStateChangeStmt :: (MonadIO m, MonadThrow m) => String -> NodeSession -> m W.Word64
doSimpleSessionStateChangeStmt sql nodeSess = do
    runReaderT (writeMessageR (mkStmtExecuteSql sql [])) nodeSess
    if fst x == 1
        then do
            msg <- getError $ snd x
            throwM $ XProtocolError msg
        else do
            ssc <- getPayloadSessionStateChanged frm
            getRowsAffected ssc
-- | True when the session's underlying socket is still connected.
isSocketConnected :: NodeSession -> IO Bool
isSocketConnected sess = isConnected (_socket sess)
naming rule
( a ) client - > server message implementatin pattern
1 ) make pure function from some params to a PB object = = > hidden
ex )
mkAuthenticateStart
|
V
sendAuthenticateStart : : ( MonadIO m ) = > String - > ReaderT NodeSession m ( )
sendAuthenticateStart = writeMessageR . mkAuthenticateStart
( b ) server - > client message implemention patten
1 ) make pure function from ByteString to a PB object
ex ) getAuthenticateContinue : : B.ByteString - > PAC.AuthenticateContinue = = > hidden
getAuthenticateContinue ' = getMessage
2 ) make the above function to Reader Monad
3 ) make a function to get concrete data , not Protocol Buffer Objects = = > open
ex ) recieveSalt : : ( MonadIO m ) = > ReaderT NodeSession m B.ByteString
( c ) client - > server - > client message implementation
1 ) combine ( a ) and ( b ) so that we get a turn - around function between client and server .
naming rule
 (a) client -> server message implementation pattern
1) make pure function from some params to a PB object ==> hidden
2) make the above function to Reader Monad
ex)
mkAuthenticateStart
|
V
sendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m ()
sendAuthenticateStart = writeMessageR . mkAuthenticateStart
 (b) server -> client message implementation pattern
1) make pure function from ByteString to a PB object
ex) getAuthenticateContinue :: B.ByteString -> PAC.AuthenticateContinue ==> hidden
getAuthenticateContinue' = getMessage
2) make the above function to Reader Monad
3) make a function to get concrete data, not Protocol Buffer Objects ==> open
ex) recieveSalt :: (MonadIO m) => ReaderT NodeSession m B.ByteString
(c) client -> server -> client message implementation
1) combine (a) and (b) so that we get a turn-around function between client and server.
-}
|
146dd9da32755c31eb2b8d40877a8ded50fee86dfa05ee58556bc2cf291c02bf | rubenbarroso/EOPL | 3_34.scm | (load "/Users/ruben/Dropbox/EOPL/src/interps/r5rs.scm")
(load "/Users/ruben/Dropbox/EOPL/src/interps/define-datatype.scm")
(load "/Users/ruben/Dropbox/EOPL/src/interps/sllgen.scm")
;; Nameless (lexical-address) environments: a chain of frames, each frame
;; holding a vector of values.  Depth selects the frame, position selects
;; the slot inside that frame's vector.
(define-datatype environment nameless-environment?
  (empty-nameless-env-record)
  (extended-nameless-env-record
    (vals vector?)
    (env nameless-environment?)))

;; Create an empty environment (no frames).
(define empty-nameless-env
  (lambda ()
    (empty-nameless-env-record)))

;; Push a new frame holding VALS.  NOTE: VALS must be a list, not a
;; vector -- list->vector is applied unconditionally here.
(define extend-nameless-env
  (lambda (vals env)
    (extended-nameless-env-record (list->vector vals) env)))
;; Build a frame for mutually recursive procedures: allocate the vector
;; first, close every BODY over the very environment that contains that
;; vector, then fill the slots -- the circularity is what makes letrec
;; work.  NOTE: relies on iota (SRFI-1 style, from the loaded support
;; code) and on the closure constructor defined later in this file.
(define extend-nameless-env-recursively
  (lambda (proc-names bodies old-env)
    (let ((len (length proc-names)))
      (let ((vec (make-vector len)))
        (let ((env (extended-nameless-env-record
                     vec old-env)))
          (for-each
            (lambda (pos body)
              (vector-set! vec pos (closure body env)))
            (iota len) bodies)
          env)))))
;> (apply-nameless-env
; (extend-nameless-env-recursively
; '(even odd)
; '((var-exp h) (var-exp j))
; (extend-nameless-env
' ( 5 28 )
; (empty-nameless-env)))
1 1 )
28
;; Fetch the value stored at (DEPTH, POS): walk DEPTH frames outward,
;; then index that frame's vector at POS.  A position of -1 marks a
;; variable the translator classified as free -- a run-time error here.
(define apply-nameless-env
  (lambda (env depth pos)
    (if (= pos -1)
        (eopl:error 'apply-nameless-env
                    "Error accessing free variable at (~s ~s)"
                    depth pos))
    (cases environment env
      (empty-nameless-env-record ()
        ;; BUG FIX: this branch previously referenced an unbound variable
        ;; "sym"; report the failing lexical address instead.
        (eopl:error 'apply-nameless-env
                    "No binding at depth ~s position ~s" depth pos))
      (extended-nameless-env-record (vals env)
        (if (= depth 0)
            (vector-ref vals pos)
            (apply-nameless-env env (- depth 1) pos))))))
;; Lexer spec: skip whitespace and %-comments; identifiers may contain "?".
(define scanner-spec-3-13
  '((white-sp
      (whitespace) skip)
    (comment
      ("%" (arbno (not #\newline))) skip)
    (identifier
      (letter (arbno (or letter digit "?"))) symbol)
    (number
      (digit (arbno digit)) number)))

;; Grammar: the chapter-3 language plus lexvar-exp, the run-time form a
;; var-exp is rewritten into once its (depth position) address is known.
(define grammar-3-13
  '((program
      (expression)
      a-program)
    (expression
      (number)
      lit-exp)
    (expression
      (identifier)
      var-exp)
    (expression
      ("lexvar" "(" number number ")")
      lexvar-exp)
    (expression
      (primitive "(" (separated-list expression ",") ")")
      primapp-exp)
    (expression
      ("if" expression "then" expression "else" expression)
      if-exp)
    (expression
      ("let" (arbno identifier "=" expression) "in" expression)
      let-exp)
    (expression
      ("letrec" (arbno identifier "(" (separated-list identifier ",") ")" "=" expression) "in" expression)
      letrec-exp)
    (expression
      ("proc" "(" (separated-list identifier ",") ")" expression)
      proc-exp)
    (expression
      ("(" expression (arbno expression) ")")
      app-exp)
    (primitive
      ("+")
      add-prim)
    (primitive
      ("-")
      substract-prim)
    (primitive
      ("*")
      mult-prim)
    (primitive
      ("add1")
      incr-prim)
    (primitive
      ("sub1")
      decr-prim)
    (primitive
      ("equal?")
      equal-prim)
    (primitive
      ("zero?")
      zero-prim)
    (primitive
      ("greater?")
      greater-prim)
    (primitive
      ("less?")
      less-prim)))

;; Parser built from the spec and grammar above.
(define scan&parse
  (sllgen:make-string-parser
    scanner-spec-3-13
    grammar-3-13))

;; Generate the define-datatype declarations for the AST node types.
(sllgen:make-define-datatypes scanner-spec-3-13 grammar-3-13)

;; Top-level driver: parse, translate to lexical addresses, evaluate.
(define run
  (lambda (string)
    (eval-program
      (lexical-address-calc
        (scan&parse string)))))
;helpers
;; Booleans are encoded as numbers: any non-zero value counts as true.
(define (true-value? x)
  (if (zero? x) #f #t))
; the interpreter
;; Evaluate a parsed (and address-translated) program in the three-value
;; initial environment built by init-nameless-env.
(define eval-program
  (lambda (pgm)
    (cases program pgm
      (a-program (body)
        (eval-expression body (init-nameless-env))))))
;; Evaluate EXP in nameless environment ENV.  All variable references must
;; have been rewritten into lexvar-exp (depth position) by
;; lexical-address-calc; a surviving var-exp is a translator bug.
(define eval-expression
  (lambda (exp env)
    (cases expression exp
      (lit-exp (datum) datum)
      (var-exp (id) (eopl:error
                      'eval-expression
                      "var-exp should not appear in the instrumented interpreter"))
      (lexvar-exp (depth pos) (apply-nameless-env env depth pos))
      (primapp-exp (prim rands)
        (let ((args (eval-rands rands env)))
          (apply-primitive prim args)))
      (if-exp (test-exp true-exp false-exp)
        (if (true-value? (eval-expression test-exp env))
            (eval-expression true-exp env)
            (eval-expression false-exp env)))
      (let-exp (ids rands body)
        ;; BUG FIX: extend-nameless-env converts its argument with
        ;; list->vector itself; the old code passed (list->vector args),
        ;; handing it a vector and breaking that conversion.  Pass the
        ;; plain list, exactly as the app-exp path (via apply-procval)
        ;; does.
        (let ((args (eval-rands rands env)))
          (eval-expression body (extend-nameless-env args env))))
      (proc-exp (ids body) (closure body env))
      (app-exp (rator rands)
        (let ((proc (eval-expression rator env))
              (args (eval-rands rands env)))
          (if (procval? proc)
              (apply-procval proc args)
              (eopl:error 'eval-expression
                          "Attempt to apply a non-procedure ~s" proc))))
      (letrec-exp (proc-names idss bodies letrec-body)
        (eval-expression
          letrec-body
          (extend-nameless-env-recursively
            proc-names bodies env))))))
;; Evaluate each operand in ENV (map order: left to right).
(define (eval-rands rands env)
  (map (lambda (rand) (eval-rand rand env)) rands))

;; Evaluate a single operand.
(define (eval-rand rand env)
  (eval-expression rand env))
;; Apply a primitive to already-evaluated arguments.  Comparison
;; primitives encode their boolean result numerically: 1 true, 0 false.
(define apply-primitive
  (lambda (prim args)
    (cases primitive prim
      (add-prim () (+ (car args) (cadr args)))
      (substract-prim () (- (car args) (cadr args)))
      (mult-prim () (* (car args) (cadr args)))
      (incr-prim () (+ (car args) 1))
      (decr-prim () (- (car args) 1))
      (equal-prim () (if (= (car args) (cadr args)) 1 0))
      (zero-prim () (if (zero? (car args)) 1 0))
      (greater-prim () (if (> (car args) (cadr args)) 1 0))
      (less-prim () (if (< (car args) (cadr args)) 1 0)))))
;; A procedure value: the translated body plus its captured environment.
;; Formal names are gone at this stage -- arguments are reached purely by
;; lexical address.
(define-datatype procval procval?
  (closure
    (body expression?)
    (env nameless-environment?)))

;; Call a closure: evaluate its body in the saved environment extended
;; with one new frame holding the argument list.
(define apply-procval
  (lambda (proc args)
    (cases procval proc
      (closure (body env)
        (eval-expression body
                         (extend-nameless-env args env))))))
;; The initial environment: one frame holding the values 1, 5 and 10
;; (presumably the textbook's standard bindings for i, v and x -- only
;; the values appear here, so confirm the intended names upstream).
(define init-nameless-env
  (lambda ()
    (extend-nameless-env
      '(1 5 10)
      (empty-nameless-env))))
;Helper procedures from exercise 1.31
;; A lexical address is the list (v : d p) -- variable name, a ":" marker
;; symbol, frame depth, and position within the frame.
(define (make-lexical-address v d p)
  (list v ': d p))

;; Variable name of an address.
(define (get-v address)
  (car address))

;; Frame depth of an address.
(define (get-d address)
  (caddr address))

;; Position within the frame.
(define (get-p address)
  (cadddr address))

;; The same address one contour further out (depth + 1).
(define (increment-depth address)
  (make-lexical-address (get-v address)
                        (+ 1 (get-d address))
                        (get-p address)))
;; Return the address in ADDRESSES whose variable is EXP, or a sentinel
;; address (EXP -1 -1) marking EXP as free.
(define get-lexical-address
  (lambda (exp addresses)
    (define iter
      (lambda (lst)
        (cond ((null? lst) (make-lexical-address exp -1 -1))
              ((eqv? exp (get-v (car lst))) (car lst))
              ;; FIX: recurse through iter itself; the old code restarted
              ;; via the outer function, defeating the local helper.
              (else (iter (cdr lst))))))
    (iter addresses)))
;; Zero-based position of V in DECLARATIONS, or the symbol 'free when V
;; is not declared.
(define (index-of v declarations)
  (let loop ((lst declarations) (index 0))
    (cond ((null? lst) 'free)
          ((eqv? (car lst) v) index)
          (else (loop (cdr lst) (+ index 1))))))
;; Entering a new binding contour: fresh depth-0 addresses for the newly
;; declared names, followed by the surviving outer addresses pushed one
;; level deeper.
(define cross-contour
  (lambda (declarations addresses)
    (let ((bound (filter-bound declarations))
          (free (filter-free declarations addresses)))
      (append bound free))))

;; Depth-0 addresses for the declared names, positioned in declaration
;; order.
(define filter-bound
  (lambda (declarations)
    (map (lambda (decl)
           (make-lexical-address decl
                                 0
                                 (index-of decl declarations)))
         declarations)))

;; Outer-scope addresses not shadowed by the new declarations, each one
;; level deeper.
(define filter-free
  (lambda (declarations addresses)
    (define iter
      (lambda (lst)
        (cond ((null? lst) '())
              ((not (memq (get-v (car lst)) declarations))
               (cons (increment-depth (car lst))
                     (iter (cdr lst))))
              (else (iter (cdr lst))))))
    (iter addresses)))
;; Core translation pass: rewrite every var-exp into a lexvar-exp carrying
;; its (depth position) relative to ADDRESSES, threading cross-contour
;; through every binding form (let, proc, letrec).
(define lexical-address-calc-helper
  (lambda (exp addresses)
    (cases expression exp
      (lit-exp (datum)
        (lit-exp datum))
      (var-exp (id)
        ;; Unknown names come back as (-1 -1) and fail at run time.
        (let ((lexical-address (get-lexical-address id addresses)))
          (lexvar-exp (get-d lexical-address)
                      (get-p lexical-address))))
      (lexvar-exp (depth pos)
        (lexvar-exp depth pos))
      (primapp-exp (prim rands)
        (primapp-exp prim
                     (map (lambda (rand)
                            (lexical-address-calc-helper rand addresses))
                          rands)))
      (if-exp (test-exp true-exp false-exp)
        (if-exp (lexical-address-calc-helper test-exp addresses)
                (lexical-address-calc-helper true-exp addresses)
                (lexical-address-calc-helper false-exp addresses)))
      (let-exp (ids rands body)
        ;; let: operands see the outer addresses; only the body sees ids.
        (let-exp ids
                 (map (lambda (rand)
                        (lexical-address-calc-helper rand addresses))
                      rands)
                 (lexical-address-calc-helper
                   body
                   (cross-contour ids addresses))))
      (proc-exp (ids body)
        (proc-exp ids
                  (lexical-address-calc-helper
                    body
                    (cross-contour ids addresses))))
      (app-exp (rator rands)
        (app-exp (lexical-address-calc-helper
                   rator
                   addresses)
                 (map (lambda (rand)
                        (lexical-address-calc-helper rand addresses))
                      rands)))
      (letrec-exp (proc-names idss bodies letrec-body)
        ;; letrec: the procedure names are visible in every body and in
        ;; the letrec body; each body additionally sees its own formals.
        (let ((new-addresses (cross-contour proc-names addresses)))
          (letrec-exp proc-names
                      idss
                      (map (lambda (ids body)
                             (lexical-address-calc-helper
                               body
                               (cross-contour ids new-addresses)))
                           idss
                           bodies)
                      (lexical-address-calc-helper
                        letrec-body
                        new-addresses)))))))
;; Fold cross-contour over the procedure names and then over each formals
;; list in turn.
;; NOTE(review): nothing in this file calls this procedure --
;; lexical-address-calc-helper handles letrec on its own above; this looks
;; like a leftover from an earlier draft.  Confirm before removing.
(define letrec-cross-contour
  (lambda (proc-names idss addresses)
    (define iter
      (lambda (the-ids the-addresses)
        (if (null? the-ids)
            the-addresses
            (iter (cdr the-ids) (cross-contour (car the-ids) the-addresses)))))
    (iter idss (cross-contour proc-names addresses))))
;; Entry point of the translation: rebuild the program with every variable
;; reference replaced by its lexical address, starting from an empty
;; address list (the initial environment's values are not named here).
(define lexical-address-calc
  (lambda (pgm)
    (a-program
      (cases program pgm
        (a-program (body)
          (lexical-address-calc-helper body '()))))))
;> (lexical-address-calc
; (scan&parse
; "letrec
even(x ) = if ) then 1 else ( odd ) )
odd(x ) = if ) then 0 else ( even ) )
; in (odd 13)"))
;(a-program
( letrec - exp
; (even odd)
; ((x) (x))
; ((if-exp
( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) )
; (lit-exp 1)
; (app-exp
; (lexvar-exp 1 1)
; ((primapp-exp (decr-prim) ((lexvar-exp 0 0))))))
; (if-exp
( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) )
; (lit-exp 0)
; (app-exp
; (lexvar-exp 1 0)
; ((primapp-exp (decr-prim) ((lexvar-exp 0 0)))))))
; (app-exp (lexvar-exp 0 1) ((lit-exp 13)))))
;
;> (run
; "letrec
even(x ) = if ) then 1 else ( odd ) )
odd(x ) = if ) then 0 else ( even ) )
; in (odd 13)")
1
;> (run
; "letrec
even(x ) = if ) then 1 else ( odd ) )
odd(x ) = if ) then 0 else ( even ) )
in ( even 13 ) " )
0
;> (run
; "letrec
fact(x ) = if ) then 1 else * ( x,(fact ) ) )
in ( fact 6 ) " )
720
| null | https://raw.githubusercontent.com/rubenbarroso/EOPL/f9b3c03c2fcbaddf64694ee3243d54be95bfe31d/src/chapter3/3_34.scm | scheme | > (apply-nameless-env
(extend-nameless-env-recursively
'(even odd)
'((var-exp h) (var-exp j))
(extend-nameless-env
(empty-nameless-env)))
helpers
the interpreter
Helper procedures from exercise 1.31
> (lexical-address-calc
(scan&parse
"letrec
in (odd 13)"))
(a-program
(even odd)
((x) (x))
((if-exp
(lit-exp 1)
(app-exp
(lexvar-exp 1 1)
((primapp-exp (decr-prim) ((lexvar-exp 0 0))))))
(if-exp
(lit-exp 0)
(app-exp
(lexvar-exp 1 0)
((primapp-exp (decr-prim) ((lexvar-exp 0 0)))))))
(app-exp (lexvar-exp 0 1) ((lit-exp 13)))))
> (run
"letrec
in (odd 13)")
> (run
"letrec
> (run
"letrec | (load "/Users/ruben/Dropbox/EOPL/src/interps/r5rs.scm")
(load "/Users/ruben/Dropbox/EOPL/src/interps/define-datatype.scm")
(load "/Users/ruben/Dropbox/EOPL/src/interps/sllgen.scm")
(define-datatype environment nameless-environment?
(empty-nameless-env-record)
(extended-nameless-env-record
(vals vector?)
(env nameless-environment?)))
(define empty-nameless-env
(lambda ()
(empty-nameless-env-record)))
(define extend-nameless-env
(lambda (vals env)
(extended-nameless-env-record (list->vector vals) env)))
(define extend-nameless-env-recursively
(lambda (proc-names bodies old-env)
(let ((len (length proc-names)))
(let ((vec (make-vector len)))
(let ((env (extended-nameless-env-record
vec old-env)))
(for-each
(lambda (pos body)
(vector-set! vec pos (closure body env)))
(iota len) bodies)
env)))))
' ( 5 28 )
1 1 )
28
;; Fetch the value stored at (DEPTH, POS): walk DEPTH frames outward,
;; then index that frame's vector at POS.  A position of -1 marks a
;; variable the translator classified as free -- a run-time error here.
(define apply-nameless-env
  (lambda (env depth pos)
    (if (= pos -1)
        (eopl:error 'apply-nameless-env
                    "Error accessing free variable at (~s ~s)"
                    depth pos))
    (cases environment env
      (empty-nameless-env-record ()
        ;; BUG FIX: this branch previously referenced an unbound variable
        ;; "sym"; report the failing lexical address instead.
        (eopl:error 'apply-nameless-env
                    "No binding at depth ~s position ~s" depth pos))
      (extended-nameless-env-record (vals env)
        (if (= depth 0)
            (vector-ref vals pos)
            (apply-nameless-env env (- depth 1) pos))))))
(define scanner-spec-3-13
'((white-sp
(whitespace) skip)
(comment
("%" (arbno (not #\newline))) skip)
(identifier
(letter (arbno (or letter digit "?"))) symbol)
(number
(digit (arbno digit)) number)))
(define grammar-3-13
'((program
(expression)
a-program)
(expression
(number)
lit-exp)
(expression
(identifier)
var-exp)
(expression
("lexvar" "(" number number ")")
lexvar-exp)
(expression
(primitive "(" (separated-list expression ",") ")")
primapp-exp)
(expression
("if" expression "then" expression "else" expression)
if-exp)
(expression
("let" (arbno identifier "=" expression) "in" expression)
let-exp)
(expression
("letrec" (arbno identifier "(" (separated-list identifier ",") ")" "=" expression) "in" expression)
letrec-exp)
(expression
("proc" "(" (separated-list identifier ",") ")" expression)
proc-exp)
(expression
("(" expression (arbno expression) ")")
app-exp)
(primitive
("+")
add-prim)
(primitive
("-")
substract-prim)
(primitive
("*")
mult-prim)
(primitive
("add1")
incr-prim)
(primitive
("sub1")
decr-prim)
(primitive
("equal?")
equal-prim)
(primitive
("zero?")
zero-prim)
(primitive
("greater?")
greater-prim)
(primitive
("less?")
less-prim)))
(define scan&parse
(sllgen:make-string-parser
scanner-spec-3-13
grammar-3-13))
(sllgen:make-define-datatypes scanner-spec-3-13 grammar-3-13)
(define run
(lambda (string)
(eval-program
(lexical-address-calc
(scan&parse string)))))
(define true-value?
(lambda (x)
(not (zero? x))))
(define eval-program
(lambda (pgm)
(cases program pgm
(a-program (body)
(eval-expression body (init-nameless-env))))))
;; Evaluate EXP in nameless environment ENV.  All variable references must
;; have been rewritten into lexvar-exp (depth position) by
;; lexical-address-calc; a surviving var-exp is a translator bug.
(define eval-expression
  (lambda (exp env)
    (cases expression exp
      (lit-exp (datum) datum)
      (var-exp (id) (eopl:error
                      'eval-expression
                      "var-exp should not appear in the instrumented interpreter"))
      (lexvar-exp (depth pos) (apply-nameless-env env depth pos))
      (primapp-exp (prim rands)
        (let ((args (eval-rands rands env)))
          (apply-primitive prim args)))
      (if-exp (test-exp true-exp false-exp)
        (if (true-value? (eval-expression test-exp env))
            (eval-expression true-exp env)
            (eval-expression false-exp env)))
      (let-exp (ids rands body)
        ;; BUG FIX: extend-nameless-env converts its argument with
        ;; list->vector itself; the old code passed (list->vector args),
        ;; handing it a vector and breaking that conversion.  Pass the
        ;; plain list, exactly as the app-exp path (via apply-procval)
        ;; does.
        (let ((args (eval-rands rands env)))
          (eval-expression body (extend-nameless-env args env))))
      (proc-exp (ids body) (closure body env))
      (app-exp (rator rands)
        (let ((proc (eval-expression rator env))
              (args (eval-rands rands env)))
          (if (procval? proc)
              (apply-procval proc args)
              (eopl:error 'eval-expression
                          "Attempt to apply a non-procedure ~s" proc))))
      (letrec-exp (proc-names idss bodies letrec-body)
        (eval-expression
          letrec-body
          (extend-nameless-env-recursively
            proc-names bodies env))))))
(define eval-rands
(lambda (rands env)
(map (lambda (x) (eval-rand x env)) rands)))
(define eval-rand
(lambda (rand env)
(eval-expression rand env)))
(define apply-primitive
(lambda (prim args)
(cases primitive prim
(add-prim () (+ (car args) (cadr args)))
(substract-prim () (- (car args) (cadr args)))
(mult-prim () (* (car args) (cadr args)))
(incr-prim () (+ (car args) 1))
(decr-prim () (- (car args) 1))
(equal-prim () (if (= (car args) (cadr args)) 1 0))
(zero-prim () (if (zero? (car args)) 1 0))
(greater-prim () (if (> (car args) (cadr args)) 1 0))
(less-prim () (if (< (car args) (cadr args)) 1 0)))))
(define-datatype procval procval?
(closure
(body expression?)
(env nameless-environment?)))
(define apply-procval
(lambda (proc args)
(cases procval proc
(closure (body env)
(eval-expression body
(extend-nameless-env args env))))))
(define init-nameless-env
(lambda ()
(extend-nameless-env
'(1 5 10)
(empty-nameless-env))))
(define make-lexical-address
(lambda (v d p)
(list v ': d p)))
(define get-v
(lambda (address)
(car address)))
(define get-d
(lambda (address)
(caddr address)))
(define get-p
(lambda (address)
(cadddr address)))
(define increment-depth
(lambda (address)
(make-lexical-address (get-v address)
(+ 1 (get-d address))
(get-p address))))
(define get-lexical-address
(lambda (exp addresses)
(define iter
(lambda (lst)
(cond ((null? lst) (make-lexical-address exp -1 -1))
((eqv? exp (get-v (car lst))) (car lst))
(else (get-lexical-address exp (cdr lst))))))
(iter addresses)))
(define index-of
(lambda (v declarations)
(define helper
(lambda (lst index)
(cond ((null? lst) 'free)
((eqv? (car lst) v) index)
(else (helper (cdr lst) (+ index 1))))))
(helper declarations 0)))
(define cross-contour
(lambda (declarations addresses)
(let ((bound (filter-bound declarations))
(free (filter-free declarations addresses)))
(append bound free))))
(define filter-bound
(lambda (declarations)
(map (lambda (decl)
(make-lexical-address decl
0
(index-of decl declarations)))
declarations)))
(define filter-free
(lambda (declarations addresses)
(define iter
(lambda (lst)
(cond ((null? lst) '())
((not (memq (get-v (car lst)) declarations))
(cons (increment-depth (car lst))
(iter (cdr lst))))
(else (iter (cdr lst))))))
(iter addresses)))
(define lexical-address-calc-helper
(lambda (exp addresses)
(cases expression exp
(lit-exp (datum)
(lit-exp datum))
(var-exp (id)
(let ((lexical-address (get-lexical-address id addresses)))
(lexvar-exp (get-d lexical-address)
(get-p lexical-address))))
(lexvar-exp (depth pos)
(lexvar-exp depth pos))
(primapp-exp (prim rands)
(primapp-exp prim
(map (lambda (rand)
(lexical-address-calc-helper rand addresses))
rands)))
(if-exp (test-exp true-exp false-exp)
(if-exp (lexical-address-calc-helper test-exp addresses)
(lexical-address-calc-helper true-exp addresses)
(lexical-address-calc-helper false-exp addresses)))
(let-exp (ids rands body)
(let-exp ids
(map (lambda (rand)
(lexical-address-calc-helper rand addresses))
rands)
(lexical-address-calc-helper
body
(cross-contour ids addresses))))
(proc-exp (ids body)
(proc-exp ids
(lexical-address-calc-helper
body
(cross-contour ids addresses))))
(app-exp (rator rands)
(app-exp (lexical-address-calc-helper
rator
addresses)
(map (lambda (rand)
(lexical-address-calc-helper rand addresses))
rands)))
(letrec-exp (proc-names idss bodies letrec-body)
(let ((new-addresses (cross-contour proc-names addresses)))
(letrec-exp proc-names
idss
(map (lambda (ids body)
(lexical-address-calc-helper
body
(cross-contour ids new-addresses)))
idss
bodies)
(lexical-address-calc-helper
letrec-body
new-addresses)))))))
(define letrec-cross-contour
(lambda (proc-names idss addresses)
(define iter
(lambda (the-ids the-addresses)
(if (null? the-ids)
the-addresses
(iter (cdr the-ids) (cross-contour (car the-ids) the-addresses)))))
(iter idss (cross-contour proc-names addresses))))
(define lexical-address-calc
(lambda (pgm)
(a-program
(cases program pgm
(a-program (body)
(lexical-address-calc-helper body '()))))))
even(x ) = if ) then 1 else ( odd ) )
odd(x ) = if ) then 0 else ( even ) )
( letrec - exp
( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) )
( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) )
even(x ) = if ) then 1 else ( odd ) )
odd(x ) = if ) then 0 else ( even ) )
1
even(x ) = if ) then 1 else ( odd ) )
odd(x ) = if ) then 0 else ( even ) )
in ( even 13 ) " )
0
fact(x ) = if ) then 1 else * ( x,(fact ) ) )
in ( fact 6 ) " )
720
|
ad9c4785c7b740ba34b9f2dc0d64d14cbfdc8c0ec6ff040bd568e927bec1d9cd | jappeace/awesome-project-name | frontend.hs | module Main where
import qualified Awe.Front.Main as App
import Reflex.Dom
-- | Mount the Reflex frontend with an empty initial state.
main :: IO ()
main = mainWidget (App.main (App.IniState Nothing))
| null | https://raw.githubusercontent.com/jappeace/awesome-project-name/e80a52dc2673c748a922ec19945cf75368aa3a53/frontend/app/frontend.hs | haskell | module Main where
import qualified Awe.Front.Main as App
import Reflex.Dom
-- | Mount the Reflex frontend with an empty initial state.
main :: IO ()
main = mainWidget (App.main (App.IniState Nothing))
|
|
0b661532ff59e0c8aa19c3913ebeb1f8d4a5a1d3f696805485357057203940b2 | haskell-gi/gi-gtk-examples | FastDraw.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE ScopedTypeVariables #
{-# OPTIONS -O #-}
-- Example of an drawing graphics onto a canvas.
import Control.Applicative
import Prelude
import Data.IORef
import Graphics.Rendering.Cairo
import Foreign (allocaArray)
import Graphics.Rendering.Cairo.Types (Cairo(..), PixelData)
import Foreign.Storable (Storable(..))
import Foreign.C (CUChar)
import qualified GI.Gtk as GI (init)
import GI.Gtk
(dialogRun, widgetShow, boxPackStart, onWidgetDraw,
widgetQueueDraw, setWidgetHeightRequest, setWidgetWidthRequest,
drawingAreaNew, dialogGetContentArea, dialogAddButton,
dialogNew)
import GI.Gtk.Enums (ResponseType(..))
import GI.GLib (pattern PRIORITY_LOW, idleAdd)
import GI.Cairo.Structs.Context (Context(..))
import Control.Monad.Trans.Reader (runReaderT)
import Foreign.Ptr (castPtr)
import Graphics.Rendering.Cairo.Internal (Render(..))
import Data.GI.Base.ManagedPtr (withManagedPtr)
-- Build the demo dialog: a 256x256 drawing area whose RGB pixel buffer
-- is mutated from an idle handler, producing an animated colour sweep.
-- NOTE(review): several bare lines below ("create the Pixbuf",
-- "draw into the Pixbuf", "a function to update the Pixbuf",
-- "the Pixbuf") were presumably "--"-comments whose markers were
-- stripped by whatever produced this dump; kept verbatim.
main = do
    GI.init Nothing
    dia <- dialogNew
    dialogAddButton dia "_OK" (fromIntegral $ fromEnum ResponseTypeOk)
    contain <- dialogGetContentArea dia
    canvas <- drawingAreaNew
    -- Geometry: 256x256 pixels, 4 bytes per pixel (cairo RGB24 layout).
    let w = 256
        h = 256
        chan = 4
        row = w * chan
        stride = row
    setWidgetWidthRequest canvas 256
    setWidgetHeightRequest canvas 256
    create the Pixbuf
    allocaArray (w * h * chan) $ \ pbData -> do
        draw into the Pixbuf
        -- Initial pattern: red follows x, green follows y, blue starts 0.
        doFromTo 0 (h-1) $ \y ->
            doFromTo 0 (w-1) $ \x -> do
                pokeByteOff pbData (2+x*chan+y*row) (fromIntegral x :: CUChar)
                pokeByteOff pbData (1+x*chan+y*row) (fromIntegral y :: CUChar)
                pokeByteOff pbData (0+x*chan+y*row) (0 :: CUChar)
        a function to update the Pixbuf
        -- Blue value and sweep direction shared with the idle callback.
        blueRef <- newIORef (0 :: CUChar)
        dirRef <- newIORef True
        let updateBlue = do
                blue <- readIORef blueRef
                -- print blue
                doFromTo 0 (h-1) $ \y ->
                    doFromTo 0 (w-1) $ \x ->
                        pokeByteOff pbData (0+x*chan+y*row) blue -- unchecked indexing
                -- arrange for the canvas to be redrawn now that we've changed
                the Pixbuf
                widgetQueueDraw canvas
                -- update the blue state ready for next time
                dir <- readIORef dirRef
                let diff = 1
                let blue' = if dir then blue+diff else blue-diff
                -- Bounce between minBound and maxBound without wrapping.
                if dir then
                    if blue<=maxBound-diff then writeIORef blueRef blue' else
                        writeIORef blueRef maxBound >> modifyIORef dirRef not
                else
                    if blue>=minBound+diff then writeIORef blueRef blue' else
                        writeIORef blueRef minBound >> modifyIORef dirRef not
                return True
        idleAdd PRIORITY_LOW updateBlue
        -- Bridge gi-cairo's Context into the cairo Render monad.
        onWidgetDraw canvas $ \(Context fp) -> withManagedPtr fp $ \p -> (`runReaderT` Cairo (castPtr p)) $ runRender $ do
            updateCanvas pbData w h stride
            return True
        boxPackStart contain canvas True True 0
        widgetShow canvas
        dialogRun dia
        return ()
-- | Wrap the raw pixel buffer in a cairo image surface and paint it onto
--   the current target.
updateCanvas :: PixelData -> Int -> Int -> Int -> Render ()
updateCanvas pb w h stride = do
    surface <- liftIO (createImageSurfaceForData pb FormatRGB24 w h stride)
    setSourceSurface surface 0 0
    paint
GHC is much better at opimising loops like this :
--
> doFromTo 0 255 $ \y - >
> doFromTo 0 255 $ \x - > do ...
--
-- Than it is at optimising loops like this:
--
-- > sequence_ [ do ...
> | x < - [ 0 .. 255 ]
> , y < - [ 0 .. 255 ] ]
--
The first kind of loop runs significantly faster ( with GHC 6.2 and 6.4 )
# INLINE doFromTo #
-- do the action for [from..to], ie it's inclusive.
-- | Run the action for every Int in [from..to], inclusive.  Kept as an
--   explicit hand-rolled loop on purpose: the file's note explains GHC
--   optimises this form better than list-based traversals.
doFromTo :: Int -> Int -> (Int -> IO ()) -> IO ()
doFromTo from to action = go from
  where
    go n
      | n > to    = return ()
      | otherwise = action n >> go (n + 1)
| null | https://raw.githubusercontent.com/haskell-gi/gi-gtk-examples/4c4f06dc91fbb9b9f50cdad295c8afe782e0bdec/fastdraw/FastDraw.hs | haskell | # LANGUAGE OverloadedStrings #
# OPTIONS -O #
Example of an drawing graphics onto a canvas.
print blue
unchecked indexing
arrange for the canvas to be redrawn now that we've changed
update the blue state ready for next time
Than it is at optimising loops like this:
> sequence_ [ do ...
do the action for [from..to], ie it's inclusive. | # LANGUAGE PatternSynonyms #
# LANGUAGE ScopedTypeVariables #
import Control.Applicative
import Prelude
import Data.IORef
import Graphics.Rendering.Cairo
import Foreign (allocaArray)
import Graphics.Rendering.Cairo.Types (Cairo(..), PixelData)
import Foreign.Storable (Storable(..))
import Foreign.C (CUChar)
import qualified GI.Gtk as GI (init)
import GI.Gtk
(dialogRun, widgetShow, boxPackStart, onWidgetDraw,
widgetQueueDraw, setWidgetHeightRequest, setWidgetWidthRequest,
drawingAreaNew, dialogGetContentArea, dialogAddButton,
dialogNew)
import GI.Gtk.Enums (ResponseType(..))
import GI.GLib (pattern PRIORITY_LOW, idleAdd)
import GI.Cairo.Structs.Context (Context(..))
import Control.Monad.Trans.Reader (runReaderT)
import Foreign.Ptr (castPtr)
import Graphics.Rendering.Cairo.Internal (Render(..))
import Data.GI.Base.ManagedPtr (withManagedPtr)
main = do
GI.init Nothing
dia <- dialogNew
dialogAddButton dia "_OK" (fromIntegral $ fromEnum ResponseTypeOk)
contain <- dialogGetContentArea dia
canvas <- drawingAreaNew
let w = 256
h = 256
chan = 4
row = w * chan
stride = row
setWidgetWidthRequest canvas 256
setWidgetHeightRequest canvas 256
create the Pixbuf
allocaArray (w * h * chan) $ \ pbData -> do
draw into the Pixbuf
doFromTo 0 (h-1) $ \y ->
doFromTo 0 (w-1) $ \x -> do
pokeByteOff pbData (2+x*chan+y*row) (fromIntegral x :: CUChar)
pokeByteOff pbData (1+x*chan+y*row) (fromIntegral y :: CUChar)
pokeByteOff pbData (0+x*chan+y*row) (0 :: CUChar)
a function to update the Pixbuf
blueRef <- newIORef (0 :: CUChar)
dirRef <- newIORef True
let updateBlue = do
blue <- readIORef blueRef
doFromTo 0 (h-1) $ \y ->
doFromTo 0 (w-1) $ \x ->
the Pixbuf
widgetQueueDraw canvas
dir <- readIORef dirRef
let diff = 1
let blue' = if dir then blue+diff else blue-diff
if dir then
if blue<=maxBound-diff then writeIORef blueRef blue' else
writeIORef blueRef maxBound >> modifyIORef dirRef not
else
if blue>=minBound+diff then writeIORef blueRef blue' else
writeIORef blueRef minBound >> modifyIORef dirRef not
return True
idleAdd PRIORITY_LOW updateBlue
onWidgetDraw canvas $ \(Context fp) -> withManagedPtr fp $ \p -> (`runReaderT` Cairo (castPtr p)) $ runRender $ do
updateCanvas pbData w h stride
return True
boxPackStart contain canvas True True 0
widgetShow canvas
dialogRun dia
return ()
-- | Wrap the raw pixel buffer in a cairo image surface and paint it onto
--   the current target.
updateCanvas :: PixelData -> Int -> Int -> Int -> Render ()
updateCanvas pb w h stride = do
    surface <- liftIO (createImageSurfaceForData pb FormatRGB24 w h stride)
    setSourceSurface surface 0 0
    paint
GHC is much better at opimising loops like this :
> doFromTo 0 255 $ \y - >
> doFromTo 0 255 $ \x - > do ...
> | x < - [ 0 .. 255 ]
> , y < - [ 0 .. 255 ] ]
The first kind of loop runs significantly faster ( with GHC 6.2 and 6.4 )
# INLINE doFromTo #
-- | Run the action for every Int in [from..to], inclusive.  Kept as an
--   explicit hand-rolled loop on purpose: the file's note explains GHC
--   optimises this form better than list-based traversals.
doFromTo :: Int -> Int -> (Int -> IO ()) -> IO ()
doFromTo from to action = go from
  where
    go n
      | n > to    = return ()
      | otherwise = action n >> go (n + 1)
|
3ccad40dd1db3c7b2e9ee962c405a8f537247873317933be62d5a75a48ee543c | spell-music/csound-expression | Pretty.hs | module Csound.Dynamic.Render.Pretty(
Doc, vcatSep,
ppCsdFile, ppGen, ppNotes, ppInstr, ppStmt, ppTotalDur,
PrettyE(..), PrettyShowE(..),
ppE
) where
import Control.Monad.Trans.State.Strict
import qualified Data.IntMap as IM
import Text.PrettyPrint.Leijen.Text
import Csound.Dynamic.Types
import Csound.Dynamic.Tfm.InferTypes qualified as R(Var(..))
import Data.Text (Text)
import Data.Text qualified as Text
import Text.Show.Pretty (ppShow)
import Data.Fix (foldFix)
import Data.ByteString.Base64 qualified as Base64
import Data.Text.Encoding qualified as Text
-- | Stack documents vertically with a blank line between neighbours.
vcatSep :: [Doc] -> Doc
vcatSep docs = vcat (punctuate line docs)
-- | Render an infix operator applied to the first two arguments.
--   Fails fast with a descriptive error instead of an opaque
--   "Prelude.!!: index too large" when fewer than two are supplied.
binaries :: Text -> [Doc] -> Doc
binaries op (a : b : _) = binary op a b
binaries op _ = error ("binaries: operator " <> Text.unpack op <> " expects two arguments")

-- | Render a prefix unary operator applied to the first argument.
unaries :: Text -> [Doc] -> Doc
unaries op (a : _) = unary op a
unaries op _ = error ("unaries: operator " <> Text.unpack op <> " expects one argument")
-- | @(a op b)@ with spaces around the operator.
binary :: Text -> Doc -> Doc -> Doc
binary op lhs rhs = parens (lhs <+> textStrict op <+> rhs)

-- | @(op a)@ -- prefix operator, no intervening space.
unary :: Text -> Doc -> Doc
unary op arg = parens (textStrict op <> arg)

-- | @op(a)@ -- function-call syntax.
func :: Text -> Doc -> Doc
func name arg = textStrict name <> parens arg
-- | Assemble the sections of a .csd file: command-line flags, orchestra,
--   score, and any plugin sections appended after the score.
ppCsdFile :: Doc -> Doc -> Doc -> [Plugin] -> Doc
ppCsdFile flags orc sco plugins =
    tag "CsoundSynthesizer" $ vcatSep [
        tag "CsOptions" flags,
        tag "CsInstruments" orc,
        tag "CsScore" sco,
        ppPlugins plugins
    ]
-- | Each plugin becomes its own tagged section, separated by blank lines.
ppPlugins :: [Plugin] -> Doc
ppPlugins = vcatSep . fmap render
  where
    render (Plugin name body) = tag name (textStrict body)
-- | Wrap a document in an XML-style @\<name\>...\</name\>@ section.
tag :: Text -> Doc -> Doc
tag name content = vcatSep [openTag, content, closeTag]
  where
    openTag  = char '<' <> textStrict name <> char '>'
    closeTag = text "</" <> textStrict name <> char '>'
-- | One score line per event, all for the same instrument.
ppNotes :: InstrId -> [CsdEvent] -> Doc
ppNotes instrId evts = vcat (map (ppNote instrId) evts)
-- | One score "i" statement: instrument id, start time, duration, then
--   the note's remaining arguments.
ppNote :: InstrId -> CsdEvent -> Doc
ppNote instrId evt = char 'i'
    <+> ppInstrId instrId
    <+> double (csdEventStart evt) <+> double (csdEventDur evt)
    <+> hsep (fmap ppPrim $ csdEventContent evt)
-- | Render a primitive value.  PrimVar additionally wraps the variable in
--   csound rate-conversion calls when the target rate differs from the
--   variable's own rate, e.g. k(...) to bring an audio value to control
--   rate and i(k(...)) to get from audio rate down to init rate.
ppPrim :: Prim -> Doc
ppPrim x = case x of
    P n -> char 'p' <> int n
    PrimInstrId a -> ppInstrId a
    PString a -> int a
    PrimInt n -> int n
    PrimDouble d -> double d
    PrimString s -> dquotes $ textStrict s
    PrimVar targetRate v -> ppConverter targetRate (varRate v) $ ppVar v
    where
        -- Choose the conversion wrapper from (target, source) rates;
        -- equal rates and unhandled pairs pass the doc through untouched.
        ppConverter dst src t
            | dst == src = t
            | dst == Ar && src == Kr = a(t)
            | dst == Ar && src == Ir = a(k(t))
            | dst == Kr = k(t)
            | dst == Ir && src == Kr = i(t)
            | dst == Ir && src == Ar = i(k(t))
            | otherwise = t
            where
                -- Local a/k/i render "a(...)", "k(...)", "i(...)".
                tfm ch v = hcat [char ch, parens v]
                a = tfm 'a'
                k = tfm 'k'
                i = tfm 'i'
-- | A score "f" statement: table number, load time 0, table size, GEN
--   routine id, an optional file argument, then the numeric arguments.
ppGen :: Int -> Gen -> Doc
ppGen tabId ft = char 'f'
    <> int tabId
    <+> int 0
    <+> (int $ genSize ft)
    <+> (ppGenId $ genId ft)
    <+> (maybe empty (textStrict . Text.pack . show) $ genFile ft)
    <+> (hsep $ map double $ genArgs ft)
-- | Numeric GEN ids print bare; named GEN routines are double-quoted.
ppGenId :: GenId -> Doc
ppGenId (IntGenId n)      = int n
ppGenId (StringGenId str) = dquotes (textStrict str)
-- | An orchestra instrument definition: "instr <id>" header, the body,
--   and the closing "endin".
ppInstr :: InstrId -> Doc -> Doc
ppInstr instrId body = vcat
    [ text "instr" <+> ppInstrHeadId instrId
    , body
    , text "endin"
    ]
-- | Instrument id as it appears in an "instr" header: bare name for
--   labelled instruments, numeric (possibly fractional) otherwise.
ppInstrHeadId :: InstrId -> Doc
ppInstrHeadId x = case x of
    InstrId den nom -> int nom <> maybe empty ppAfterDot den
    InstrLabel name -> textStrict name
    -- NOTE(review): the fractional part is printed by reversing the
    -- digits of the stored Int -- presumably InstrId keeps them reversed
    -- so that leading zeros (e.g. ".01") survive as an Int; confirm
    -- against the InstrId definition before touching this.
    where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a

-- | Instrument id as used in score/event positions; identical to
--   'ppInstrHeadId' except that instrument labels are double-quoted.
ppInstrId :: InstrId -> Doc
ppInstrId x = case x of
    InstrId den nom -> int nom <> maybe empty ppAfterDot den
    InstrLabel name -> dquotes $ textStrict name
    where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a
type TabDepth = Int
ppStmt :: [R.Var] -> Exp R.Var -> State TabDepth Doc
ppStmt outs expr = maybe (ppExp (ppOuts outs) expr) id (maybeStringCopy outs expr)
maybeStringCopy :: [R.Var] -> Exp R.Var -> Maybe (State TabDepth Doc)
maybeStringCopy outs expr = case (outs, expr) of
([R.Var Sr _], ExpPrim (PrimVar _rate var)) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)
([R.Var Sr _], ReadVar var) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)
([], WriteVar outVar a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppVar outVar) (ppPrimOrVar a)
([R.Var Sr _], ReadArr var as) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppReadArr var $ fmap ppPrimOrVar as)
([], WriteArr outVar bs a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppArrIndex outVar $ fmap ppPrimOrVar bs) (ppPrimOrVar a)
_ -> Nothing
ppStringCopy :: Doc -> Doc -> Doc
ppStringCopy outs src = ppOpc outs "strcpyk" [src]
ppExp :: Doc -> Exp R.Var -> State TabDepth Doc
ppExp res expr = case fmap ppPrimOrVar expr of
ExpPrim (PString n) -> tab $ ppStrget res n
ExpPrim p -> tab $ res $= ppPrim p
Tfm info [a, b] | isInfix info -> tab $ res $= binary (infoName info) a b
Tfm info xs | isPrefix info -> tab $ res $= prefix (infoName info) xs
Tfm info xs -> tab $ ppOpc res (infoName info) xs
ConvertRate to from x -> tab $ ppConvertRate res to from x
If _ifRate info t e -> tab $ ppIf res (ppCond info) t e
ExpNum (PreInline op as) -> tab $ res $= ppNumOp op as
WriteVar v a -> tab $ ppVar v $= a
InitVar v a -> tab $ ppOpc (ppVar v) "init" [a]
ReadVar v -> tab $ res $= ppVar v
InitArr v as -> tab $ ppOpc (ppArrVar (length as) (ppVar v)) "init" as
ReadArr v as -> tab $ if (varRate v /= Sr) then res $= ppReadArr v as else res <+> text "strcpy" <+> ppReadArr v as
WriteArr v as b -> tab $ ppWriteArr v as b
WriteInitArr v as b -> tab $ ppWriteInitArr v as b
TfmArr isInit v op [a,b]| isInfix op -> tab $ ppTfmArrOut isInit v <+> binary (infoName op) a b
TfmArr isInit v op args | isPrefix op -> tab $ ppTfmArrOut isInit v <+> prefix (infoName op) args
TfmArr isInit v op xs -> tab $ ppOpc (ppTfmArrOut isInit v) (infoName op) xs
InitPureArr _outRate _procRate initVals -> tab $ ppOpc (ppArrVar 1 res) "fillarray" initVals
ReadPureArr outRate _procRate arr index -> tab $ if (outRate /= Sr) then res $= ppReadPureArr arr [index] else res <+> text "strcpy" <+> ppReadPureArr arr [index]
IfBegin _ a -> succTab $ text "if " <> ppCond a <> text " then"
IfBlock _ cond (CodeBlock th) -> tab $ ppIf1 res (ppCond cond) th
IfElseBlock _ cond (CodeBlock th) (CodeBlock el) -> tab $ ppIf res (ppCond cond) th el
-- ElseIfBegin a -> left >> (succTab $ text "elseif " <> ppCond a <> text " then")
ElseBegin -> left >> (succTab $ text "else")
IfEnd -> left >> (tab $ text "endif")
UntilBlock _ cond (CodeBlock th) -> tab $ ppUntil res (ppCond cond) th
WhileBlock _ cond (CodeBlock th) -> tab $ ppWhile res (ppCond cond) th
WhileRefBlock var (CodeBlock th) -> tab $ ppWhileRef res var th
UntilBegin _ a -> succTab $ text "until " <> ppCond a <> text " do"
UntilEnd -> left >> (tab $ text "od")
WhileBegin _ a -> succTab $ text "while " <> ppCond a <> text " do"
WhileRefBegin var -> succTab $ text "while " <> ppVar var <+> equals <+> text "1" <+> text "do"
WhileEnd -> left >> (tab $ text "od")
InitMacrosString name initValue -> tab $ initMacros (textStrict name) (textStrict initValue)
InitMacrosDouble name initValue -> tab $ initMacros (textStrict name) (double initValue)
InitMacrosInt name initValue -> tab $ initMacros (textStrict name) (int initValue)
ReadMacrosString name -> tab $ res <+> text "strcpy" <+> readMacro name
ReadMacrosDouble name -> tab $ res $= readMacro name
ReadMacrosInt name -> tab $ res $= readMacro name
EmptyExp -> return empty
Verbatim str -> return $ textStrict str
Select _rate _n a -> tab $ res $= ("SELECTS" <+> a)
Starts -> tab $ res $= "STARTS"
Seq a b -> tab $ hsep ["SEQ", a, b]
Ends _a -> tab $ "ENDS"
ExpBool _ -> tab "ExpBool"
-- x -> error $ "unknown expression: " ++ show x
-- pp macros
readMacro :: Text -> Doc
readMacro name = char '$' <> textStrict name
initMacros :: Doc -> Doc -> Doc
initMacros name initValue = vcat
[ text "#ifndef" <+> name
, text "#define " <+> name <+> char '#' <> initValue <> char '#'
, text "#end"
]
-- pp arrays
ppTfmArrOut :: Bool -> Var -> Doc
ppTfmArrOut isInit v = ppVar v <> (if isInit then (text "[]") else empty)
ppArrIndex :: Var -> [Doc] -> Doc
ppArrIndex v as = ppVar v <> (hcat $ fmap brackets as)
ppArrVar :: Int -> Doc -> Doc
ppArrVar n v = v <> (hcat $ replicate n $ text "[]")
ppReadArr :: Var -> [Doc] -> Doc
ppReadArr v as = ppArrIndex v as
ppReadPureArr :: Doc -> [Doc] -> Doc
ppReadPureArr v as = v <> (hcat $ fmap brackets as)
ppWriteArr :: Var -> ArrIndex Doc -> Doc -> Doc
ppWriteArr v as b = ppArrIndex v as <+> equalsWord <+> b
where equalsWord = if (varRate v == Sr) then text "strcpy" else equals
ppWriteInitArr :: Var -> [Doc] -> Doc -> Doc
ppWriteInitArr v as b = ppArrIndex v as <+> initWord <+> b
where initWord = text $ if (varRate v == Sr) then "strcpy" else "init"
-------------------------------------
tab :: Monad m => Doc -> StateT TabDepth m Doc
tab doc = fmap (shiftByTab doc) get
tabWidth :: TabDepth
tabWidth = 4
shiftByTab :: Doc -> TabDepth -> Doc
shiftByTab doc n
| n == 0 = doc
| otherwise = indent (tabWidth * n) doc
left :: State TabDepth ()
left = modify pred
succTab :: Monad m => Doc -> StateT TabDepth m Doc
succTab doc = do
a <- tab doc
modify succ
return a
prefix :: Text -> [Doc] -> Doc
prefix name args = textStrict name <> tupled args
ppCond :: Inline CondOp Doc -> Doc
ppCond = ppInline ppCondOp
($=) :: Doc -> Doc -> Doc
($=) a b = a <+> equals <+> b
ppOuts :: [R.Var] -> Doc
ppOuts xs = hsep $ punctuate comma $ map ppRatedVar xs
ppPrimOrVar :: PrimOr R.Var -> Doc
ppPrimOrVar x = either ppPrim ppRatedVar $ unPrimOr x
ppStrget :: Doc -> Int -> Doc
ppStrget out n = ppOpc out "strget" [char 'p' <> int n]
ppIf :: Doc -> Doc -> Doc -> Doc -> Doc
ppIf res p t e = vcat
[ text "if" <+> p <+> text "then"
, text " " <> res <+> char '=' <+> t
, text "else"
, text " " <> res <+> char '=' <+> e
, text "endif"
]
ppIf1, ppWhile, ppUntil :: Doc -> Doc -> Doc -> Doc
ppIf1 = ppIfBy "if"
ppWhile = ppIfBy "while"
ppUntil = ppIfBy "until"
ppIfBy :: Text -> Doc -> Doc -> Doc -> Doc
ppIfBy leadTag res p t = vcat
[ textStrict leadTag <+> p <+> text "then"
, text " " <> res <+> char '=' <+> t
, text "endif"
]
ppWhileRef :: Doc -> Var -> Doc -> Doc
ppWhileRef res p t = vcat
[ textStrict "while" <+> ppVar p <+> text "then"
, text " " <> res <+> char '=' <+> t
, text "endif"
]
ppOpc :: Doc -> Text -> [Doc] -> Doc
ppOpc out name xs = out <+> ppProc name xs
ppProc :: Text -> [Doc] -> Doc
ppProc name xs = textStrict name <+> (hsep $ punctuate comma xs)
ppVar :: Var -> Doc
ppVar v = case v of
Var ty rate name -> ppVarType ty <> ppRate rate <> textStrict (Text.cons (varPrefix ty) name)
VarVerbatim _ name -> textStrict name
varPrefix :: VarType -> Char
varPrefix x = case x of
LocalVar -> 'l'
GlobalVar -> 'g'
ppVarType :: VarType -> Doc
ppVarType x = case x of
LocalVar -> empty
GlobalVar -> char 'g'
ppConvertRate :: Doc -> Rate -> Maybe Rate -> Doc -> Doc
ppConvertRate out to from var = case (to, from) of
(Ar, Just Kr) -> upsamp var
(Ar, Just Ir) -> upsamp $ toK var
(Kr, Just Ar) -> downsamp var
(Kr, Just Ir) -> out $= var
(Ir, Just Ar) -> downsamp var
(Ir, Just Kr) -> out $= toI var
(Ar, Nothing) -> out $= toA var
(Kr, Nothing) -> out $= toK var
(Ir, Nothing) -> out $= toI var
(a, Just b) | a == b -> out $= var
(a, b) -> error $ "bug: no rate conversion from " ++ show b ++ " to " ++ show a ++ "."
where
upsamp x = ppOpc out "upsamp" [x]
downsamp x = ppOpc out "downsamp" [x]
toA = func "a"
toK = func "k"
toI = func "i"
-- expressions
ppInline :: (a -> [Doc] -> Doc) -> Inline a Doc -> Doc
ppInline ppNode a = iter $ inlineExp a
where iter x = case x of
InlinePrim n -> inlineEnv a IM.! n
InlineExp op args -> ppNode op $ fmap iter args
-- booleans
ppCondOp :: CondOp -> [Doc] -> Doc
ppCondOp op = case op of
TrueOp -> const $ text "(1 == 1)"
FalseOp -> const $ text "(0 == 1)"
And -> bi "&&"
Or -> bi "||"
Equals -> bi "=="
NotEquals -> bi "!="
Less -> bi "<"
Greater -> bi ">"
LessEquals -> bi "<="
GreaterEquals -> bi ">="
where bi = binaries
-- numeric
ppNumOp :: NumOp -> [Doc] -> Doc
ppNumOp op = case op of
Add -> bi "+"
Sub -> bi "-"
Mul -> bi "*"
Div -> bi "/"
Neg -> uno "-"
Pow -> bi "^"
Mod -> bi "%"
where
bi = binaries
uno = unaries
ppRatedVar :: R.Var -> Doc
ppRatedVar v = ppRate (R.varType v) <> int (R.varId v)
ppRate :: Rate -> Doc
ppRate x = case removeArrRate x of
Sr -> char 'S'
_ -> phi x
where phi = textStrict . Text.toLower . Text.pack . show
ppTotalDur :: Double -> Doc
ppTotalDur d = text "f0" <+> double d
--------------------------------------------------------------
-- debug
newtype PrettyShowE = PrettyShowE E
newtype PrettyE = PrettyE E
instance Show PrettyShowE where
show (PrettyShowE expr) = ppShow expr
instance Show PrettyE where
show (PrettyE expr) = show $ ppE expr
ppE :: E -> Doc
ppE = foldFix go
where
go :: RatedExp Doc -> Doc
go x = fromExp (fromInfo x) x
fromInfo :: RatedExp Doc -> Doc
fromInfo RatedExp{..} =
hsep
[ ppHash ratedExpHash
, maybe mempty ppRate ratedExpRate
, maybe mempty pretty ratedExpDepends
]
ppHash = textStrict . Text.take 4 . Text.decodeUtf8 . Base64.encode
fromExp :: Doc -> RatedExp Doc -> Doc
fromExp info RatedExp{..} = indent 2 $ post $
case ratedExpExp of
ExpPrim p -> ppPrim p
EmptyExp -> textStrict "EMPTY_EXPR"
Tfm inf args -> ppTfm inf args
ConvertRate to from a -> ppConvert to from a
Select r n a -> ppSelect r n a
If rate cond th el -> ppIff rate cond th el
ExpBool args -> hsep ["some bool expr", pretty $ show args]
ExpNum arg -> ppExpNum arg
InitVar v a -> ppInitVar v a
ReadVar v -> "ReadVar" <+> ppVar v
WriteVar v a -> ppVar v $= pp a
TODO
InitArr _v _size -> undefined
ReadArr _v _index -> undefined
WriteArr _v _index _ -> undefined
WriteInitArr _v _index _ -> undefined
TfmArr _isInit _v _info _args -> undefined
InitPureArr _outRate _procRate _vals -> undefined
ReadPureArr _outRate _procRate _arr _index -> undefined
IfBegin rate cond -> hsep ["IF", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
IfBlock rate cond (CodeBlock th) -> ppIfBlockBy "IF-BLOCK" rate cond th
IfElseBlock rate cond (CodeBlock th) (CodeBlock el) ->
ppFun (hsep ["IF-BLOCK", ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])
[ pp th
, "ELSE-BLOCK"
, pp el
, "END-BLOCK"
]
ElseBegin -> "ELSE"
IfEnd -> "END_IF"
UntilBegin rate cond -> hsep ["UNTIL", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
UntilEnd -> "END_UNTIL"
WhileBegin rate cond -> hsep ["WHILE", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
WhileRefBegin v -> hsep ["WHILE_REF", ppVar v]
WhileEnd -> "END_WHILE"
UntilBlock rate cond (CodeBlock th) -> ppIfBlockBy "UNTIL-BLOCK" rate cond th
WhileBlock rate cond (CodeBlock th) -> ppIfBlockBy "WHILE-BLOCK" rate cond th
WhileRefBlock var (CodeBlock th) -> ppWhileRefBlock var th
Verbatim txt -> ppFun "VERBATIM" [textStrict txt]
Starts -> "STARTS"
Seq a b -> vcat ["SEQ", pp a, pp b]
Ends a -> vcat ["ENDS", pp a]
InitMacrosInt _name _n -> undefined
InitMacrosDouble _name _d -> undefined
InitMacrosString _name _str -> undefined
ReadMacrosInt _name -> undefined
ReadMacrosDouble _name -> undefined
ReadMacrosString _name -> undefined
where
post a = hsep [hcat ["{",info, "}:"], a]
ppIfBlockBy leadTag rate cond th =
ppFun (hsep [leadTag, ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])
[ pp th
, "END-BLOCK"
]
ppWhileRefBlock var th =
ppFun (hsep ["WHILE-REF-BLOCK", ppVar var])
[ pp th
, "END-BLOCK"
]
ppTfm info args = ppFun (textStrict $ infoName info) (fmap pp args)
ppConvert to from a =
ppFun (hsep [textStrict "Convert-rate", ppRate to, maybe mempty ppRate from]) [pp a]
ppSelect rate n arg =
ppFun (hsep ["select", ppRate rate, pretty n]) [pp arg]
ppIff rate cond th el =
vcat
[ hsep ["if", ppRate (fromIfRate rate), ppCond $ fmap pp cond]
, indent 2 $ vcat
[ "then" <+> pp th
, "else" <+> pp el
]
]
ppExpNum (PreInline op as) = ppNumOp op (fmap pp as)
ppInitVar v a =
ppFun (hsep ["InitVar", ppVar v]) [pp a]
ppFun name args =
vcat
[ name
, indent 2 $ vcat args
]
pp = either ppPrim id . unPrimOr
| null | https://raw.githubusercontent.com/spell-music/csound-expression/345df2c91c9831dd895f58951990165598504814/csound-expression-dynamic/src/Csound/Dynamic/Render/Pretty.hs | haskell | ElseIfBegin a -> left >> (succTab $ text "elseif " <> ppCond a <> text " then")
x -> error $ "unknown expression: " ++ show x
pp macros
pp arrays
-----------------------------------
expressions
booleans
numeric
------------------------------------------------------------
debug | module Csound.Dynamic.Render.Pretty(
Doc, vcatSep,
ppCsdFile, ppGen, ppNotes, ppInstr, ppStmt, ppTotalDur,
PrettyE(..), PrettyShowE(..),
ppE
) where
import Control.Monad.Trans.State.Strict
import qualified Data.IntMap as IM
import Text.PrettyPrint.Leijen.Text
import Csound.Dynamic.Types
import Csound.Dynamic.Tfm.InferTypes qualified as R(Var(..))
import Data.Text (Text)
import Data.Text qualified as Text
import Text.Show.Pretty (ppShow)
import Data.Fix (foldFix)
import Data.ByteString.Base64 qualified as Base64
import Data.Text.Encoding qualified as Text
vcatSep :: [Doc] -> Doc
vcatSep = vcat . punctuate line
binaries, unaries :: Text -> [Doc] -> Doc
binaries op as = binary op (as !! 0) (as !! 1)
unaries op as = unary op (as !! 0)
binary :: Text -> Doc -> Doc -> Doc
binary op a b = parens $ a <+> textStrict op <+> b
unary :: Text -> Doc -> Doc
unary op a = parens $ textStrict op <> a
func :: Text -> Doc -> Doc
func op a = textStrict op <> parens a
ppCsdFile :: Doc -> Doc -> Doc -> [Plugin] -> Doc
ppCsdFile flags orc sco plugins =
tag "CsoundSynthesizer" $ vcatSep [
tag "CsOptions" flags,
tag "CsInstruments" orc,
tag "CsScore" sco,
ppPlugins plugins
]
ppPlugins :: [Plugin] -> Doc
ppPlugins plugins = vcatSep $ fmap (\(Plugin name body) -> tag name (textStrict body)) plugins
tag :: Text -> Doc -> Doc
tag name content = vcatSep [
char '<' <> textStrict name <> char '>',
content,
text "</" <> textStrict name <> char '>']
ppNotes :: InstrId -> [CsdEvent] -> Doc
ppNotes instrId = vcat . fmap (ppNote instrId)
ppNote :: InstrId -> CsdEvent -> Doc
ppNote instrId evt = char 'i'
<+> ppInstrId instrId
<+> double (csdEventStart evt) <+> double (csdEventDur evt)
<+> hsep (fmap ppPrim $ csdEventContent evt)
ppPrim :: Prim -> Doc
ppPrim x = case x of
P n -> char 'p' <> int n
PrimInstrId a -> ppInstrId a
PString a -> int a
PrimInt n -> int n
PrimDouble d -> double d
PrimString s -> dquotes $ textStrict s
PrimVar targetRate v -> ppConverter targetRate (varRate v) $ ppVar v
where
ppConverter dst src t
| dst == src = t
| dst == Ar && src == Kr = a(t)
| dst == Ar && src == Ir = a(k(t))
| dst == Kr = k(t)
| dst == Ir && src == Kr = i(t)
| dst == Ir && src == Ar = i(k(t))
| otherwise = t
where
tfm ch v = hcat [char ch, parens v]
a = tfm 'a'
k = tfm 'k'
i = tfm 'i'
ppGen :: Int -> Gen -> Doc
ppGen tabId ft = char 'f'
<> int tabId
<+> int 0
<+> (int $ genSize ft)
<+> (ppGenId $ genId ft)
<+> (maybe empty (textStrict . Text.pack . show) $ genFile ft)
<+> (hsep $ map double $ genArgs ft)
ppGenId :: GenId -> Doc
ppGenId x = case x of
IntGenId a -> int a
StringGenId a -> dquotes $ textStrict a
ppInstr :: InstrId -> Doc -> Doc
ppInstr instrId body = vcat [
text "instr" <+> ppInstrHeadId instrId,
body,
text "endin"]
ppInstrHeadId :: InstrId -> Doc
ppInstrHeadId x = case x of
InstrId den nom -> int nom <> maybe empty ppAfterDot den
InstrLabel name -> textStrict name
where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a
ppInstrId :: InstrId -> Doc
ppInstrId x = case x of
InstrId den nom -> int nom <> maybe empty ppAfterDot den
InstrLabel name -> dquotes $ textStrict name
where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a
type TabDepth = Int
ppStmt :: [R.Var] -> Exp R.Var -> State TabDepth Doc
ppStmt outs expr = maybe (ppExp (ppOuts outs) expr) id (maybeStringCopy outs expr)
maybeStringCopy :: [R.Var] -> Exp R.Var -> Maybe (State TabDepth Doc)
maybeStringCopy outs expr = case (outs, expr) of
([R.Var Sr _], ExpPrim (PrimVar _rate var)) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)
([R.Var Sr _], ReadVar var) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)
([], WriteVar outVar a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppVar outVar) (ppPrimOrVar a)
([R.Var Sr _], ReadArr var as) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppReadArr var $ fmap ppPrimOrVar as)
([], WriteArr outVar bs a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppArrIndex outVar $ fmap ppPrimOrVar bs) (ppPrimOrVar a)
_ -> Nothing
ppStringCopy :: Doc -> Doc -> Doc
ppStringCopy outs src = ppOpc outs "strcpyk" [src]
ppExp :: Doc -> Exp R.Var -> State TabDepth Doc
ppExp res expr = case fmap ppPrimOrVar expr of
ExpPrim (PString n) -> tab $ ppStrget res n
ExpPrim p -> tab $ res $= ppPrim p
Tfm info [a, b] | isInfix info -> tab $ res $= binary (infoName info) a b
Tfm info xs | isPrefix info -> tab $ res $= prefix (infoName info) xs
Tfm info xs -> tab $ ppOpc res (infoName info) xs
ConvertRate to from x -> tab $ ppConvertRate res to from x
If _ifRate info t e -> tab $ ppIf res (ppCond info) t e
ExpNum (PreInline op as) -> tab $ res $= ppNumOp op as
WriteVar v a -> tab $ ppVar v $= a
InitVar v a -> tab $ ppOpc (ppVar v) "init" [a]
ReadVar v -> tab $ res $= ppVar v
InitArr v as -> tab $ ppOpc (ppArrVar (length as) (ppVar v)) "init" as
ReadArr v as -> tab $ if (varRate v /= Sr) then res $= ppReadArr v as else res <+> text "strcpy" <+> ppReadArr v as
WriteArr v as b -> tab $ ppWriteArr v as b
WriteInitArr v as b -> tab $ ppWriteInitArr v as b
TfmArr isInit v op [a,b]| isInfix op -> tab $ ppTfmArrOut isInit v <+> binary (infoName op) a b
TfmArr isInit v op args | isPrefix op -> tab $ ppTfmArrOut isInit v <+> prefix (infoName op) args
TfmArr isInit v op xs -> tab $ ppOpc (ppTfmArrOut isInit v) (infoName op) xs
InitPureArr _outRate _procRate initVals -> tab $ ppOpc (ppArrVar 1 res) "fillarray" initVals
ReadPureArr outRate _procRate arr index -> tab $ if (outRate /= Sr) then res $= ppReadPureArr arr [index] else res <+> text "strcpy" <+> ppReadPureArr arr [index]
IfBegin _ a -> succTab $ text "if " <> ppCond a <> text " then"
IfBlock _ cond (CodeBlock th) -> tab $ ppIf1 res (ppCond cond) th
IfElseBlock _ cond (CodeBlock th) (CodeBlock el) -> tab $ ppIf res (ppCond cond) th el
ElseBegin -> left >> (succTab $ text "else")
IfEnd -> left >> (tab $ text "endif")
UntilBlock _ cond (CodeBlock th) -> tab $ ppUntil res (ppCond cond) th
WhileBlock _ cond (CodeBlock th) -> tab $ ppWhile res (ppCond cond) th
WhileRefBlock var (CodeBlock th) -> tab $ ppWhileRef res var th
UntilBegin _ a -> succTab $ text "until " <> ppCond a <> text " do"
UntilEnd -> left >> (tab $ text "od")
WhileBegin _ a -> succTab $ text "while " <> ppCond a <> text " do"
WhileRefBegin var -> succTab $ text "while " <> ppVar var <+> equals <+> text "1" <+> text "do"
WhileEnd -> left >> (tab $ text "od")
InitMacrosString name initValue -> tab $ initMacros (textStrict name) (textStrict initValue)
InitMacrosDouble name initValue -> tab $ initMacros (textStrict name) (double initValue)
InitMacrosInt name initValue -> tab $ initMacros (textStrict name) (int initValue)
ReadMacrosString name -> tab $ res <+> text "strcpy" <+> readMacro name
ReadMacrosDouble name -> tab $ res $= readMacro name
ReadMacrosInt name -> tab $ res $= readMacro name
EmptyExp -> return empty
Verbatim str -> return $ textStrict str
Select _rate _n a -> tab $ res $= ("SELECTS" <+> a)
Starts -> tab $ res $= "STARTS"
Seq a b -> tab $ hsep ["SEQ", a, b]
Ends _a -> tab $ "ENDS"
ExpBool _ -> tab "ExpBool"
readMacro :: Text -> Doc
readMacro name = char '$' <> textStrict name
initMacros :: Doc -> Doc -> Doc
initMacros name initValue = vcat
[ text "#ifndef" <+> name
, text "#define " <+> name <+> char '#' <> initValue <> char '#'
, text "#end"
]
ppTfmArrOut :: Bool -> Var -> Doc
ppTfmArrOut isInit v = ppVar v <> (if isInit then (text "[]") else empty)
ppArrIndex :: Var -> [Doc] -> Doc
ppArrIndex v as = ppVar v <> (hcat $ fmap brackets as)
ppArrVar :: Int -> Doc -> Doc
ppArrVar n v = v <> (hcat $ replicate n $ text "[]")
ppReadArr :: Var -> [Doc] -> Doc
ppReadArr v as = ppArrIndex v as
ppReadPureArr :: Doc -> [Doc] -> Doc
ppReadPureArr v as = v <> (hcat $ fmap brackets as)
ppWriteArr :: Var -> ArrIndex Doc -> Doc -> Doc
ppWriteArr v as b = ppArrIndex v as <+> equalsWord <+> b
where equalsWord = if (varRate v == Sr) then text "strcpy" else equals
ppWriteInitArr :: Var -> [Doc] -> Doc -> Doc
ppWriteInitArr v as b = ppArrIndex v as <+> initWord <+> b
where initWord = text $ if (varRate v == Sr) then "strcpy" else "init"
tab :: Monad m => Doc -> StateT TabDepth m Doc
tab doc = fmap (shiftByTab doc) get
tabWidth :: TabDepth
tabWidth = 4
shiftByTab :: Doc -> TabDepth -> Doc
shiftByTab doc n
| n == 0 = doc
| otherwise = indent (tabWidth * n) doc
left :: State TabDepth ()
left = modify pred
succTab :: Monad m => Doc -> StateT TabDepth m Doc
succTab doc = do
a <- tab doc
modify succ
return a
prefix :: Text -> [Doc] -> Doc
prefix name args = textStrict name <> tupled args
ppCond :: Inline CondOp Doc -> Doc
ppCond = ppInline ppCondOp
($=) :: Doc -> Doc -> Doc
($=) a b = a <+> equals <+> b
ppOuts :: [R.Var] -> Doc
ppOuts xs = hsep $ punctuate comma $ map ppRatedVar xs
ppPrimOrVar :: PrimOr R.Var -> Doc
ppPrimOrVar x = either ppPrim ppRatedVar $ unPrimOr x
ppStrget :: Doc -> Int -> Doc
ppStrget out n = ppOpc out "strget" [char 'p' <> int n]
ppIf :: Doc -> Doc -> Doc -> Doc -> Doc
ppIf res p t e = vcat
[ text "if" <+> p <+> text "then"
, text " " <> res <+> char '=' <+> t
, text "else"
, text " " <> res <+> char '=' <+> e
, text "endif"
]
ppIf1, ppWhile, ppUntil :: Doc -> Doc -> Doc -> Doc
ppIf1 = ppIfBy "if"
ppWhile = ppIfBy "while"
ppUntil = ppIfBy "until"
ppIfBy :: Text -> Doc -> Doc -> Doc -> Doc
ppIfBy leadTag res p t = vcat
[ textStrict leadTag <+> p <+> text "then"
, text " " <> res <+> char '=' <+> t
, text "endif"
]
ppWhileRef :: Doc -> Var -> Doc -> Doc
ppWhileRef res p t = vcat
[ textStrict "while" <+> ppVar p <+> text "then"
, text " " <> res <+> char '=' <+> t
, text "endif"
]
ppOpc :: Doc -> Text -> [Doc] -> Doc
ppOpc out name xs = out <+> ppProc name xs
ppProc :: Text -> [Doc] -> Doc
ppProc name xs = textStrict name <+> (hsep $ punctuate comma xs)
ppVar :: Var -> Doc
ppVar v = case v of
Var ty rate name -> ppVarType ty <> ppRate rate <> textStrict (Text.cons (varPrefix ty) name)
VarVerbatim _ name -> textStrict name
varPrefix :: VarType -> Char
varPrefix x = case x of
LocalVar -> 'l'
GlobalVar -> 'g'
ppVarType :: VarType -> Doc
ppVarType x = case x of
LocalVar -> empty
GlobalVar -> char 'g'
ppConvertRate :: Doc -> Rate -> Maybe Rate -> Doc -> Doc
ppConvertRate out to from var = case (to, from) of
(Ar, Just Kr) -> upsamp var
(Ar, Just Ir) -> upsamp $ toK var
(Kr, Just Ar) -> downsamp var
(Kr, Just Ir) -> out $= var
(Ir, Just Ar) -> downsamp var
(Ir, Just Kr) -> out $= toI var
(Ar, Nothing) -> out $= toA var
(Kr, Nothing) -> out $= toK var
(Ir, Nothing) -> out $= toI var
(a, Just b) | a == b -> out $= var
(a, b) -> error $ "bug: no rate conversion from " ++ show b ++ " to " ++ show a ++ "."
where
upsamp x = ppOpc out "upsamp" [x]
downsamp x = ppOpc out "downsamp" [x]
toA = func "a"
toK = func "k"
toI = func "i"
ppInline :: (a -> [Doc] -> Doc) -> Inline a Doc -> Doc
ppInline ppNode a = iter $ inlineExp a
where iter x = case x of
InlinePrim n -> inlineEnv a IM.! n
InlineExp op args -> ppNode op $ fmap iter args
ppCondOp :: CondOp -> [Doc] -> Doc
ppCondOp op = case op of
TrueOp -> const $ text "(1 == 1)"
FalseOp -> const $ text "(0 == 1)"
And -> bi "&&"
Or -> bi "||"
Equals -> bi "=="
NotEquals -> bi "!="
Less -> bi "<"
Greater -> bi ">"
LessEquals -> bi "<="
GreaterEquals -> bi ">="
where bi = binaries
ppNumOp :: NumOp -> [Doc] -> Doc
ppNumOp op = case op of
Add -> bi "+"
Sub -> bi "-"
Mul -> bi "*"
Div -> bi "/"
Neg -> uno "-"
Pow -> bi "^"
Mod -> bi "%"
where
bi = binaries
uno = unaries
ppRatedVar :: R.Var -> Doc
ppRatedVar v = ppRate (R.varType v) <> int (R.varId v)
ppRate :: Rate -> Doc
ppRate x = case removeArrRate x of
Sr -> char 'S'
_ -> phi x
where phi = textStrict . Text.toLower . Text.pack . show
ppTotalDur :: Double -> Doc
ppTotalDur d = text "f0" <+> double d
newtype PrettyShowE = PrettyShowE E
newtype PrettyE = PrettyE E
instance Show PrettyShowE where
show (PrettyShowE expr) = ppShow expr
instance Show PrettyE where
show (PrettyE expr) = show $ ppE expr
ppE :: E -> Doc
ppE = foldFix go
where
go :: RatedExp Doc -> Doc
go x = fromExp (fromInfo x) x
fromInfo :: RatedExp Doc -> Doc
fromInfo RatedExp{..} =
hsep
[ ppHash ratedExpHash
, maybe mempty ppRate ratedExpRate
, maybe mempty pretty ratedExpDepends
]
ppHash = textStrict . Text.take 4 . Text.decodeUtf8 . Base64.encode
fromExp :: Doc -> RatedExp Doc -> Doc
fromExp info RatedExp{..} = indent 2 $ post $
case ratedExpExp of
ExpPrim p -> ppPrim p
EmptyExp -> textStrict "EMPTY_EXPR"
Tfm inf args -> ppTfm inf args
ConvertRate to from a -> ppConvert to from a
Select r n a -> ppSelect r n a
If rate cond th el -> ppIff rate cond th el
ExpBool args -> hsep ["some bool expr", pretty $ show args]
ExpNum arg -> ppExpNum arg
InitVar v a -> ppInitVar v a
ReadVar v -> "ReadVar" <+> ppVar v
WriteVar v a -> ppVar v $= pp a
TODO
InitArr _v _size -> undefined
ReadArr _v _index -> undefined
WriteArr _v _index _ -> undefined
WriteInitArr _v _index _ -> undefined
TfmArr _isInit _v _info _args -> undefined
InitPureArr _outRate _procRate _vals -> undefined
ReadPureArr _outRate _procRate _arr _index -> undefined
IfBegin rate cond -> hsep ["IF", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
IfBlock rate cond (CodeBlock th) -> ppIfBlockBy "IF-BLOCK" rate cond th
IfElseBlock rate cond (CodeBlock th) (CodeBlock el) ->
ppFun (hsep ["IF-BLOCK", ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])
[ pp th
, "ELSE-BLOCK"
, pp el
, "END-BLOCK"
]
ElseBegin -> "ELSE"
IfEnd -> "END_IF"
UntilBegin rate cond -> hsep ["UNTIL", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
UntilEnd -> "END_UNTIL"
WhileBegin rate cond -> hsep ["WHILE", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
WhileRefBegin v -> hsep ["WHILE_REF", ppVar v]
WhileEnd -> "END_WHILE"
UntilBlock rate cond (CodeBlock th) -> ppIfBlockBy "UNTIL-BLOCK" rate cond th
WhileBlock rate cond (CodeBlock th) -> ppIfBlockBy "WHILE-BLOCK" rate cond th
WhileRefBlock var (CodeBlock th) -> ppWhileRefBlock var th
Verbatim txt -> ppFun "VERBATIM" [textStrict txt]
Starts -> "STARTS"
Seq a b -> vcat ["SEQ", pp a, pp b]
Ends a -> vcat ["ENDS", pp a]
InitMacrosInt _name _n -> undefined
InitMacrosDouble _name _d -> undefined
InitMacrosString _name _str -> undefined
ReadMacrosInt _name -> undefined
ReadMacrosDouble _name -> undefined
ReadMacrosString _name -> undefined
where
post a = hsep [hcat ["{",info, "}:"], a]
ppIfBlockBy leadTag rate cond th =
ppFun (hsep [leadTag, ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])
[ pp th
, "END-BLOCK"
]
ppWhileRefBlock var th =
ppFun (hsep ["WHILE-REF-BLOCK", ppVar var])
[ pp th
, "END-BLOCK"
]
ppTfm info args = ppFun (textStrict $ infoName info) (fmap pp args)
ppConvert to from a =
ppFun (hsep [textStrict "Convert-rate", ppRate to, maybe mempty ppRate from]) [pp a]
ppSelect rate n arg =
ppFun (hsep ["select", ppRate rate, pretty n]) [pp arg]
ppIff rate cond th el =
vcat
[ hsep ["if", ppRate (fromIfRate rate), ppCond $ fmap pp cond]
, indent 2 $ vcat
[ "then" <+> pp th
, "else" <+> pp el
]
]
ppExpNum (PreInline op as) = ppNumOp op (fmap pp as)
ppInitVar v a =
ppFun (hsep ["InitVar", ppVar v]) [pp a]
ppFun name args =
vcat
[ name
, indent 2 $ vcat args
]
pp = either ppPrim id . unPrimOr
|
3e249bf493c1ef45a931b1c5e58252c2274a7b8ef6ee58e09ad1ff3a8a392510 | clash-lang/clash-compiler | PatError.hs | module PatError where
import Prelude
topEntity :: Maybe Int -> Int
topEntity (Just x) = x
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/8e461a910f2f37c900705a0847a9b533bce4d2ea/tests/shouldwork/Basic/PatError.hs | haskell | module PatError where
import Prelude
topEntity :: Maybe Int -> Int
topEntity (Just x) = x
|
|
1183936d161e464928944a8c3599ab205acb38e27c191c9a3312d98b1103bce9 | klajo/hacks | beam_renamer_tests.erl | %%%-------------------------------------------------------------------
%%% @doc Test {@link beam_renamer}.
@author ( )
%%% @end
%%%-------------------------------------------------------------------
-module(beam_renamer_tests).
-include_lib("eunit/include/eunit.hrl").
replaces_in_atom_table_test() ->
'x^' = run_literal(x, 'x^', x).
replaces_in_constant_pool_test() ->
['x^'] = run_literal(x, 'x^', [x]),
['x^', 'x^'] = run_literal(x, 'x^', [x, x]),
{'x^', 'x^'} = run_literal(x, 'x^', {x, x}),
{[{'x^'}]} = run_literal(x, 'x^', {[{x}]}).
run_literal(Name0, Name, Term) ->
run_with_renamed_module(
fun() -> Name:f() end,
mk_module(Name0, [erl_syntax:abstract(Term)]),
Name).
run_with_renamed_module(Fun, BeamBin, Name) ->
Bin = beam_renamer:rename(BeamBin, Name),
unload_module(Name),
{module, _} = code:load_binary(Name, "dummy.beam", Bin),
try Fun()
after unload_module(Name)
end.
unload_module(ModName) ->
code:purge(ModName),
code:delete(ModName).
mk_module(ModName, FuncBody) ->
{ok, ModName, Bin} = compile:forms(mk_module_forms(ModName, FuncBody)),
Bin.
mk_module_forms(ModName, FuncBody) ->
erl_syntax:revert_forms(
[erl_syntax:attribute(
erl_syntax:atom(module),
[erl_syntax:atom(ModName)]),
erl_syntax:attribute(
erl_syntax:atom(compile),
[erl_syntax:atom(export_all)]),
erl_syntax:function(
erl_syntax:atom(f),
[erl_syntax:clause([], FuncBody)])]).
| null | https://raw.githubusercontent.com/klajo/hacks/80afdad130b9b914d410cb382ebb1b6ee1236e94/beam/test/beam_renamer_tests.erl | erlang | -------------------------------------------------------------------
@doc Test {@link beam_renamer}.
@end
------------------------------------------------------------------- | @author ( )
-module(beam_renamer_tests).
-include_lib("eunit/include/eunit.hrl").
replaces_in_atom_table_test() ->
'x^' = run_literal(x, 'x^', x).
replaces_in_constant_pool_test() ->
['x^'] = run_literal(x, 'x^', [x]),
['x^', 'x^'] = run_literal(x, 'x^', [x, x]),
{'x^', 'x^'} = run_literal(x, 'x^', {x, x}),
{[{'x^'}]} = run_literal(x, 'x^', {[{x}]}).
run_literal(Name0, Name, Term) ->
run_with_renamed_module(
fun() -> Name:f() end,
mk_module(Name0, [erl_syntax:abstract(Term)]),
Name).
run_with_renamed_module(Fun, BeamBin, Name) ->
Bin = beam_renamer:rename(BeamBin, Name),
unload_module(Name),
{module, _} = code:load_binary(Name, "dummy.beam", Bin),
try Fun()
after unload_module(Name)
end.
unload_module(ModName) ->
code:purge(ModName),
code:delete(ModName).
mk_module(ModName, FuncBody) ->
{ok, ModName, Bin} = compile:forms(mk_module_forms(ModName, FuncBody)),
Bin.
mk_module_forms(ModName, FuncBody) ->
erl_syntax:revert_forms(
[erl_syntax:attribute(
erl_syntax:atom(module),
[erl_syntax:atom(ModName)]),
erl_syntax:attribute(
erl_syntax:atom(compile),
[erl_syntax:atom(export_all)]),
erl_syntax:function(
erl_syntax:atom(f),
[erl_syntax:clause([], FuncBody)])]).
|
3e8a841b1a590d0222f030e46885e26f247f3f9885e2f2c7d02e171facb46a0e | Lysxia/generic-data | Prelude.hs | # LANGUAGE FlexibleContexts #
-- | Generic deriving for standard classes in base
--
-- === Warning
--
-- This is an internal module: it is not subject to any versioning policy,
-- breaking changes can happen at any time.
--
-- If something here seems useful, please report it or create a pull request to
-- export it from an external module.
module Generic.Data.Internal.Prelude where
import Control.Applicative (liftA2, Alternative(..))
import Data.Function (on)
import Data.Functor.Classes
import Data.Semigroup
import GHC.Generics
import Generic.Data.Internal.Utils (from', to', liftG2)
-- * 'Eq'

-- | Generic ('==').
--
-- @
-- instance 'Eq' MyType where
--   ('==') = 'geq'
-- @
geq :: (Generic a, Eq (Rep a ())) => a -> a -> Bool
geq = (==) `on` from'

-- * 'Ord'

-- | Generic 'compare'.
--
-- @
-- instance 'Ord' MyType where
--   'compare' = 'gcompare'
-- @
gcompare :: (Generic a, Ord (Rep a ())) => a -> a -> Ordering
gcompare = compare `on` from'

-- * 'Semigroup'

-- | Generic @('<>')@ (or 'mappend').
--
-- @
-- instance 'Semigroup' MyType where
--   ('<>') = 'gmappend'
-- @
--
-- See also 'gmempty'.
gmappend :: (Generic a, Semigroup (Rep a ())) => a -> a -> a
gmappend = \a b -> to (from' a <> from' b)

-- * 'Monoid'

-- | Generic 'mempty'.
--
-- @
-- instance 'Monoid' MyType where
--   'mempty' = 'gmempty'
-- @
gmempty :: (Generic a, Monoid (Rep a ())) => a
gmempty = to' mempty

-- | Generic @('<>')@ (or @'mappend'@).
--
-- The difference from 'gmappend' is the 'Monoid' constraint instead of
-- 'Semigroup', for older versions of base where 'Semigroup' is not a
-- superclass of 'Monoid'.
gmappend' :: (Generic a, Monoid (Rep a ())) => a -> a -> a
gmappend' = \a b -> to (from' a `mappend` from' b)

-- * 'Functor'

-- | Generic 'fmap'.
--
-- @
-- instance 'Functor' MyType where
--   'fmap' = 'gfmap'
-- @
gfmap :: (Generic1 f, Functor (Rep1 f)) => (a -> b) -> f a -> f b
gfmap = \f -> to1 . fmap f . from1

-- | Generic @('<$')@.
--
-- See also 'gfmap'.
gconstmap :: (Generic1 f, Functor (Rep1 f)) => a -> f b -> f a
gconstmap = \a -> to1 . (a <$) . from1

-- * 'Applicative'

-- | Generic 'pure'.
--
-- @
-- instance 'Applicative' MyType where
--   'pure' = 'gpure'
--   ('<*>') = 'gap'
-- @
gpure :: (Generic1 f, Applicative (Rep1 f)) => a -> f a
gpure = to1 . pure

-- | Generic @('<*>')@ (or 'Control.Monad.ap').
--
-- See also 'gpure'.
gap :: (Generic1 f, Applicative (Rep1 f)) => f (a -> b) -> f a -> f b
gap = liftG2 (<*>)

-- | Generic 'liftA2'.
--
-- See also 'gpure'.
gliftA2 :: (Generic1 f, Applicative (Rep1 f)) => (a -> b -> c) -> f a -> f b -> f c
gliftA2 = liftG2 . liftA2

-- * 'Alternative'

-- | Generic 'empty'.
--
-- @
-- instance 'Alternative' MyType where
--   'empty' = 'gempty'
--   ('<|>') = 'galt'
-- @
gempty :: (Generic1 f, Alternative (Rep1 f)) => f a
gempty = to1 empty

-- | Generic ('<|>').
--
-- See also 'gempty'.
galt :: (Generic1 f, Alternative (Rep1 f)) => f a -> f a -> f a
galt = liftG2 (<|>)

-- * 'Foldable'

-- | Generic 'foldMap'.
--
-- @
-- instance 'Foldable' MyType where
--   'foldMap' = 'gfoldMap'
-- @
--
-- This is deprecated but kept around just for reference.
{-# DEPRECATED gfoldMap "This definition has been replaced with 'Generic.Data.Internal.gfoldMap'." #-}
gfoldMap :: (Generic1 f, Foldable (Rep1 f), Monoid m) => (a -> m) -> f a -> m
gfoldMap = \f -> foldMap f . from1

-- | Generic 'foldr'.
--
-- @
-- instance 'Foldable' MyType where
--   'foldr' = 'gfoldr'
-- @
--
-- See also 'gfoldMap'.
gfoldr :: (Generic1 f, Foldable (Rep1 f)) => (a -> b -> b) -> b -> f a -> b
gfoldr = \f b -> foldr f b . from1

-- Note: this one is not deprecated because inlining Just Works.

-- * 'Traversable'

-- | Generic 'traverse'.
--
-- @
-- instance 'Traversable' MyType where
--   'traverse' = 'gtraverse'
-- @
--
-- This is deprecated but kept around just for reference.
{-# DEPRECATED gtraverse "This definition has been replaced with 'Generic.Data.Internal.gtraverse'." #-}
gtraverse
  :: (Generic1 f, Traversable (Rep1 f), Applicative m)
  => (a -> m b) -> f a -> m (f b)
gtraverse = \f -> fmap to1 . traverse f . from1

-- | Generic 'sequenceA'.
--
-- @
-- instance 'Traversable' MyType where
--   'sequenceA' = 'gsequenceA'
-- @
--
-- See also 'gtraverse'.
--
-- This is deprecated but kept around just for reference.
{-# DEPRECATED gsequenceA "This definition has been replaced with 'Generic.Data.Internal.gsequenceA'." #-}
gsequenceA
  :: (Generic1 f, Traversable (Rep1 f), Applicative m)
  => f (m a) -> m (f a)
gsequenceA = fmap to1 . sequenceA . from1

-- * 'Eq1'

-- | Generic 'liftEq'.
gliftEq :: (Generic1 f, Eq1 (Rep1 f)) => (a -> b -> Bool) -> f a -> f b -> Bool
gliftEq = \(==.) a b -> liftEq (==.) (from1 a) (from1 b)

-- * 'Ord1'

-- | Generic 'liftCompare'.
gliftCompare
  :: (Generic1 f, Ord1 (Rep1 f))
  => (a -> b -> Ordering) -> f a -> f b -> Ordering
gliftCompare = \compare' a b -> liftCompare compare' (from1 a) (from1 b)
| null | https://raw.githubusercontent.com/Lysxia/generic-data/846fafb9ec1e4e60424e4f266451665fe25fdfa9/src/Generic/Data/Internal/Prelude.hs | haskell | | Generic deriving for standard classes in base
=== Warning
This is an internal module: it is not subject to any versioning policy,
breaking changes can happen at any time.
If something here seems useful, please report it or create a pull request to
export it from an external module.
@
('==') = 'geq'
@
| Generic 'compare'.
@
'compare' = 'gcompare'
@
* 'Semigroup'
| Generic @('<>')@ (or 'mappend').
@
('<>') = 'gmappend'
@
See also 'gmempty'.
* 'Monoid'
@
instance 'Monoid' MyType where
@
| Generic @('<>')@ (or @'mappend'@).
The difference from `gmappend' is the 'Monoid' constraint instead of
'Semigroup', for older versions of base where 'Semigroup' is not a
superclass of 'Monoid'.
* 'Functor'
| Generic 'fmap'.
@
'fmap' = 'gfmap'
@
| Generic @('<$')@.
See also 'gfmap'.
* 'Applicative'
| Generic 'pure'.
@
'pure' = 'gpure'
('<*>') = 'gap'
@
| Generic @('<*>')@ (or 'Control.Monad.ap').
See also 'gpure'.
| Generic 'liftA2'.
See also 'gpure'.
* 'Alternative'
| Generic 'empty'.
@
'empty' = 'gempty'
('<|>') = 'galt'
@
| Generic ('<|>').
See also 'gempty'.
* 'Foldable'
| Generic 'foldMap'.
@
'foldMap' = 'gfoldMap'
@
This is deprecated but kept around just for reference.
| Generic 'foldr'.
@
'foldr' = 'gfoldr'
@
See also 'gfoldMap'.
Note: this one is not deprecated because inlining Just Works.
| Generic 'traverse'.
@
'traverse' = 'gtraverse'
@
This is deprecated but kept around just for reference.
# DEPRECATED gtraverse "This definition has been replaced with 'Generic.Data.Internal.gtraverse'." #
| Generic 'sequenceA'.
@
'sequenceA' = 'gsequenceA'
@
See also 'gtraverse'.
This is deprecated but kept around just for reference.
# DEPRECATED gsequenceA "This definition has been replaced with 'Generic.Data.Internal.gsequenceA'." #
| Generic 'liftEq'.
* 'Ord1'
| Generic 'liftCompare'. | # LANGUAGE FlexibleContexts #
module Generic.Data.Internal.Prelude where
import Control.Applicative (liftA2, Alternative(..))
import Data.Function (on)
import Data.Functor.Classes
import Data.Semigroup
import GHC.Generics
import Generic.Data.Internal.Utils (from', to', liftG2)
* ' '
| Generic
instance ' ' MyType where
geq :: (Generic a, Eq (Rep a ())) => a -> a -> Bool
geq = (==) `on` from'
* ' '
instance ' ' where
gcompare :: (Generic a, Ord (Rep a ())) => a -> a -> Ordering
gcompare = compare `on` from'
instance ' Semigroup ' where
gmappend :: (Generic a, Semigroup (Rep a ())) => a -> a -> a
gmappend = \a b -> to (from' a <> from' b)
| Generic ' ' .
' ' = ' gmempty '
gmempty :: (Generic a, Monoid (Rep a ())) => a
gmempty = to' mempty
gmappend' :: (Generic a, Monoid (Rep a ())) => a -> a -> a
gmappend' = \a b -> to (from' a `mappend` from' b)
instance ' Functor ' where
gfmap :: (Generic1 f, Functor (Rep1 f)) => (a -> b) -> f a -> f b
gfmap = \f -> to1 . fmap f . from1
gconstmap :: (Generic1 f, Functor (Rep1 f)) => a -> f b -> f a
gconstmap = \a -> to1 . (a <$) . from1
instance ' Applicative ' where
gpure :: (Generic1 f, Applicative (Rep1 f)) => a -> f a
gpure = to1 . pure
gap :: (Generic1 f, Applicative (Rep1 f)) => f (a -> b) -> f a -> f b
gap = liftG2 (<*>)
gliftA2 :: (Generic1 f, Applicative (Rep1 f)) => (a -> b -> c) -> f a -> f b -> f c
gliftA2 = liftG2 . liftA2
instance ' Alternative ' where
gempty :: (Generic1 f, Alternative (Rep1 f)) => f a
gempty = to1 empty
galt :: (Generic1 f, Alternative (Rep1 f)) => f a -> f a -> f a
galt = liftG2 (<|>)
instance ' Foldable ' where
# DEPRECATED gfoldMap " This definition has been replaced with ' Generic . Data . ' . " #
gfoldMap :: (Generic1 f, Foldable (Rep1 f), Monoid m) => (a -> m) -> f a -> m
gfoldMap = \f -> foldMap f . from1
instance ' Foldable ' where
gfoldr :: (Generic1 f, Foldable (Rep1 f)) => (a -> b -> b) -> b -> f a -> b
gfoldr = \f b -> foldr f b . from1
* ' '
instance ' ' where
gtraverse
:: (Generic1 f, Traversable (Rep1 f), Applicative m)
=> (a -> m b) -> f a -> m (f b)
gtraverse = \f -> fmap to1 . traverse f . from1
instance ' ' where
gsequenceA
:: (Generic1 f, Traversable (Rep1 f), Applicative m)
=> f (m a) -> m (f a)
gsequenceA = fmap to1 . sequenceA . from1
* ' Eq1 '
gliftEq :: (Generic1 f, Eq1 (Rep1 f)) => (a -> b -> Bool) -> f a -> f b -> Bool
gliftEq = \(==.) a b -> liftEq (==.) (from1 a) (from1 b)
gliftCompare
:: (Generic1 f, Ord1 (Rep1 f))
=> (a -> b -> Ordering) -> f a -> f b -> Ordering
gliftCompare = \compare' a b -> liftCompare compare' (from1 a) (from1 b)
|
b3a1dcdaf55a70a28bc1b23a9a68ef333dda5c9779984e25eb61316b079a3c7e | williamleferrand/accretio | core_invite.ml | (*
* core - invite
*
*
*
*)
open Lwt
open Printf
open CalendarLib
open Api
open Eliom_content.Html5
open Eliom_content.Html5.D
open Message_parsers
(* Persistent-store keys and timer labels, parameterized by member uid. *)
let has_already_declined = sprintf "core-invite-has-already-declined-%d"
let tag_timer_reminder = sprintf "core-invite-reminded-%d"
(* Key under which we store the uid of the original invitation email, so
   reminders can be threaded onto it later. *)
let key_email_anchor = sprintf "core-invite-anchor-%d"
(* [invite context message] extracts every email address from [message],
   looks up or creates a member for each one, and sorts them into three
   buckets: already members, already declined, freshly invited.  Each
   fresh invitee gets an invitation email and a 26-hour reminder timer;
   a summary of the three buckets is sent back as a reply. *)
let invite context message =
  lwt content = context.get_message_content ~message in
  let emails = Ys_email.get_all_emails content in
  lwt supervisor = $society(context.society)->leader in
  lwt supervisor_name = $member(supervisor)->name in
  lwt already_members, already_declined, invited =
    Lwt_list.fold_left_s
      (fun (already_members, already_declined, invited) email ->
         context.log_info "inviting member with email %s to society %d" email context.society ;
         (* Reuse the existing member for this email, or create one;
            creation may race, hence the `Object_already_exists branch. *)
         lwt member =
           match_lwt Object_member.Store.find_by_email email with
           | Some uid -> return uid
           | None ->
             match_lwt Object_member.Store.create
                         ~preferred_email:email
                         ~emails:[ email ]
                         () with
             | `Object_already_exists (_, uid) -> return uid
             | `Object_created member -> return member.Object_member.uid
         in
         match_lwt context.is_member ~member with
           true -> return ((member, email) :: already_members, already_declined, invited)
         | false ->
           (* check if the member hasn't declined already *)
           match_lwt context.get ~key:(has_already_declined member) with
             Some _ -> return (already_members, (member, email) :: already_declined, invited)
           | None ->
             (* Send the invitation; if a message uid comes back, anchor
                it so later reminders can be threaded as replies. *)
             lwt _ =
               match_lwt
                 context.message_member
                   ~member
                   ~subject:context.society_name
                   ~content:[
                     pcdata "Greetings," ; br () ;
                     br () ;
                     pcdata "I'm running a group called " ; i [ pcdata context.society_name ] ; pcdata ". "; pcdata context.society_description ; br ();
                     br () ;
                     pcdata "Would you like to be notified about the upcoming events? No signup is necessary; we usually organize ourselves by email." ; br () ;
                     br () ;
                     pcdata "Looking forward to hearing from you," ; br () ;
                     br () ;
                     pcdata supervisor_name ;
                   ]
                   () with
                 None -> return_unit
               | Some message_id ->
                 context.set ~key:(key_email_anchor member) ~value:(Ys_uid.to_string message_id)
             in
             (* Schedule a follow-up reminder in 26 hours. *)
             lwt _ =
               context.set_timer
                 ~label:(tag_timer_reminder member)
                 ~duration:(Calendar.Period.lmake ~hour:26 ())
                 (`RemindMember member)
             in
             return (already_members, already_declined, ((member, email) :: invited)))
      ([], [], [])
      emails
  in
  (* Report the three buckets back to the sender of the original message. *)
  lwt _ =
    context.reply_to
      ~message
      ~content:[
        pcdata "Great. Here is what I did:" ; br () ;
        br () ;
        pcdata "Already members:" ;
        ul (List.map (fun (_, email) -> li [ pcdata email ]) already_members) ;
        br () ;
        pcdata "Already declined:" ;
        ul (List.map (fun (_, email) -> li [ pcdata email ]) already_declined) ;
        br () ;
        pcdata "Invited:" ;
        ul (List.map (fun (_, email) -> li [ pcdata email ]) invited) ;
        br () ;
        pcdata "Let's see what comes back!"
      ]
      ()
  in
  return `None
(* [remind context member] cancels the pending reminder timer for [member]
   and sends a polite follow-up: threaded onto the original invitation if
   its uid was anchored, as a standalone email otherwise. *)
let remind context member =
  context.log_info "sending reminder to member %d" member ;
  lwt _ =
    context.cancel_timers ~query:(tag_timer_reminder member)
  in
  match_lwt context.get ~key:(key_email_anchor member) with
    None ->
    (* No anchored invitation uid: send a fresh email. *)
    lwt _ =
      context.message_member
        ~member
        ~subject:context.society_name
        ~content:[
          pcdata "My apologies for the reminder, but maybe have you missed my previous email." ; br () ;
          br () ;
          pcdata "Would you be interested in hearing more about our " ; i [ pcdata context.society_name ] ; pcdata " group?" ;
        ]
        ()
    in
    return `None
  | Some message ->
    (* Thread the reminder onto the original invitation. *)
    let message = Ys_uid.of_string message in
    lwt _ =
      context.reply_to
        ~message
        ~content:[
          pcdata "My apologies for the reminder, but maybe have you missed my previous email - would you be interested in hearing more about our group?" ;
        ]
        ()
    in
    return `None
(* [accepted context message] handles a positive answer: add the sender to
   the society, drop the pending reminder timer, and acknowledge. *)
let accepted context message =
  lwt member = context.get_message_sender ~message in
  context.log_info "adding member %d to the society" member ;
  lwt _ = context.add_member ~member in
  lwt _ =
    context.cancel_timers ~query:(tag_timer_reminder member)
  in
  lwt _ =
    context.reply_to
      ~message
      ~content:[
        pcdata "Great, I added you to the list of participants, stay tuned!" ; br ()
      ]
      ()
  in
  return `None
(* [declined context message] handles a negative answer: remove the sender,
   drop the reminder timer, and persist the refusal under
   [has_already_declined] so they are never re-invited. *)
let declined context message =
  lwt member = context.get_message_sender ~message in
  context.log_info "removing member %d to the society" member ;
  lwt _ = context.remove_member ~member in
  lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in
  lwt _ = context.set ~key:(has_already_declined member) ~value:"true" in
  lwt _ =
    context.reply_to
      ~message
      ~content:[
        pcdata "Ok!" ; pcdata " If you change you mind later, don't hesitate to be get back in touch!" ; br ()
      ]
      ()
  in
  return `None
(* [initialize_invites context ()] bootstraps the playbook by asking the
   society supervisor for the list of email addresses to invite. *)
let initialize_invites context () =
  lwt _ =
    context.message_supervisor
      ~subject:"Who do you want to invite?"
      ~content:[
        pcdata "Greetings," ; br () ;
        br () ;
        pcdata "Who do you want to invite? Just send me a bunch of emails and I'll figure out who to get in touch with" ; br ()
      ]
      ()
  in
  return `None
COMPONENT
*initialize_invites<forward> ~> `Message of email ~> invite
remind ~> `Declined of email ~> declined ~> `Accepted of email ~> accepted
-invite ~> `RemindMember of int ~> remind ~> `Accepted of email ~> accepted
invite ~> `Accepted of email ~> accepted
invite ~> `Declined of email ~> declined
| null | https://raw.githubusercontent.com/williamleferrand/accretio/394f855e9c2a6a18f0c2da35058d5a01aacf6586/playbooks/core_invite.ml | ocaml |
* core - invite
*
*
*
check if the member hasn't declined already |
open Lwt
open Printf
open CalendarLib
open Api
open Eliom_content.Html5
open Eliom_content.Html5.D
open Message_parsers
let has_already_declined = sprintf "core-invite-has-already-declined-%d"
let tag_timer_reminder = sprintf "core-invite-reminded-%d"
let key_email_anchor = sprintf "core-invite-anchor-%d"
let invite context message =
lwt content = context.get_message_content ~message in
let emails = Ys_email.get_all_emails content in
lwt supervisor = $society(context.society)->leader in
lwt supervisor_name = $member(supervisor)->name in
lwt already_members, already_declined, invited =
Lwt_list.fold_left_s
(fun (already_members, already_declined, invited) email ->
context.log_info "inviting member with email %s to society %d" email context.society ;
lwt member =
match_lwt Object_member.Store.find_by_email email with
| Some uid -> return uid
| None ->
match_lwt Object_member.Store.create
~preferred_email:email
~emails:[ email ]
() with
| `Object_already_exists (_, uid) -> return uid
| `Object_created member -> return member.Object_member.uid
in
match_lwt context.is_member ~member with
true -> return ((member, email) :: already_members, already_declined, invited)
| false ->
match_lwt context.get ~key:(has_already_declined member) with
Some _ -> return (already_members, (member, email) :: already_declined, invited)
| None ->
lwt _ =
match_lwt
context.message_member
~member
~subject:context.society_name
~content:[
pcdata "Greetings," ; br () ;
br () ;
pcdata "I'm running a group called " ; i [ pcdata context.society_name ] ; pcdata ". "; pcdata context.society_description ; br ();
br () ;
pcdata "Would you like to be notified about the upcoming events? No signup is necessary; we usually organize ourselves by email." ; br () ;
br () ;
pcdata "Looking forward to hearing from you," ; br () ;
br () ;
pcdata supervisor_name ;
]
() with
None -> return_unit
| Some message_id ->
context.set ~key:(key_email_anchor member) ~value:(Ys_uid.to_string message_id)
in
lwt _ =
context.set_timer
~label:(tag_timer_reminder member)
~duration:(Calendar.Period.lmake ~hour:26 ())
(`RemindMember member)
in
return (already_members, already_declined, ((member, email) :: invited)))
([], [], [])
emails
in
lwt _ =
context.reply_to
~message
~content:[
pcdata "Great. Here is what I did:" ; br () ;
br () ;
pcdata "Already members:" ;
ul (List.map (fun (_, email) -> li [ pcdata email ]) already_members) ;
br () ;
pcdata "Already declined:" ;
ul (List.map (fun (_, email) -> li [ pcdata email ]) already_declined) ;
br () ;
pcdata "Invited:" ;
ul (List.map (fun (_, email) -> li [ pcdata email ]) invited) ;
br () ;
pcdata "Let's see what comes back!"
]
()
in
return `None
let remind context member =
context.log_info "sending reminder to member %d" member ;
lwt _ =
context.cancel_timers ~query:(tag_timer_reminder member)
in
match_lwt context.get ~key:(key_email_anchor member) with
None ->
lwt _ =
context.message_member
~member
~subject:context.society_name
~content:[
pcdata "My apologies for the reminder, but maybe have you missed my previous email." ; br () ;
br () ;
pcdata "Would you be interested in hearing more about our " ; i [ pcdata context.society_name ] ; pcdata " group?" ;
]
()
in
return `None
| Some message ->
let message = Ys_uid.of_string message in
lwt _ =
context.reply_to
~message
~content:[
pcdata "My apologies for the reminder, but maybe have you missed my previous email - would you be interested in hearing more about our group?" ;
]
()
in
return `None
let accepted context message =
lwt member = context.get_message_sender ~message in
context.log_info "adding member %d to the society" member ;
lwt _ = context.add_member ~member in
lwt _ =
context.cancel_timers ~query:(tag_timer_reminder member)
in
lwt _ =
context.reply_to
~message
~content:[
pcdata "Great, I added you to the list of participants, stay tuned!" ; br ()
]
()
in
return `None
let declined context message =
lwt member = context.get_message_sender ~message in
context.log_info "removing member %d to the society" member ;
lwt _ = context.remove_member ~member in
lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in
lwt _ = context.set ~key:(has_already_declined member) ~value:"true" in
lwt _ =
context.reply_to
~message
~content:[
pcdata "Ok!" ; pcdata " If you change you mind later, don't hesitate to be get back in touch!" ; br ()
]
()
in
return `None
let initialize_invites context () =
lwt _ =
context.message_supervisor
~subject:"Who do you want to invite?"
~content:[
pcdata "Greetings," ; br () ;
br () ;
pcdata "Who do you want to invite? Just send me a bunch of emails and I'll figure out who to get in touch with" ; br ()
]
()
in
return `None
COMPONENT
*initialize_invites<forward> ~> `Message of email ~> invite
remind ~> `Declined of email ~> declined ~> `Accepted of email ~> accepted
-invite ~> `RemindMember of int ~> remind ~> `Accepted of email ~> accepted
invite ~> `Accepted of email ~> accepted
invite ~> `Declined of email ~> declined
|
6b79bf8204c2560a98fd1b57438c1c67a852a1f6bdfc379cf3ec924ba2c70262 | LaurentRDC/pandoc-plot | Prelude.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : $header$
Copyright : ( c ) , 2019 - present
License : GNU GPL , version 2 or above
-- Maintainer :
-- Stability : internal
-- Portability : portable
--
-- Prelude for renderers, containing some helpful utilities.
module Text.Pandoc.Filter.Plot.Renderers.Prelude
( module Prelude,
module Text.Pandoc.Filter.Plot.Monad,
Text,
st,
unpack,
findExecutable,
appendCapture,
toRPath,
)
where
import Data.Text (Text, unpack)
import System.Directory (findExecutable)
import System.FilePath (isPathSeparator)
import Text.Pandoc.Filter.Plot.Monad
import Text.Shakespeare.Text (st)
-- | A shortcut to append a capture-script fragment (produced by the
--   given renderer function) to a figure's own script, separated from it
--   by a single newline.
appendCapture ::
  (FigureSpec -> FilePath -> Script) ->
  FigureSpec ->
  FilePath ->
  Script
appendCapture capture spec path = script spec <> "\n" <> capture spec path
-- | R source only accepts @/@ as the path separator (even on Windows),
--   so normalise every platform path separator to @/@.
toRPath :: FilePath -> FilePath
toRPath = map (\c -> if isPathSeparator c then '/' else c)
| null | https://raw.githubusercontent.com/LaurentRDC/pandoc-plot/933daba593196bf3b1ae1f2022d17389552f275c/src/Text/Pandoc/Filter/Plot/Renderers/Prelude.hs | haskell | # LANGUAGE OverloadedStrings #
|
Module : $header$
Maintainer :
Stability : internal
Portability : portable
Prelude for renderers, containing some helpful utilities.
| A shortcut to append capture script fragments to scripts
| R paths use the '/' path separator |
Copyright : ( c ) , 2019 - present
License : GNU GPL , version 2 or above
module Text.Pandoc.Filter.Plot.Renderers.Prelude
( module Prelude,
module Text.Pandoc.Filter.Plot.Monad,
Text,
st,
unpack,
findExecutable,
appendCapture,
toRPath,
)
where
import Data.Text (Text, unpack)
import System.Directory (findExecutable)
import System.FilePath (isPathSeparator)
import Text.Pandoc.Filter.Plot.Monad
import Text.Shakespeare.Text (st)
appendCapture ::
(FigureSpec -> FilePath -> Script) ->
FigureSpec ->
FilePath ->
Script
appendCapture f s fp = mconcat [script s, "\n", f s fp]
toRPath :: FilePath -> FilePath
toRPath = fmap (\c -> if isPathSeparator c then '/' else c)
|
eb2259778274d44093ba70d8ba8192f57d237cb82c1d2726aae5f7cd0b2b8a8f | rob7hunter/leftparen | loc.scm | ;; how much code have you written?
#lang scheme/base
(require "util.scm")
(provide loc)
;; counts all lines except for comment lines and blank lines
;; Count the lines of code in the given files: every line is counted
;; except blank lines and lines whose first non-space character is one of
;; #:comment-chars (default: semicolon).
(define (loc #:comment-chars (comment-chars (list #\;)) . filenames)
  ;; Sum the per-file line counts.
  (fold + 0
        (map (lambda (filename)
               (file-line-fold
                (lambda (line-str total-loc)
                  (let ((trimmed (string-trim-both line-str #\space)))
                    (cond ((string=? trimmed "") total-loc)  ; blank line
                          ((memq (string-ref trimmed 0) comment-chars) total-loc)  ; comment line
                          (else (+ 1 total-loc)))))
                0
                filename))
             filenames)))
| null | https://raw.githubusercontent.com/rob7hunter/leftparen/169c896bda989b6a049fe49253a04d6f8b62402b/loc.scm | scheme | how much code have you written?
counts all lines except for comment lines and blank lines
)) . filenames) | #lang scheme/base
(require "util.scm")
(provide loc)
(fold + 0
(map (lambda (filename)
(file-line-fold
(lambda (line-str total-loc)
(let ((trimmed (string-trim-both line-str #\space)))
(cond ((string=? trimmed "") total-loc)
((memq (string-ref trimmed 0) comment-chars) total-loc)
(else (+ 1 total-loc)))))
0
filename))
filenames)))
|
edcbdbbce0fdac8d366b70a28a717a3431f760e7d8d96f0c396af1b9675ca8df | madmax96/brave-clojure-solutions | section_8.clj | (ns clojure-brave.exercises.section-8)
setup for exercise 1
;; Validation spec: maps each field keyword to a flat vector of
;; alternating [error-message validator-fn ...] pairs.
(def order-details-validation
  {:name
   ["Please enter a name" not-empty]
   :email
   ["Please enter an email address" not-empty
    "Your email address doesn't look like an email address"
    #(or (empty? %) (re-seq #"@" %))]})
;; Sample inputs for the demos below.
;; NOTE(review): the empty :email in order-details-good fails the
;; not-empty check, so it does not actually validate — this value looks
;; truncated; TODO confirm the intended sample (e.g. "user@mail.com").
(def order-details-good {:name "user" :email ""})
;; Malformed email: non-empty but contains no "@".
(def order-details-bad {:name "user" :email "usermail.com"})
(defn error-messages-for
  "Return a lazy seq of the messages whose paired validator rejects
  to-validate. message-validator-pairs is a flat sequence of alternating
  [message validator message validator ...] entries."
  [to-validate message-validator-pairs]
  (for [[message validator] (partition 2 message-validator-pairs)
        :when (not (validator to-validate))]
    message))
(defn validate
  "Returns a map from field keyword to the seq of error messages for that
  field. Only fields of to-validate that fail at least one of their checks
  in validations appear in the result; an empty map means valid."
  [to-validate validations]
  (reduce (fn [errors validation]
            (let [[fieldname validation-check-groups] validation
                  value (get to-validate fieldname)
                  error-messages (error-messages-for value validation-check-groups)]
              (if (empty? error-messages)
                errors
                ;; Record only failing fields.
                (assoc errors fieldname error-messages))))
          {}
          validations))
;; We need the if-valid macro in order to implement when-valid in the most
;; straightforward way, similar to how `when` from clojure.core is
;; implemented in terms of `if`.
(defmacro if-valid
  "Handle validation more concisely: binds the result of (validate
  to-validate validations) to errors-name, then branches like `if` on
  whether the error map is empty."
  [to-validate validations errors-name & then-else]
  `(let [~errors-name (validate ~to-validate ~validations)]
     (if (empty? ~errors-name)
       ~@then-else)))
1
(defmacro when-valid
[data data-validation & actions]
`(if-valid ~data ~data-validation ~'err (do ~@actions) false))
;Should execute both functions
(when-valid order-details-good order-details-validation
(println "It's a success!")
(println :success))
;Should return false
(when-valid order-details-bad order-details-validation
(println "It's a success!")
(println :success))
;Check expanded forms
(macroexpand '(when-valid order-details order-details-validation
(println "It's a success!")
(println :success)))
2
(defmacro my-or
"macro for or logic"
([] nil)
([x] x)
([form & forms]
`(let [sym# ~form]
(if sym# sym# (my-or ~@forms)))))
(my-or nil false 2 1)
(macroexpand '(my-or nil false 2 1))
3
(defmacro defattrs
[& assignments]
`(do
~@(map
(fn [[retr attr]] `(def ~retr ~attr))
(partition 2 assignments))))
(defattrs c-int :intelligence wokring? :should-work)
(print wokring? c-int)
(macroexpand '(defattrs c-int :intelligence test :should-work))
| null | https://raw.githubusercontent.com/madmax96/brave-clojure-solutions/3be234bdcf3704acd2aca62d1a46fa03463e5735/section_8.clj | clojure | we need if-valid macro in order to implement when-valid in most straightforward way,
similar to how 'when' macro from `clojure.core` is implemented in terms of 'if'
Should execute both functions
Should return false
Check expanded forms | (ns clojure-brave.exercises.section-8)
setup for exercise 1
(def order-details-validation
{:name
["Please enter a name" not-empty]
:email
["Please enter an email address" not-empty
"Your email address doesn't look like an email address"
#(or (empty? %) (re-seq #"@" %))]})
(def order-details-good {:name "user" :email ""})
(def order-details-bad {:name "user" :email "usermail.com"})
(defn error-messages-for
"Return a seq of error messages"
[to-validate message-validator-pairs]
(map first (filter #(not ((second %) to-validate))
(partition 2 message-validator-pairs))))
(defn validate
"Returns a map with a vector of errors for each key"
[to-validate validations]
(reduce (fn [errors validation]
(let [[fieldname validation-check-groups] validation
value (get to-validate fieldname)
error-messages (error-messages-for value validation-check-groups)]
(if (empty? error-messages)
errors
(assoc errors fieldname error-messages))))
{}
validations))
(defmacro if-valid
"Handle validation more concisely"
[to-validate validations errors-name & then-else]
`(let [~errors-name (validate ~to-validate ~validations)]
(if (empty? ~errors-name)
~@then-else)))
1
(defmacro when-valid
[data data-validation & actions]
`(if-valid ~data ~data-validation ~'err (do ~@actions) false))
(when-valid order-details-good order-details-validation
(println "It's a success!")
(println :success))
(when-valid order-details-bad order-details-validation
(println "It's a success!")
(println :success))
(macroexpand '(when-valid order-details order-details-validation
(println "It's a success!")
(println :success)))
2
(defmacro my-or
"macro for or logic"
([] nil)
([x] x)
([form & forms]
`(let [sym# ~form]
(if sym# sym# (my-or ~@forms)))))
(my-or nil false 2 1)
(macroexpand '(my-or nil false 2 1))
3
(defmacro defattrs
[& assignments]
`(do
~@(map
(fn [[retr attr]] `(def ~retr ~attr))
(partition 2 assignments))))
(defattrs c-int :intelligence wokring? :should-work)
(print wokring? c-int)
(macroexpand '(defattrs c-int :intelligence test :should-work))
|
335cd37f510fea2c873504bdbf2484f4eec983570939507828c7671f0c5df325 | geocaml/ocaml-geojson | geojsone.mli | Copyright ( c ) 2021 - 2022 < >
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE .
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*)
* A library for manipulating large documents without reading the whole
document into memory using the { ! streaming , JSON parser .
document into memory using the {!Jsonm} streaming, JSON parser. *)
(** Errors produced while streaming GeoJSON. *)
module Err : sig
  type location = (int * int) * (int * int)
  (** A pair of [(line, column)] positions delimiting the offending input. *)

  type t = [ `Error of location * Jsone.error | `EOI | `Unexpected of string ]
  (** A located JSON decoding error, an unexpected end of input, or an
      unexpected (but well-formed) construct. *)

  val pp : Format.formatter -> t -> unit
  (** [pp fmt e] pretty-prints the error [e] on [fmt]. *)
end
(** The GeoJSON module instantiated with {!Ezjsone.value} as the
    underlying JSON representation. *)
module G : Geojson.S with type json = Ezjsone.value
* { 2 Maps }
Maps are functions that allow you to manipulate common structure in GeoJson
objects . These will be written directly back to the destination that you
provide .
Maps are functions that allow you to manipulate common structure in GeoJson
objects. These will be written directly back to the destination that you
provide. *)
val map_geometry :
  (G.Geometry.t -> G.Geometry.t) ->
  Jsone.src ->
  Jsone.dst ->
  (unit, Err.t) result
(** [map_geometry f src dst] applies [f] to every geometry object read
    from [src] and writes the rewritten document to [dst].  The map
    recurses into geometry collections.  Note: for the moment a document
    consisting of a single bare geometry object is not supported. *)
val map_props :
  (Ezjsone.value -> Ezjsone.value) ->
  Jsone.src ->
  Jsone.dst ->
  (unit, Err.t) result
(** [map_props f src dst] applies [f] to each feature's [properties]
    field.  The properties field is decoded into an {!Ezjsone.value} for
    convenience. *)
* { 2 Folds }
Folds are like maps except you can collect items into an accumulator which
is returned to you .
For example , you might want to collect all of the [ names ] in the
[ properties ] of features .
{ [
let get_string_exn = function ` String s - > s | _ - > failwith " err "
let = function
| ` O assoc - > List.assoc " name " assoc | > get_string_exn
| _ - > failwith " err "
let places src =
( fun acc p - > p : : acc ) [ ] src
] }
Folds are like maps except you can collect items into an accumulator which
is returned to you.
For example, you might want to collect all of the [names] in the
[properties] of features.
{[
let get_string_exn = function `String s -> s | _ -> failwith "err"
let get_name = function
| `O assoc -> List.assoc "name" assoc |> get_string_exn
| _ -> failwith "err"
let places src =
Geojsonm.fold_props (fun acc p -> get_name p :: acc) [] src
]} *)
val fold_geometry :
  ('a -> G.Geometry.t -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result
(** [fold_geometry f acc src] is much like {!map_geometry} but folds [f]
    over every geometry object, threading an accumulator that is then
    returned to you. *)
val fold_props :
  ('a -> Ezjsone.value -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result
(** [fold_props f init src] folds [f] over each feature's decoded
    [properties] field in [src], starting from [init]. *)
* { 2 Iterators }
Iterators are similar to map functions except they take a function [ f ] that
takes a single element from the data - structure as an argument and returns
[ unit ] . In that sense , they tend to be functions with side - effects , such as
[ print_endline ] .
For example , we might want to print the JSON value of every geometry object
in a GeoJSON object .
{ [
let print_geometry g =
( . )
let values src = Geojsonm.iter_geometry print_geometry src
] }
Iterators are similar to map functions except they take a function [f] that
takes a single element from the data-structure as an argument and returns
[unit]. In that sense, they tend to be functions with side-effects, such as
[print_endline].
For example, we might want to print the JSON value of every geometry object
in a GeoJSON object.
{[
let print_geometry g =
print_endline @@ Ezjsone.value_to_string (Geojsonm.G.Geometry.to_json g)
let values src = Geojsonm.iter_geometry print_geometry src
]} *)
val iter_geometry : (G.t -> unit) -> Jsone.src -> (unit, Err.t) result
* [ iter_geometry f src ] will apply [ f ] to all objects .
val iter_props : (Ezjsone.value -> unit) -> Jsone.src -> (unit, Err.t) result
(** [iter_props f src] will apply [f] to each feature's properties field. *)
* { 2 Effect - based , non - blocking libraries }
These libraries use effects to perform non - blocking parsing . They are
currently a part of Geojsone and exposed for other libraries to use .
These libraries use effects to perform non-blocking parsing. They are
currently a part of Geojsone and exposed for other libraries to use. *)
module Ezjsone = Ezjsone
module Jsone = Jsone
module Uutfe = Uutfe
| null | https://raw.githubusercontent.com/geocaml/ocaml-geojson/1342f4627caa813cd153d5724f73c2fb8f0eac31/src/geojsone/geojsone.mli | ocaml | * [fold_geometry f acc src] is much like {!map_geometry} but allows you to
accumulate some result that is then returned to you.
* [fold_props f init src]
* [iter_props f src] will apply [f] to each feature's properties field. | Copyright ( c ) 2021 - 2022 < >
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE .
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*)
* A library for manipulating large documents without reading the whole
document into memory using the { ! streaming , JSON parser .
document into memory using the {!Jsonm} streaming, JSON parser. *)
module Err : sig
type location = (int * int) * (int * int)
type t = [ `Error of location * Jsone.error | `EOI | `Unexpected of string ]
val pp : Format.formatter -> t -> unit
end
module G : Geojson.S with type json = Ezjsone.value
* { 2 Maps }
Maps are functions that allow you to manipulate common structure in GeoJson
objects . These will be written directly back to the destination that you
provide .
Maps are functions that allow you to manipulate common structure in GeoJson
objects. These will be written directly back to the destination that you
provide. *)
val map_geometry :
(G.Geometry.t -> G.Geometry.t) ->
Jsone.src ->
Jsone.dst ->
(unit, Err.t) result
(** [map_geometry f src dst] will apply [f] to all geometry objects. This is
    essentially any
    {{:#section-3.1} geometry
    object}.

    The map will recurse into geometry collections. Note for the moment if you
    have a single geometry object as your document, this will not work. *)
val map_props :
(Ezjsone.value -> Ezjsone.value) ->
Jsone.src ->
Jsone.dst ->
(unit, Err.t) result
(** [map_props ~f] will apply [f] to each feature's properties field.
    The properties field is decoded into an {!Ezjsone.value} for convenience. *)
(** {2 Folds}

    Folds are like maps except you can collect items into an accumulator which
    is returned to you.

    For example, you might want to collect all of the [names] in the
    [properties] of features.

    {[
      let get_string_exn = function `String s -> s | _ -> failwith "err"

      let get_name = function
        | `O assoc -> List.assoc "name" assoc |> get_string_exn
        | _ -> failwith "err"

      let places src =
        Geojsonm.fold_props (fun acc p -> get_name p :: acc) [] src
    ]} *)
val fold_geometry :
('a -> G.Geometry.t -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result
val fold_props :
('a -> Ezjsone.value -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result
(** {2 Iterators}

    Iterators are similar to map functions except they take a function [f] that
    takes a single element from the data-structure as an argument and returns
    [unit]. In that sense, they tend to be functions with side-effects, such as
    [print_endline].

    For example, we might want to print the JSON value of every geometry object
    in a GeoJSON object.

    {[
      let print_geometry g =
        print_endline @@ Ezjsone.value_to_string (Geojsonm.G.Geometry.to_json g)

      let values src = Geojsonm.iter_geometry print_geometry src
    ]} *)
val iter_geometry : (G.t -> unit) -> Jsone.src -> (unit, Err.t) result
(** [iter_geometry f src] will apply [f] to all geometry objects. *)
val iter_props : (Ezjsone.value -> unit) -> Jsone.src -> (unit, Err.t) result
(** {2 Effect-based, non-blocking libraries}

    These libraries use effects to perform non-blocking parsing. They are
    currently a part of Geojsone and exposed for other libraries to use. *)
module Ezjsone = Ezjsone
module Jsone = Jsone
module Uutfe = Uutfe
|
bda02469c7de385dfb454449b2e2466c5cdbe512c7de07d562364fffe6bd5f45 | acieroid/scala-am | nqueens.scm | (define (one-to n)
(letrec ((loop (lambda (i l)
(if (= i 0)
l
(loop (- i 1) (cons i l))))))
(loop n '())))
;; ok? : row dist placed -> boolean
;; True iff a queen on ROW does not share a diagonal with any queen in
;; PLACED, where DIST is the column distance from ROW to (car placed).
(define (ok? row dist placed)
  (cond ((null? placed) #t)
        ((= (car placed) (+ row dist)) #f)
        ((= (car placed) (- row dist)) #f)
        (else (ok? row (+ dist 1) (cdr placed)))))
;; try-it : candidates deferred placed -> integer
;; Counts complete queen placements.  X holds rows still untried for the
;; current column, Y rows already deferred for it, Z the rows placed so far.
(define (try-it x y z)
  (cond ((pair? x)
         (let ((row (car x))
               (rest (cdr x)))
           (+ (if (ok? row 1 z)
                  (try-it (append rest y) '() (cons row z))
                  0)
              (try-it rest (cons row y) z))))
        ((null? y) 1)
        (else 0)))
;; Number of distinct solutions to the N-queens puzzle; (nqueens 8) => 92.
(define (nqueens n)
(try-it (one-to n) '() '()))
(nqueens 8)
#t
| null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/R5RS/gambit/nqueens.scm | scheme | (define (one-to n)
(letrec ((loop (lambda (i l)
(if (= i 0)
l
(loop (- i 1) (cons i l))))))
(loop n '())))
;; ok? : row dist placed -> boolean
;; True iff a queen on ROW does not share a diagonal with any queen in
;; PLACED, where DIST is the column distance from ROW to (car placed).
(define (ok? row dist placed)
  (cond ((null? placed) #t)
        ((= (car placed) (+ row dist)) #f)
        ((= (car placed) (- row dist)) #f)
        (else (ok? row (+ dist 1) (cdr placed)))))
;; try-it : candidates deferred placed -> integer
;; Counts complete queen placements.  X holds rows still untried for the
;; current column, Y rows already deferred for it, Z the rows placed so far.
(define (try-it x y z)
  (cond ((pair? x)
         (let ((row (car x))
               (rest (cdr x)))
           (+ (if (ok? row 1 z)
                  (try-it (append rest y) '() (cons row z))
                  0)
              (try-it rest (cons row y) z))))
        ((null? y) 1)
        (else 0)))
;; Number of distinct solutions to the N-queens puzzle; (nqueens 8) => 92.
(define (nqueens n)
(try-it (one-to n) '() '()))
(nqueens 8)
#t
|
|
a9f730699351ac9802f59db29444d24785563bff66d67a9a0eec9662899ea1a8 | haskell/vector | take.hs | import qualified Data.Vector as U
import Data.Bits
main = print . U.length . U.take 100000 . U.replicate 1000000 $ (7 :: Int)
| null | https://raw.githubusercontent.com/haskell/vector/4c87e88f07aad166c6ae2ccb94fa539fbdd99a91/old-testsuite/microsuite/take.hs | haskell | import qualified Data.Vector as U
import Data.Bits
main = print . U.length . U.take 100000 . U.replicate 1000000 $ (7 :: Int)
|
|
d552abab566a0a3f50e8d1787878e8def49d571c4c6be967ef64bece40a65253 | cloudant/monic | monic_utils.erl | Copyright 2011 Cloudant
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
%% use this file except in compliance with the License. You may obtain a copy of
%% the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
%% License for the specific language governing permissions and limitations under
%% the License.
-module(monic_utils).
-export([path/2, exists/2, open/2]).
-export([write_term/2, pread_term/2]).
-include("monic.hrl").
-define(MAX_TERM, (1 bsl 16)).
%% @doc Disk path for the file named by the request: the `root' entry of
%% Context (default "tmp") joined with the `file' path component of ReqData.
path(ReqData, Context) ->
    filename:join(proplists:get_value(root, Context, "tmp"),
                  wrq:path_info(file, ReqData)).
%% @doc Opens the monic file addressed by the request via monic_file:open/1.
%% On success the handle is also touched in the LRU cache before {ok, Pid}
%% is returned; any error from monic_file:open/1 is passed through unchanged.
open(ReqData, Context) ->
    case monic_file:open(path(ReqData, Context)) of
        {ok, Pid} ->
            monic_file_lru:update(Pid),
            {ok, Pid};
        Else ->
            Else
    end.
%% @doc Whether the file addressed by the request exists on disk.
exists(ReqData, Context) ->
    filelib:is_file(path(ReqData, Context)).
-spec write_term(term(), term()) -> {ok, integer()} | {error, term()}.
%% @doc Appends Term to Fd as a 16-bit big-endian byte-size header followed
%% by its term_to_binary/1 encoding.  Returns {ok, BytesWritten} (payload
%% plus the 2-byte header), {error, term_too_long} when the encoding does
%% not fit the header, or the error from file:write/2.
%%
%% The bound must be strict: ?MAX_TERM is (1 bsl 16) = 65536, and a payload
%% of exactly 65536 bytes would wrap to 0 in <<Size:16>>, silently
%% corrupting the stream for pread_term/2.  Only sizes up to 65535 are
%% representable in the header.
write_term(Fd, Term) ->
    Bin = term_to_binary(Term),
    Size = iolist_size(Bin),
    case Size < ?MAX_TERM of
        true ->
            case file:write(Fd, <<Size:16/integer, Bin/binary>>) of
                ok ->
                    {ok, Size + 2};
                Else ->
                    Else
            end;
        false ->
            {error, term_too_long}
    end.
%% @doc Reads one length-prefixed term (as written by write_term/2) from Fd
%% at byte offset Location: a 16-bit big-endian size then the payload.
%% Returns {ok, BytesConsumed, Term}; `eof' when the payload is truncated;
%% otherwise the underlying file error (or short header read) unchanged.
-spec pread_term(term(), integer()) -> {ok, integer(), term()} | eof | {error, term()}.
pread_term(Fd, Location) ->
    case file:pread(Fd, Location, 2) of
        {ok, <<Size:16/integer>>} ->
            case file:pread(Fd, Location + 2, Size) of
                {ok, Payload} when byte_size(Payload) =:= Size ->
                    {ok, Size + 2, binary_to_term(Payload)};
                {ok, _Short} ->
                    eof;
                Else ->
                    Else
            end;
        Else ->
            Else
    end.
| null | https://raw.githubusercontent.com/cloudant/monic/9b7670d53ee40efea57c777f044b3de74c66e6de/src/monic_utils.erl | erlang |
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. | Copyright 2011 Cloudant
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(monic_utils).
-export([path/2, exists/2, open/2]).
-export([write_term/2, pread_term/2]).
-include("monic.hrl").
-define(MAX_TERM, (1 bsl 16)).
path(ReqData, Context) ->
Root = proplists:get_value(root, Context, "tmp"),
File = wrq:path_info(file, ReqData),
filename:join(Root, File).
open(ReqData, Context) ->
case monic_file:open(path(ReqData, Context)) of
{ok, Pid} ->
monic_file_lru:update(Pid),
{ok, Pid};
Else ->
Else
end.
exists(ReqData, Context) ->
filelib:is_file(path(ReqData, Context)).
-spec write_term(term(), term()) -> {ok, integer()} | {error, term()}.
%% @doc Appends Term to Fd as a 16-bit big-endian byte-size header followed
%% by its term_to_binary/1 encoding.  Returns {ok, BytesWritten} (payload
%% plus the 2-byte header), {error, term_too_long} when the encoding does
%% not fit the header, or the error from file:write/2.
%%
%% The bound must be strict: ?MAX_TERM is (1 bsl 16) = 65536, and a payload
%% of exactly 65536 bytes would wrap to 0 in <<Size:16>>, silently
%% corrupting the stream for pread_term/2.  Only sizes up to 65535 are
%% representable in the header.
write_term(Fd, Term) ->
    Bin = term_to_binary(Term),
    Size = iolist_size(Bin),
    case Size < ?MAX_TERM of
        true ->
            case file:write(Fd, <<Size:16/integer, Bin/binary>>) of
                ok ->
                    {ok, Size + 2};
                Else ->
                    Else
            end;
        false ->
            {error, term_too_long}
    end.
%% @doc Reads one length-prefixed term (the inverse of write_term/2) from Fd
%% at byte offset Location: a 16-bit big-endian size then a term_to_binary/1
%% payload.  Returns {ok, BytesConsumed, Term}; `eof' when fewer than Size
%% payload bytes are available; otherwise the file error (or short header
%% read) unchanged.
-spec pread_term(term(), integer()) -> {ok, integer(), term()} | eof | {error, term()}.
pread_term(Fd, Location) ->
    case file:pread(Fd, Location, 2) of
        {ok, <<Size:16/integer>>} ->
            case file:pread(Fd, Location + 2, Size) of
                {ok, <<Bin:Size/binary>>} ->
                    {ok, Size + 2, binary_to_term(Bin)};
                {ok, _} ->
                    %% Short read: the payload is truncated.
                    eof;
                Else ->
                    Else
            end;
        Else ->
            Else
    end.
|
9f21b932842b907468e2f9d2612fce45e89c5d990b310af4d7bd5e7d4e7e28d9 | incoherentsoftware/defect-process | BubbleProjectile.hs | module Enemy.All.BubbleTurret.BubbleProjectile
( bubbleSpinPath
, bubbleExplodePath
, mkBubbleProjectile
) where
import Control.Monad.IO.Class (MonadIO)
import qualified Data.Set as S
import Attack
import Attack.Projectile
import Collision
import Configs.All.Enemy
import Configs.All.Enemy.BubbleTurret
import Constants
import Enemy.All.BubbleTurret.Data
import FileCache
import Id
import Msg
import Projectile as P
import Util
import Window.Graphics
import World.ZIndex
-- | Sprite for the bubble's idle spinning animation.
bubbleSpinPath =
    PackResourceFilePath "data/enemies/bubble-turret-enemy.pack" "bubble-spin.spr" :: PackResourceFilePath

-- | Attack description spawned when the bubble pops.
bubbleExplodePath =
    PackResourceFilePath "data/enemies/bubble-turret-enemy.pack" "bubble-explode.atk" :: PackResourceFilePath

-- | The bubble only registers collisions against the player.
registeredCollisions = S.fromList
    [ ProjRegisteredPlayerCollision
    ] :: S.Set ProjectileRegisteredCollision

-- | Vertical movement phase; each constructor carries the seconds left in
-- the current phase (see 'thinkBubbleProj' for the transitions).
data BubbleProjVelBehavior
    = InitialRiseVel Secs
    | RiseFallVel Secs

-- | State carried by one bubble projectile.
data BubbleProjData = BubbleProjData
    { _velBehavior    :: BubbleProjVelBehavior  -- ^ current vertical phase
    , _pos            :: Pos2                   -- ^ bubble center position
    , _dir            :: Direction              -- ^ sign source for x velocity
    , _sprite         :: Sprite                 -- ^ spin animation sprite
    , _explodeAtkDesc :: AttackDescription      -- ^ attack spawned on pop
    , _config         :: BubbleTurretEnemyConfig
    }
-- | Loads the bubble's sprite and explosion attack description and
-- assembles the initial projectile state, starting in the initial-rise
-- phase with the configured duration.
mkBubbleProjData
    :: (FileCache m, GraphicsRead m, MonadIO m)
    => Pos2
    -> Direction
    -> BubbleTurretEnemyData
    -> m BubbleProjData
mkBubbleProjData pos dir bubbleProjData = do
    spr            <- loadPackSprite bubbleSpinPath
    explodeAtkDesc <- loadPackAttackDescription bubbleExplodePath
    let cfg = _bubbleTurret $ _config (bubbleProjData :: BubbleTurretEnemyData)

    return $ BubbleProjData
        { _velBehavior    = InitialRiseVel $ _bubbleProjInitialRiseSecs cfg
        , _pos            = pos
        , _dir            = dir
        , _sprite         = spr
        , _explodeAtkDesc = explodeAtkDesc
        , _config         = cfg
        }

-- | Axis-aligned hitbox of configured width/height centered on the
-- bubble's position (the rect origin is offset by half in each axis).
bubbleProjHitbox :: ProjectileHitbox BubbleProjData
bubbleProjHitbox bubbleProj = rectHitbox pos width height
    where
        bubbleProjData = _data bubbleProj
        Pos2 x y       = _pos (bubbleProjData :: BubbleProjData)
        cfg            = _config (bubbleProjData :: BubbleProjData)
        width          = _bubbleProjWidth cfg
        height         = _bubbleProjHeight cfg
        pos            = Pos2 (x - width / 2.0) (y - height / 2.0)
-- | Creates the bubble projectile with a fresh message id, a dummy initial
-- hitbox (the real one is computed per-frame by 'bubbleProjHitbox') and the
-- configured time-to-live, wiring in the think/update/draw/collision
-- callbacks below.
mkBubbleProjectile
    :: (FileCache m, GraphicsRead m, MonadIO m)
    => Pos2
    -> Direction
    -> BubbleTurretEnemyData
    -> m (Some Projectile)
mkBubbleProjectile pos dir bubbleTurretData = do
    bubbleProjData <- mkBubbleProjData pos dir bubbleTurretData
    msgId          <- newId
    let
        dummyHbx = dummyHitbox pos
        ttl      = _bubbleProjAliveSecs $ _config (bubbleProjData :: BubbleProjData)

    return . Some $ (mkProjectile bubbleProjData msgId dummyHbx ttl)
        { _hitbox               = bubbleProjHitbox
        , _registeredCollisions = registeredCollisions
        , _think                = thinkBubbleProj
        , _update               = updateBubbleProj
        , _draw                 = drawBubbleProj
        , _processCollisions    = processBubbleProjCollisions
        }
-- | Messages that pop the bubble: spawn the explosion attack projectile at
-- the bubble's position/direction, and expire the bubble itself by setting
-- its ttl to zero.
bubbleProjExplodeRemoveMsgs
    :: (AllowMsgWrite p NewUpdateProjectileMsgPayload, AllowMsgWrite p ProjectileMsgPayload)
    => Projectile BubbleProjData
    -> [Msg p]
bubbleProjExplodeRemoveMsgs bubbleProj = [mkAtkProjMsg, removeBubbleProjMsg]
    where
        bubbleProjData = _data bubbleProj
        pos            = _pos (bubbleProjData :: BubbleProjData)
        dir            = _dir (bubbleProjData :: BubbleProjData)
        explodeAtkDesc = _explodeAtkDesc bubbleProjData
        mkAtkProj      = mkEnemyAttackProjectile pos dir explodeAtkDesc
        mkAtkProjMsg   = mkMsg $ NewUpdateProjectileMsgAddM mkAtkProj

        bubbleProjId        = P._msgId bubbleProj
        removeBubbleProjMsg = mkMsgTo (ProjectileMsgSetTtl 0.0) bubbleProjId
-- | Per-frame think.  If the remaining ttl would reach zero this frame the
-- bubble pops (explode + remove messages).  Otherwise it emits an update
-- message with the new velocity: x speed is constant with its sign taken
-- from '_dir', y speed follows the phase machine on '_velBehavior'.
thinkBubbleProj :: Monad m => ProjectileThink BubbleProjData m
thinkBubbleProj bubbleProj = return $ if
    | willDisappear -> bubbleProjExplodeRemoveMsgs bubbleProj
    | otherwise     ->
        let
            bubbleProjData     = _data bubbleProj
            cfg                = _config (bubbleProjData :: BubbleProjData)
            speedX             = _bubbleProjSpeedX cfg
            speedY             = _bubbleProjSpeedY cfg
            riseFallPeriodSecs = _bubbleProjRiseFallPeriodSecs cfg

            dir  = _dir (bubbleProjData :: BubbleProjData)
            velX = speedX * directionNeg dir
            velY = vecY $ P._vel bubbleProj

            -- InitialRiseVel: move at -speedY until its timer runs out, then
            -- enter a RiseFallVel period at +speedY.  RiseFallVel: keep the
            -- current y velocity until the period expires, then negate it
            -- for the next period.
            (velBehavior, velY') = case _velBehavior bubbleProjData of
                InitialRiseVel velTtl
                    | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, speedY)
                    | otherwise     -> (InitialRiseVel (velTtl - timeStep), -speedY)
                RiseFallVel velTtl
                    | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, -velY)
                    | otherwise     -> (RiseFallVel (velTtl - timeStep), velY)

            update = \p -> p
                { _data = (P._data p) {_velBehavior = velBehavior}
                , _vel  = Vel2 velX velY'
                }
        in [mkMsgTo (ProjectileMsgUpdate update) (P._msgId bubbleProj)]
    where willDisappear = P._ttl bubbleProj - timeStep <= 0.0
-- | Per-frame update: integrates position from the current velocity over
-- the fixed timestep and advances the spin sprite animation.
updateBubbleProj :: Monad m => ProjectileUpdate BubbleProjData m
updateBubbleProj bubbleProj = return $ bubbleProj {_data = bubbleProjData'}
    where
        bubbleProjData = _data bubbleProj
        pos            = _pos (bubbleProjData :: BubbleProjData)
        vel            = P._vel bubbleProj
        pos'           = pos `vecAdd` (toPos2 $ vel `vecMul` timeStep)
        spr            = _sprite (bubbleProjData :: BubbleProjData)

        bubbleProjData' = bubbleProjData
            { _pos    = pos'
            , _sprite = updateSprite spr
            } :: BubbleProjData

-- | Draws the spin sprite at the bubble's position/direction on the enemy
-- attack projectile z-layer.
drawBubbleProj :: (GraphicsReadWrite m, MonadIO m) => ProjectileDraw BubbleProjData m
drawBubbleProj bubbleProj = drawSprite pos dir enemyAttackProjectileZIndex spr
    where
        bubbleProjData = _data bubbleProj
        pos            = _pos (bubbleProjData :: BubbleProjData)
        dir            = _dir (bubbleProjData :: BubbleProjData)
        spr            = _sprite (bubbleProjData :: BubbleProjData)

-- | Pops the bubble on contact with the player; all other collisions are
-- ignored (only the player collision is registered anyway).
processBubbleProjCollisions :: ProjectileProcessCollisions BubbleProjData
processBubbleProjCollisions collisions bubbleProj = foldr processCollision [] collisions
    where
        processCollision :: ProjectileCollision -> [Msg ThinkCollisionMsgsPhase] -> [Msg ThinkCollisionMsgsPhase]
        processCollision collision !msgs = case collision of
            ProjPlayerCollision _ -> bubbleProjExplodeRemoveMsgs bubbleProj ++ msgs
            _                     -> msgs
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Enemy/All/BubbleTurret/BubbleProjectile.hs | haskell | module Enemy.All.BubbleTurret.BubbleProjectile
( bubbleSpinPath
, bubbleExplodePath
, mkBubbleProjectile
) where
import Control.Monad.IO.Class (MonadIO)
import qualified Data.Set as S
import Attack
import Attack.Projectile
import Collision
import Configs.All.Enemy
import Configs.All.Enemy.BubbleTurret
import Constants
import Enemy.All.BubbleTurret.Data
import FileCache
import Id
import Msg
import Projectile as P
import Util
import Window.Graphics
import World.ZIndex
bubbleSpinPath =
PackResourceFilePath "data/enemies/bubble-turret-enemy.pack" "bubble-spin.spr" :: PackResourceFilePath
bubbleExplodePath =
PackResourceFilePath "data/enemies/bubble-turret-enemy.pack" "bubble-explode.atk" :: PackResourceFilePath
registeredCollisions = S.fromList
[ ProjRegisteredPlayerCollision
] :: S.Set ProjectileRegisteredCollision
data BubbleProjVelBehavior
= InitialRiseVel Secs
| RiseFallVel Secs
data BubbleProjData = BubbleProjData
{ _velBehavior :: BubbleProjVelBehavior
, _pos :: Pos2
, _dir :: Direction
, _sprite :: Sprite
, _explodeAtkDesc :: AttackDescription
, _config :: BubbleTurretEnemyConfig
}
mkBubbleProjData
:: (FileCache m, GraphicsRead m, MonadIO m)
=> Pos2
-> Direction
-> BubbleTurretEnemyData
-> m BubbleProjData
mkBubbleProjData pos dir bubbleProjData = do
spr <- loadPackSprite bubbleSpinPath
explodeAtkDesc <- loadPackAttackDescription bubbleExplodePath
let cfg = _bubbleTurret $ _config (bubbleProjData :: BubbleTurretEnemyData)
return $ BubbleProjData
{ _velBehavior = InitialRiseVel $ _bubbleProjInitialRiseSecs cfg
, _pos = pos
, _dir = dir
, _sprite = spr
, _explodeAtkDesc = explodeAtkDesc
, _config = cfg
}
bubbleProjHitbox :: ProjectileHitbox BubbleProjData
bubbleProjHitbox bubbleProj = rectHitbox pos width height
where
bubbleProjData = _data bubbleProj
Pos2 x y = _pos (bubbleProjData :: BubbleProjData)
cfg = _config (bubbleProjData :: BubbleProjData)
width = _bubbleProjWidth cfg
height = _bubbleProjHeight cfg
pos = Pos2 (x - width / 2.0) (y - height / 2.0)
mkBubbleProjectile
:: (FileCache m, GraphicsRead m, MonadIO m)
=> Pos2
-> Direction
-> BubbleTurretEnemyData
-> m (Some Projectile)
mkBubbleProjectile pos dir bubbleTurretData = do
bubbleProjData <- mkBubbleProjData pos dir bubbleTurretData
msgId <- newId
let
dummyHbx = dummyHitbox pos
ttl = _bubbleProjAliveSecs $ _config (bubbleProjData :: BubbleProjData)
return . Some $ (mkProjectile bubbleProjData msgId dummyHbx ttl)
{ _hitbox = bubbleProjHitbox
, _registeredCollisions = registeredCollisions
, _think = thinkBubbleProj
, _update = updateBubbleProj
, _draw = drawBubbleProj
, _processCollisions = processBubbleProjCollisions
}
bubbleProjExplodeRemoveMsgs
:: (AllowMsgWrite p NewUpdateProjectileMsgPayload, AllowMsgWrite p ProjectileMsgPayload)
=> Projectile BubbleProjData
-> [Msg p]
bubbleProjExplodeRemoveMsgs bubbleProj = [mkAtkProjMsg, removeBubbleProjMsg]
where
bubbleProjData = _data bubbleProj
pos = _pos (bubbleProjData :: BubbleProjData)
dir = _dir (bubbleProjData :: BubbleProjData)
explodeAtkDesc = _explodeAtkDesc bubbleProjData
mkAtkProj = mkEnemyAttackProjectile pos dir explodeAtkDesc
mkAtkProjMsg = mkMsg $ NewUpdateProjectileMsgAddM mkAtkProj
bubbleProjId = P._msgId bubbleProj
removeBubbleProjMsg = mkMsgTo (ProjectileMsgSetTtl 0.0) bubbleProjId
thinkBubbleProj :: Monad m => ProjectileThink BubbleProjData m
thinkBubbleProj bubbleProj = return $ if
| willDisappear -> bubbleProjExplodeRemoveMsgs bubbleProj
| otherwise ->
let
bubbleProjData = _data bubbleProj
cfg = _config (bubbleProjData :: BubbleProjData)
speedX = _bubbleProjSpeedX cfg
speedY = _bubbleProjSpeedY cfg
riseFallPeriodSecs = _bubbleProjRiseFallPeriodSecs cfg
dir = _dir (bubbleProjData :: BubbleProjData)
velX = speedX * directionNeg dir
velY = vecY $ P._vel bubbleProj
(velBehavior, velY') = case _velBehavior bubbleProjData of
InitialRiseVel velTtl
| velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, speedY)
| otherwise -> (InitialRiseVel (velTtl - timeStep), -speedY)
RiseFallVel velTtl
| velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, -velY)
| otherwise -> (RiseFallVel (velTtl - timeStep), velY)
update = \p -> p
{ _data = (P._data p) {_velBehavior = velBehavior}
, _vel = Vel2 velX velY'
}
in [mkMsgTo (ProjectileMsgUpdate update) (P._msgId bubbleProj)]
where willDisappear = P._ttl bubbleProj - timeStep <= 0.0
updateBubbleProj :: Monad m => ProjectileUpdate BubbleProjData m
updateBubbleProj bubbleProj = return $ bubbleProj {_data = bubbleProjData'}
where
bubbleProjData = _data bubbleProj
pos = _pos (bubbleProjData :: BubbleProjData)
vel = P._vel bubbleProj
pos' = pos `vecAdd` (toPos2 $ vel `vecMul` timeStep)
spr = _sprite (bubbleProjData :: BubbleProjData)
bubbleProjData' = bubbleProjData
{ _pos = pos'
, _sprite = updateSprite spr
} :: BubbleProjData
drawBubbleProj :: (GraphicsReadWrite m, MonadIO m) => ProjectileDraw BubbleProjData m
drawBubbleProj bubbleProj = drawSprite pos dir enemyAttackProjectileZIndex spr
where
bubbleProjData = _data bubbleProj
pos = _pos (bubbleProjData :: BubbleProjData)
dir = _dir (bubbleProjData :: BubbleProjData)
spr = _sprite (bubbleProjData :: BubbleProjData)
processBubbleProjCollisions :: ProjectileProcessCollisions BubbleProjData
processBubbleProjCollisions collisions bubbleProj = foldr processCollision [] collisions
where
processCollision :: ProjectileCollision -> [Msg ThinkCollisionMsgsPhase] -> [Msg ThinkCollisionMsgsPhase]
processCollision collision !msgs = case collision of
ProjPlayerCollision _ -> bubbleProjExplodeRemoveMsgs bubbleProj ++ msgs
_ -> msgs
|
|
be9a2e7955dbf0f7e98f37c1d86c47f5e50fea70b0bd46bda1e799f973b8fbb6 | racket/racket7 | main.rkt | #lang racket/base
;; Prints a marker identifying this module, then exits with status 42 — a
;; distinctive exit code, presumably checked by the surrounding package
;; tests (this lives under tests/pkg; confirm against the harness).
(printf "pkg-b first main\n")
(exit 42)
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/pkgs/racket-test/tests/pkg/test-pkgs/pkg-b-first/pkg-b/main.rkt | racket | #lang racket/base
;; Prints a marker identifying this module, then exits with status 42 — a
;; distinctive exit code, presumably checked by the surrounding package
;; tests (this lives under tests/pkg; confirm against the harness).
(printf "pkg-b first main\n")
(exit 42)
|
|
234b89d41f4977c73e7ed100c381fa1e961cfa8a62cfae3adc0d7444d9778256 | clojerl/clojerl | clojerl_Atom_SUITE.erl | -module(clojerl_Atom_SUITE).
-include("clojerl.hrl").
-include("clj_test_utils.hrl").
-export([ all/0
, init_per_suite/1
, end_per_suite/1
]).
-export([ deref/1
, swap/1
, reset/1
, compare_and_set/1
, equiv/1
, meta/1
, str/1
, complete_coverage/1
]).
%% @doc Common Test callback: run every test case exported by this module.
-spec all() -> [atom()].
all() -> clj_test_utils:all(?MODULE).

%% @doc Suite setup: delegates to the shared clojerl test bootstrap.
-spec init_per_suite(config()) -> config().
init_per_suite(Config) -> clj_test_utils:init_per_suite(Config).

%% @doc Suite teardown: nothing to clean up.
-spec end_per_suite(config()) -> config().
end_per_suite(Config) -> Config.
%%------------------------------------------------------------------------------
%% Test Cases
%%------------------------------------------------------------------------------
%% @doc An atom dereferences to its current value, both initially and
%% after a reset/2.
-spec deref(config()) -> result().
deref(_Config) ->
    Atom = 'clojerl.Atom':?CONSTRUCTOR(1),

    ct:comment("deref an atom"),
    1 = clj_rt:deref(Atom),

    2 = 'clojerl.Atom':reset(Atom, 2),
    2 = clj_rt:deref(Atom),

    {comments, ""}.

%% @doc swap/2..5 applies the function (with any extra arguments appended)
%% to the current value and returns the new value.  The concurrent section
%% spawns 100 incrementing swaps and checks none is lost.
-spec swap(config()) -> result().
swap(_Config) ->
    Atom = 'clojerl.Atom':?CONSTRUCTOR(2),

    ct:comment("Successful swaps"),
    3 = 'clojerl.Atom':swap(Atom, fun(X) -> X + 1 end),
    4 = 'clojerl.Atom':swap(Atom, fun(X, Y) -> X + Y end, 1),
    6 = 'clojerl.Atom':swap(Atom, fun(X, Y, Z) -> X + Y + Z end, 1, 1),
    9 = 'clojerl.Atom':swap( Atom
                           , fun(X, Y, Z, W) -> X + Y + Z + W end
                           , 1
                           , 1
                           , [1]
                           ),

    ct:comment("Concurrent swaps"),
    Inc = fun(X) -> X + 1 end,
    Self = self(),
    ResetFun = fun(_) ->
                   spawn(fun() -> 'clojerl.Atom':swap(Atom, Inc), Self ! ok end)
               end,
    N = 100,
    Result = N + 9,
    lists:foreach(ResetFun, lists:seq(1, N)),
    %% Wait for all 100 spawned processes to report back.
    ok = clj_test_utils:wait_for(ok, N, 1000),
    Result = 'clojerl.Atom':deref(Atom),

    {comments, ""}.

%% @doc reset/2 unconditionally replaces the value (any term type) and
%% returns it; concurrent resets must all complete without error.
-spec reset(config()) -> result().
reset(_Config) ->
    Atom = 'clojerl.Atom':?CONSTRUCTOR(1),

    ct:comment("Successful resets"),
    2 = 'clojerl.Atom':reset(Atom, 2),
    foo = 'clojerl.Atom':reset(Atom, foo),
    bar = 'clojerl.Atom':reset(Atom, bar),
    <<"baz">> = 'clojerl.Atom':reset(Atom, <<"baz">>),

    ct:comment("Concurrent resets"),
    Self = self(),
    %% Note: the fun parameter N shadows the outer N bound below.
    ResetFun = fun(N) ->
                   spawn(fun() -> 'clojerl.Atom':reset(Atom, N), Self ! ok end)
               end,
    N = 100,
    lists:foreach(ResetFun, lists:seq(1, N)),
    ok = clj_test_utils:wait_for(ok, N, 1000),

    {comments, ""}.
%% @doc compare_and_set/3 swaps only when the current value matches the
%% expected one, returning whether the swap happened.
-spec compare_and_set(config()) -> result().
compare_and_set(_Config) ->
    Atom = 'clojerl.Atom':?CONSTRUCTOR(2),

    true = 'clojerl.Atom':compare_and_set(Atom, 2, 3),
    false = 'clojerl.Atom':compare_and_set(Atom, whatever, 3),

    {comments, ""}.

%% @doc Equivalence ignores metadata, distinguishes distinct atoms, and is
%% false against non-atom values.
-spec equiv(config()) -> result().
equiv(_Config) ->
    Atom1 = 'clojerl.Atom':?CONSTRUCTOR(1),
    Atom2 = 'clojerl.Atom':?CONSTRUCTOR(2),

    ct:comment("Check that the same atom with different meta is equivalent"),
    Atom3 = clj_rt:with_meta(Atom1, #{a => 1}),
    Atom4 = clj_rt:with_meta(Atom1, #{b => 2}),
    true = clj_rt:equiv(Atom3, Atom4),

    ct:comment("Check that different atoms are not equivalent"),
    false = clj_rt:equiv(Atom1, Atom2),

    ct:comment("An atom and something else"),
    false = clj_rt:equiv(Atom1, whatever),
    false = clj_rt:equiv(Atom1, #{}),
    false = clj_rt:equiv(Atom1, 1),

    {comments, ""}.

%% @doc with_meta/2 attaches metadata that meta/1 reads back.
-spec meta(config()) -> result().
meta(_Config) ->
    Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1),
    Atom1 = clj_rt:with_meta(Atom0, #{a => 1}),
    #{a := 1} = clj_rt:meta(Atom1),
    {comments, ""}.

%% @doc The string form starts with the "#<clojerl.Atom " prefix.
-spec str(config()) -> result().
str(_Config) ->
    Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1),
    Atom1 = clj_rt:with_meta(Atom0, #{a => 1}),
    <<"#<clojerl.Atom ", _/binary>> = clj_rt:str(Atom1),
    {comments, ""}.

%% @doc Exercises the remaining callbacks for coverage: hashing is an
%% integer and stable across calls (the repeated match on Hash checks
%% that), and the gen_server callbacks return their pass-through shapes.
-spec complete_coverage(config()) -> result().
complete_coverage(_Config) ->
    Atom = 'clojerl.Atom':?CONSTRUCTOR(1),
    Hash = 'clojerl.IHash':hash(Atom),
    Hash = 'clojerl.IHash':hash(Atom),
    true = erlang:is_integer(Hash),

    {noreply, state} = 'clojerl.Atom':handle_cast(msg, state),
    {noreply, state} = 'clojerl.Atom':handle_info(msg, state),
    {ok, state} = 'clojerl.Atom':terminate(msg, state),
    {ok, state} = 'clojerl.Atom':code_change(msg, from, state),

    {comments, ""}.
| null | https://raw.githubusercontent.com/clojerl/clojerl/aa35847ca64e1c66224867ca4c31ca6de95bc898/test/clojerl_Atom_SUITE.erl | erlang | ------------------------------------------------------------------------------
Test Cases
------------------------------------------------------------------------------ | -module(clojerl_Atom_SUITE).
-include("clojerl.hrl").
-include("clj_test_utils.hrl").
-export([ all/0
, init_per_suite/1
, end_per_suite/1
]).
-export([ deref/1
, swap/1
, reset/1
, compare_and_set/1
, equiv/1
, meta/1
, str/1
, complete_coverage/1
]).
-spec all() -> [atom()].
all() -> clj_test_utils:all(?MODULE).
-spec init_per_suite(config()) -> config().
init_per_suite(Config) -> clj_test_utils:init_per_suite(Config).
-spec end_per_suite(config()) -> config().
end_per_suite(Config) -> Config.
-spec deref(config()) -> result().
deref(_Config) ->
Atom = 'clojerl.Atom':?CONSTRUCTOR(1),
ct:comment("deref an atom"),
1 = clj_rt:deref(Atom),
2 = 'clojerl.Atom':reset(Atom, 2),
2 = clj_rt:deref(Atom),
{comments, ""}.
-spec swap(config()) -> result().
swap(_Config) ->
Atom = 'clojerl.Atom':?CONSTRUCTOR(2),
ct:comment("Successful swaps"),
3 = 'clojerl.Atom':swap(Atom, fun(X) -> X + 1 end),
4 = 'clojerl.Atom':swap(Atom, fun(X, Y) -> X + Y end, 1),
6 = 'clojerl.Atom':swap(Atom, fun(X, Y, Z) -> X + Y + Z end, 1, 1),
9 = 'clojerl.Atom':swap( Atom
, fun(X, Y, Z, W) -> X + Y + Z + W end
, 1
, 1
, [1]
),
ct:comment("Concurrent swaps"),
Inc = fun(X) -> X + 1 end,
Self = self(),
ResetFun = fun(_) ->
spawn(fun() -> 'clojerl.Atom':swap(Atom, Inc), Self ! ok end)
end,
N = 100,
Result = N + 9,
lists:foreach(ResetFun, lists:seq(1, N)),
ok = clj_test_utils:wait_for(ok, N, 1000),
Result = 'clojerl.Atom':deref(Atom),
{comments, ""}.
-spec reset(config()) -> result().
reset(_Config) ->
Atom = 'clojerl.Atom':?CONSTRUCTOR(1),
ct:comment("Successful resets"),
2 = 'clojerl.Atom':reset(Atom, 2),
foo = 'clojerl.Atom':reset(Atom, foo),
bar = 'clojerl.Atom':reset(Atom, bar),
<<"baz">> = 'clojerl.Atom':reset(Atom, <<"baz">>),
ct:comment("Concurrent resets"),
Self = self(),
ResetFun = fun(N) ->
spawn(fun() -> 'clojerl.Atom':reset(Atom, N), Self ! ok end)
end,
N = 100,
lists:foreach(ResetFun, lists:seq(1, N)),
ok = clj_test_utils:wait_for(ok, N, 1000),
{comments, ""}.
-spec compare_and_set(config()) -> result().
compare_and_set(_Config) ->
Atom = 'clojerl.Atom':?CONSTRUCTOR(2),
true = 'clojerl.Atom':compare_and_set(Atom, 2, 3),
false = 'clojerl.Atom':compare_and_set(Atom, whatever, 3),
{comments, ""}.
-spec equiv(config()) -> result().
equiv(_Config) ->
Atom1 = 'clojerl.Atom':?CONSTRUCTOR(1),
Atom2 = 'clojerl.Atom':?CONSTRUCTOR(2),
ct:comment("Check that the same atom with different meta is equivalent"),
Atom3 = clj_rt:with_meta(Atom1, #{a => 1}),
Atom4 = clj_rt:with_meta(Atom1, #{b => 2}),
true = clj_rt:equiv(Atom3, Atom4),
ct:comment("Check that different atoms are not equivalent"),
false = clj_rt:equiv(Atom1, Atom2),
ct:comment("An atom and something else"),
false = clj_rt:equiv(Atom1, whatever),
false = clj_rt:equiv(Atom1, #{}),
false = clj_rt:equiv(Atom1, 1),
{comments, ""}.
-spec meta(config()) -> result().
meta(_Config) ->
Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1),
Atom1 = clj_rt:with_meta(Atom0, #{a => 1}),
#{a := 1} = clj_rt:meta(Atom1),
{comments, ""}.
-spec str(config()) -> result().
str(_Config) ->
Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1),
Atom1 = clj_rt:with_meta(Atom0, #{a => 1}),
<<"#<clojerl.Atom ", _/binary>> = clj_rt:str(Atom1),
{comments, ""}.
-spec complete_coverage(config()) -> result().
complete_coverage(_Config) ->
Atom = 'clojerl.Atom':?CONSTRUCTOR(1),
Hash = 'clojerl.IHash':hash(Atom),
Hash = 'clojerl.IHash':hash(Atom),
true = erlang:is_integer(Hash),
{noreply, state} = 'clojerl.Atom':handle_cast(msg, state),
{noreply, state} = 'clojerl.Atom':handle_info(msg, state),
{ok, state} = 'clojerl.Atom':terminate(msg, state),
{ok, state} = 'clojerl.Atom':code_change(msg, from, state),
{comments, ""}.
|
61c56175e2501a86f347f6c09b3b349eaac8d9147c2da5ddfaf2b64bf89ea5f1 | jellelicht/guix | gnu.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2014 < >
;;;
;;; This file is part of GNU Guix.
;;;
;;; GNU Guix is free software; you can redistribute it and/or modify it
;;; under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or (at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Guix.  If not, see <http://www.gnu.org/licenses/>.
(define-module (guix scripts import gnu)
#:use-module (guix ui)
#:use-module (guix utils)
#:use-module (guix scripts)
#:use-module (guix import gnu)
#:use-module (guix scripts import)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-11)
#:use-module (srfi srfi-37)
#:use-module (ice-9 match)
#:export (guix-import-gnu))
;;;
;;; Command-line options.
;;;
;; Option values used when the corresponding flag is absent from the
;; command line.
(define %default-options
'((key-download . interactive)))

;; Print the command's usage message to the current output port.
;; (The multi-line help strings are left byte-for-byte as found; their
;; original column alignment appears lost — verify against upstream.)
(define (show-help)
(display (_ "Usage: guix import gnu [OPTION...] PACKAGE
Return a package declaration template for PACKAGE, a GNU package.\n"))
;; '--key-download' taken from (guix scripts refresh).
(display (_ "
--key-download=POLICY
handle missing OpenPGP keys according to POLICY:
'always', 'never', and 'interactive', which is also
used when 'key-download' is not specified"))
(newline)
(display (_ "
-h, --help display this help and exit"))
(display (_ "
-V, --version display version information and exit"))
(newline)
(show-bug-report-information))
;; SRFI-37 option specifications for this command: -h/--help, -V/--version
;; and --key-download (validated against the three accepted policies, with
;; a fatal error on anything else), plus the shared import options.
(define %options
  ;; Specification of the command-line options.
  (cons* (option '(#\h "help") #f #f
                 (lambda args
                   (show-help)
                   (exit 0)))
         (option '(#\V "version") #f #f
                 (lambda args
                   (show-version-and-exit "guix import gnu")))
         (option '("key-download") #t #f    ;from (guix scripts refresh)
                 (lambda (opt name arg result)
                   (match arg
                     ((or "interactive" "always" "never")
                      (alist-cons 'key-download (string->symbol arg)
                                  result))
                     (_
                      (leave (_ "unsupported policy: ~a~%")
                             arg)))))
         %standard-import-options))
;;;
;;; Entry point.
;;;
;; Entry point: parse ARGS, expect exactly one non-option argument (the
;; GNU package name) and emit a package template for it; any other arity
;; is a fatal error.
(define (guix-import-gnu . args)
  (define (parse-options)
    ;; Return the alist of option values.
    (args-fold* args %options
                (lambda (opt name arg result)
                  (leave (_ "~A: unrecognized option~%") name))
                (lambda (arg result)
                  (alist-cons 'argument arg result))
                %default-options))

  (let* ((opts (parse-options))
         ;; Collect positional arguments in the order given.
         (args (filter-map (match-lambda
                             (('argument . value)
                              value)
                             (_ #f))
                           (reverse opts))))
    (match args
      ((name)
       (with-error-handling
         (gnu->guix-package name
                            #:key-download (assoc-ref opts 'key-download))))
      (_
       (leave (_ "wrong number of arguments~%"))))))
;;; gnu.scm ends here
| null | https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/guix/scripts/import/gnu.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Command-line options.
'--key-download' taken from (guix scripts refresh).
Specification of the command-line options.
from (guix scripts refresh)
Entry point.
Return the alist of option values.
gnu.scm ends here | Copyright © 2014 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (guix scripts import gnu)
#:use-module (guix ui)
#:use-module (guix utils)
#:use-module (guix scripts)
#:use-module (guix import gnu)
#:use-module (guix scripts import)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-11)
#:use-module (srfi srfi-37)
#:use-module (ice-9 match)
#:export (guix-import-gnu))
(define %default-options
'((key-download . interactive)))
(define (show-help)
(display (_ "Usage: guix import gnu [OPTION...] PACKAGE
Return a package declaration template for PACKAGE, a GNU package.\n"))
(display (_ "
--key-download=POLICY
handle missing OpenPGP keys according to POLICY:
'always', 'never', and 'interactive', which is also
used when 'key-download' is not specified"))
(newline)
(display (_ "
-h, --help display this help and exit"))
(display (_ "
-V, --version display version information and exit"))
(newline)
(show-bug-report-information))
(define %options
(cons* (option '(#\h "help") #f #f
(lambda args
(show-help)
(exit 0)))
(option '(#\V "version") #f #f
(lambda args
(show-version-and-exit "guix import gnu")))
(lambda (opt name arg result)
(match arg
((or "interactive" "always" "never")
(alist-cons 'key-download (string->symbol arg)
result))
(_
(leave (_ "unsupported policy: ~a~%")
arg)))))
%standard-import-options))
(define (guix-import-gnu . args)
(define (parse-options)
(args-fold* args %options
(lambda (opt name arg result)
(leave (_ "~A: unrecognized option~%") name))
(lambda (arg result)
(alist-cons 'argument arg result))
%default-options))
(let* ((opts (parse-options))
(args (filter-map (match-lambda
(('argument . value)
value)
(_ #f))
(reverse opts))))
(match args
((name)
(with-error-handling
(gnu->guix-package name
#:key-download (assoc-ref opts 'key-download))))
(_
(leave (_ "wrong number of arguments~%"))))))
|
3808527b4a8541e5865943714d06c824c9f05b816fb0b8e61cc0edc86f522baa | sampou-org/ghc_users_guide_ja | PhasePrograms.hs | module Options.PhasePrograms where
import Types
phaseProgramsOptions :: [Flag]
phaseProgramsOptions =
[ flag { flagName = "-pgmL ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を文芸的コードのプリプロセッサとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmP ⟨cmd⟩"
, flagDescription =
"⟨cmd⟩ を C プリプロセッサとして使う(``-cpp`` を指定したときのみ)"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmc ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を C のコンパイラとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmlo ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を LLVM 最適化器として使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmlc ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を LLVM コンパイラとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgms ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ をスプリッタとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgma ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ をアセンブラとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgml ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ をリンカとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmdll ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を DLL 生成器として使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmF ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を プリプロセッサとして使う(``-F`` を指定したときのみ)"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmwindres ⟨cmd⟩"
, flagDescription =
"⟨cmd⟩ を Windows でマニフェストを埋め込むためのプログラムとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmlibtool ⟨cmd⟩"
, flagDescription =
"⟨cmd⟩ を libtool用のコマンドとして使う(``-staticlib`` を指定したときのみ)"
, flagType = DynamicFlag
}
]
| null | https://raw.githubusercontent.com/sampou-org/ghc_users_guide_ja/91ac4ee4347802bbfc63686cfcbd4fc12f95a584/8.2.2/mkUserGuidePart/Options/PhasePrograms.hs | haskell | module Options.PhasePrograms where
import Types
phaseProgramsOptions :: [Flag]
phaseProgramsOptions =
[ flag { flagName = "-pgmL ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を文芸的コードのプリプロセッサとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmP ⟨cmd⟩"
, flagDescription =
"⟨cmd⟩ を C プリプロセッサとして使う(``-cpp`` を指定したときのみ)"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmc ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を C のコンパイラとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmlo ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を LLVM 最適化器として使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmlc ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を LLVM コンパイラとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgms ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ をスプリッタとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgma ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ をアセンブラとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgml ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ をリンカとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmdll ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を DLL 生成器として使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmF ⟨cmd⟩"
, flagDescription = "⟨cmd⟩ を プリプロセッサとして使う(``-F`` を指定したときのみ)"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmwindres ⟨cmd⟩"
, flagDescription =
"⟨cmd⟩ を Windows でマニフェストを埋め込むためのプログラムとして使う"
, flagType = DynamicFlag
}
, flag { flagName = "-pgmlibtool ⟨cmd⟩"
, flagDescription =
"⟨cmd⟩ を libtool用のコマンドとして使う(``-staticlib`` を指定したときのみ)"
, flagType = DynamicFlag
}
]
|
|
27cf0ba744fad5994af301dddadbcc762e8e7f814852e58d080e2bfc6b33e369 | philopon/apiary | Persist.hs | # LANGUAGE UndecidableInstances #
# LANGUAGE OverlappingInstances #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeOperators #
{-# LANGUAGE Rank2Types #-}
# LANGUAGE LambdaCase #
# LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
module Web.Apiary.Database.Persist
( Persist
-- * initializer
, Migrator(..), With
, initPersist, initPersistNoLog
, initPersistPool, initPersistPoolNoLog
-- ** low level
, initPersist', initPersistPool'
-- * query
, RunSQL(runSql)
-- * filter
, sql
) where
import qualified Data.Pool as Pool
import Control.Monad(void, mzero)
import Control.Monad.IO.Class(MonadIO(..))
import Control.Monad.Logger(NoLoggingT(runNoLoggingT))
import Control.Monad.Trans.Reader(ReaderT(..), runReaderT, ask)
import Control.Monad.Trans.Control(MonadBaseControl)
import Web.Apiary.Logger(LogWrapper, runLogWrapper)
import qualified Database.Persist.Sql as Sql
import Web.Apiary(Html)
import Control.Monad.Apiary.Action(ActionT, applyDict)
import Control.Monad.Apiary.Filter(focus, Filter, Doc(DocPrecondition))
import qualified Network.Routing.Dict as Dict
import qualified Network.Routing as R
import Data.Proxy.Compat(Proxy(..))
import GHC.TypeLits.Compat(KnownSymbol)
import Data.Apiary.Extension
(Has, Initializer, initializer, Extensions, Extension, MonadExts, getExt)
data Migrator
= Logging Sql.Migration
| Silent Sql.Migration
| Unsafe Sql.Migration
| NoMigrate
data Persist
= PersistPool Sql.ConnectionPool
| PersistConn Sql.SqlBackend
instance Extension Persist
type With c m = forall a. (c -> m a) -> m a
initPersist' :: (MonadIO n, MonadBaseControl IO n, Monad m)
=> (forall a. Extensions exts -> n a -> m a)
-> With Sql.SqlBackend n -> Migrator -> Initializer m exts (Persist ': exts)
initPersist' run with migr = initializer $ \es -> run es $
with $ \conn -> do
doMigration migr conn
return (PersistConn conn)
-- | construct persist extension initializer with no connection pool.
--
-- example:
--
-- @
initPersist ( withSqliteConn " db.sqlite " ) migrateAll
-- @
initPersist :: (MonadIO m, MonadBaseControl IO m)
=> With Sql.SqlBackend (LogWrapper exts m) -> Sql.Migration
-> Initializer m exts (Persist ': exts)
initPersist with = initPersist' runLogWrapper with . Logging
initPersistNoLog :: (MonadIO m, MonadBaseControl IO m)
=> With Sql.SqlBackend (NoLoggingT m)
-> Sql.Migration -> Initializer m es (Persist ': es)
initPersistNoLog with = initPersist' (const runNoLoggingT) with . Silent
initPersistPool' :: (MonadIO n, MonadBaseControl IO n, Monad m)
=> (forall a. Extensions exts -> n a -> m a)
-> With Sql.ConnectionPool n -> Migrator -> Initializer m exts (Persist ': exts)
initPersistPool' run with migr = initializer $ \es -> run es $
with $ \pool -> do
Pool.withResource pool $ doMigration migr
return (PersistPool pool)
initPersistPool :: (MonadIO m, MonadBaseControl IO m)
=> With Sql.ConnectionPool (LogWrapper exts m) -> Sql.Migration
-> Initializer m exts (Persist ': exts)
initPersistPool with = initPersistPool' runLogWrapper with . Logging
initPersistPoolNoLog :: (MonadIO m, MonadBaseControl IO m)
=> With Sql.ConnectionPool (NoLoggingT m)
-> Sql.Migration -> Initializer m es (Persist ': es)
initPersistPoolNoLog with = initPersistPool' (const runNoLoggingT) with . Silent
doMigration :: (MonadIO m, MonadBaseControl IO m) => Migrator -> Sql.SqlBackend -> m ()
doMigration migr conn = case migr of
Logging m -> runReaderT (Sql.runMigration m) conn
Silent m -> runReaderT (void $ Sql.runMigrationSilent m) conn
Unsafe m -> runReaderT (Sql.runMigrationUnsafe m) conn
NoMigrate -> return ()
-- | execute sql in action.
class RunSQL m where
runSql :: Sql.SqlPersistT m a -> m a
runSql' :: MonadBaseControl IO m => Sql.SqlPersistT m a -> Persist -> m a
runSql' a persist = case persist of
PersistPool p -> Sql.runSqlPool a p
PersistConn c -> Sql.runSqlConn a c
instance (Has Persist es, MonadExts es m, MonadBaseControl IO m) => RunSQL m where
runSql a = getExt (Proxy :: Proxy Persist) >>= runSql' a
instance (MonadBaseControl IO m) => RunSQL (ReaderT Persist m) where
runSql a = ask >>= runSql' a
-- | filter by sql query. since 0.9.0.0.
sql :: (KnownSymbol k, Has Persist exts, MonadBaseControl IO actM, k Dict.</ prms)
=> Maybe Html -- ^ documentation.
-> proxy k
-> Sql.SqlPersistT (ActionT exts '[] actM) a
-> (a -> Maybe b) -- ^ result check function. Nothing: fail filter, Just a: success filter and add parameter.
-> Filter exts actM m prms (k Dict.:= b ': prms)
sql doc k q p = focus (maybe id DocPrecondition doc) Nothing $ R.raw "sql" $ \d t ->
fmap p (runSql $ hoistReaderT (applyDict Dict.emptyDict) q) >>= \case
Nothing -> mzero
Just a -> return (Dict.add k a d, t)
hoistReaderT :: (forall b. m b -> n b) -> ReaderT r m a -> ReaderT r n a
hoistReaderT f m = ReaderT $ \b -> f (runReaderT m b)
# INLINE hoistReaderT #
| null | https://raw.githubusercontent.com/philopon/apiary/7da306fcbfcdec85d073746968298de4540d7235/apiary-persistent/src/Web/Apiary/Database/Persist.hs | haskell | # LANGUAGE Rank2Types #
# LANGUAGE GADTs #
* initializer
** low level
* query
* filter
| construct persist extension initializer with no connection pool.
example:
@
@
| execute sql in action.
| filter by sql query. since 0.9.0.0.
^ documentation.
^ result check function. Nothing: fail filter, Just a: success filter and add parameter. | # LANGUAGE UndecidableInstances #
# LANGUAGE OverlappingInstances #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeOperators #
# LANGUAGE LambdaCase #
# LANGUAGE DataKinds #
module Web.Apiary.Database.Persist
( Persist
, Migrator(..), With
, initPersist, initPersistNoLog
, initPersistPool, initPersistPoolNoLog
, initPersist', initPersistPool'
, RunSQL(runSql)
, sql
) where
import qualified Data.Pool as Pool
import Control.Monad(void, mzero)
import Control.Monad.IO.Class(MonadIO(..))
import Control.Monad.Logger(NoLoggingT(runNoLoggingT))
import Control.Monad.Trans.Reader(ReaderT(..), runReaderT, ask)
import Control.Monad.Trans.Control(MonadBaseControl)
import Web.Apiary.Logger(LogWrapper, runLogWrapper)
import qualified Database.Persist.Sql as Sql
import Web.Apiary(Html)
import Control.Monad.Apiary.Action(ActionT, applyDict)
import Control.Monad.Apiary.Filter(focus, Filter, Doc(DocPrecondition))
import qualified Network.Routing.Dict as Dict
import qualified Network.Routing as R
import Data.Proxy.Compat(Proxy(..))
import GHC.TypeLits.Compat(KnownSymbol)
import Data.Apiary.Extension
(Has, Initializer, initializer, Extensions, Extension, MonadExts, getExt)
data Migrator
= Logging Sql.Migration
| Silent Sql.Migration
| Unsafe Sql.Migration
| NoMigrate
data Persist
= PersistPool Sql.ConnectionPool
| PersistConn Sql.SqlBackend
instance Extension Persist
type With c m = forall a. (c -> m a) -> m a
initPersist' :: (MonadIO n, MonadBaseControl IO n, Monad m)
=> (forall a. Extensions exts -> n a -> m a)
-> With Sql.SqlBackend n -> Migrator -> Initializer m exts (Persist ': exts)
initPersist' run with migr = initializer $ \es -> run es $
with $ \conn -> do
doMigration migr conn
return (PersistConn conn)
initPersist ( withSqliteConn " db.sqlite " ) migrateAll
initPersist :: (MonadIO m, MonadBaseControl IO m)
=> With Sql.SqlBackend (LogWrapper exts m) -> Sql.Migration
-> Initializer m exts (Persist ': exts)
initPersist with = initPersist' runLogWrapper with . Logging
initPersistNoLog :: (MonadIO m, MonadBaseControl IO m)
=> With Sql.SqlBackend (NoLoggingT m)
-> Sql.Migration -> Initializer m es (Persist ': es)
initPersistNoLog with = initPersist' (const runNoLoggingT) with . Silent
initPersistPool' :: (MonadIO n, MonadBaseControl IO n, Monad m)
=> (forall a. Extensions exts -> n a -> m a)
-> With Sql.ConnectionPool n -> Migrator -> Initializer m exts (Persist ': exts)
initPersistPool' run with migr = initializer $ \es -> run es $
with $ \pool -> do
Pool.withResource pool $ doMigration migr
return (PersistPool pool)
initPersistPool :: (MonadIO m, MonadBaseControl IO m)
=> With Sql.ConnectionPool (LogWrapper exts m) -> Sql.Migration
-> Initializer m exts (Persist ': exts)
initPersistPool with = initPersistPool' runLogWrapper with . Logging
initPersistPoolNoLog :: (MonadIO m, MonadBaseControl IO m)
=> With Sql.ConnectionPool (NoLoggingT m)
-> Sql.Migration -> Initializer m es (Persist ': es)
initPersistPoolNoLog with = initPersistPool' (const runNoLoggingT) with . Silent
doMigration :: (MonadIO m, MonadBaseControl IO m) => Migrator -> Sql.SqlBackend -> m ()
doMigration migr conn = case migr of
Logging m -> runReaderT (Sql.runMigration m) conn
Silent m -> runReaderT (void $ Sql.runMigrationSilent m) conn
Unsafe m -> runReaderT (Sql.runMigrationUnsafe m) conn
NoMigrate -> return ()
class RunSQL m where
runSql :: Sql.SqlPersistT m a -> m a
runSql' :: MonadBaseControl IO m => Sql.SqlPersistT m a -> Persist -> m a
runSql' a persist = case persist of
PersistPool p -> Sql.runSqlPool a p
PersistConn c -> Sql.runSqlConn a c
instance (Has Persist es, MonadExts es m, MonadBaseControl IO m) => RunSQL m where
runSql a = getExt (Proxy :: Proxy Persist) >>= runSql' a
instance (MonadBaseControl IO m) => RunSQL (ReaderT Persist m) where
runSql a = ask >>= runSql' a
sql :: (KnownSymbol k, Has Persist exts, MonadBaseControl IO actM, k Dict.</ prms)
-> proxy k
-> Sql.SqlPersistT (ActionT exts '[] actM) a
-> Filter exts actM m prms (k Dict.:= b ': prms)
sql doc k q p = focus (maybe id DocPrecondition doc) Nothing $ R.raw "sql" $ \d t ->
fmap p (runSql $ hoistReaderT (applyDict Dict.emptyDict) q) >>= \case
Nothing -> mzero
Just a -> return (Dict.add k a d, t)
hoistReaderT :: (forall b. m b -> n b) -> ReaderT r m a -> ReaderT r n a
hoistReaderT f m = ReaderT $ \b -> f (runReaderT m b)
# INLINE hoistReaderT #
|
9d928bf2be8912124ea0eca64a778884772879aee1238b0a03647238a4db5afb | TrustInSoft/tis-interpreter | zones.mli | Modified by TrustInSoft
(**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
This file is empty on purpose . Plugins register callbacks in src / kernel / db.ml .
This file is empty on purpose. Plugins register callbacks in src/kernel/db.ml.
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/scope/zones.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************ | Modified by TrustInSoft
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
This file is empty on purpose . Plugins register callbacks in src / kernel / db.ml .
This file is empty on purpose. Plugins register callbacks in src/kernel/db.ml.
*)
|
f058a5e6bc7d4a02c9821e136b9dc035fc3816f4f74ca7863c824ee6b1bc14ba | brevis-us/brevis | globals.clj | (ns us.brevis.globals
(:import [us.brevis.graphics BrCamera]))
(def enable-display-text (atom true))
(def default-gui-state {:fullscreen false
: camera ( BrCamera . 300 300 -50 90 -70 45 60 ( / 4 3 ) 0.1 4000 )
: camera ( BrCamera . 300 300 -50 162 -56 0 60 ( / 4 3 ) 0.1 4000 )
:camera (BrCamera. 100 50 -50 0 -90 0 60 640 480 0.1 4000)
:gui true
;:input (BrInput.)
: rot - x 90 : rot - y -90 : rot - z -45
: shift - x 300 : shift - y 300 : shift - z -50;-30
:last-report-time 0 :simulation-time 0})
(def #^:dynamic *gui-state* (atom default-gui-state))
(def #^:dynamic *gui-message-board* (atom (sorted-map)))
(def #^:dynamic *app-thread* (atom nil))
(def #^:dynamic *screenshot-filename* (atom nil))
(def #^:dynamic *simulation-state* (atom {}))
(def #^:dynamic *graphics* (atom {}))
(def destroy-hooks (atom []))
;(def #^:dynamic *brevis-params* (atom {}))
;(def #^:dynamic *brevis-state* (atom {}))
| null | https://raw.githubusercontent.com/brevis-us/brevis/de51c173279e82cca6d5990010144167050358a3/src/main/clojure/us/brevis/globals.clj | clojure | :input (BrInput.)
-30
(def #^:dynamic *brevis-params* (atom {}))
(def #^:dynamic *brevis-state* (atom {})) | (ns us.brevis.globals
(:import [us.brevis.graphics BrCamera]))
(def enable-display-text (atom true))
(def default-gui-state {:fullscreen false
: camera ( BrCamera . 300 300 -50 90 -70 45 60 ( / 4 3 ) 0.1 4000 )
: camera ( BrCamera . 300 300 -50 162 -56 0 60 ( / 4 3 ) 0.1 4000 )
:camera (BrCamera. 100 50 -50 0 -90 0 60 640 480 0.1 4000)
:gui true
: rot - x 90 : rot - y -90 : rot - z -45
:last-report-time 0 :simulation-time 0})
(def #^:dynamic *gui-state* (atom default-gui-state))
(def #^:dynamic *gui-message-board* (atom (sorted-map)))
(def #^:dynamic *app-thread* (atom nil))
(def #^:dynamic *screenshot-filename* (atom nil))
(def #^:dynamic *simulation-state* (atom {}))
(def #^:dynamic *graphics* (atom {}))
(def destroy-hooks (atom []))
|
81f66040c8b28d4d5326c5b2d5cc7fd1b91ba8baa56dde06adf56af7a86cb412 | KavehYousefi/Esoteric-programming-languages | types.lisp | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; This file serves in the declaration of the globally significant
;; types.
;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; -- Declaration of types. -- ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(deftype list-of (&optional (element-type T))
"The ``list-of'' type defines a list of zero or more elements, each
member of which conforms to the ELEMENT-TYPE, defaulting to the
comprehensive ``T''."
(let ((predicate (gensym)))
(declare (type symbol predicate))
(setf (symbol-function predicate)
#'(lambda (candidate)
(declare (type T candidate))
(and
(listp candidate)
(every
#'(lambda (element)
(declare (type T element))
(typep element element-type))
(the list candidate)))))
`(satisfies ,predicate)))
;;; -------------------------------------------------------
(deftype hash-table-of (&optional (key-type T) (value-type T))
"The ``hash-table-of'' type defines a hash table of zero or more
entries, each key of which conforms to the KEY-TYPE and associates
with a value of the VALUE-TYPE, both defaulting to the comprehensive
``T''."
(let ((predicate (gensym)))
(declare (type symbol predicate))
(setf (symbol-function predicate)
#'(lambda (candidate)
(declare (type T candidate))
(and
(hash-table-p candidate)
(loop
for key
of-type T
being the hash-keys in (the hash-table candidate)
using
(hash-value value)
always
(and (typep key key-type)
(typep value value-type))))))
`(satisfies ,predicate)))
;;; -------------------------------------------------------
(deftype attribute-map ()
"The ``attribute-map'' type defines a collection of node attributes in
the form of a hash table mapping which associated keyword symbol
attribute names to arbitrary values."
'(hash-table-of keyword T))
;;; -------------------------------------------------------
(deftype attribute-list ()
"The ``attribute-list'' type defines a list of node attributes in
terms of a property list, or plist, with each attribute name (key or
indicator) immediately succeeded by its associated attribute value
(property value), the former of which must be a keyword symbol,
whereas the latter may assume the generic type ``T''."
(let ((predicate (gensym)))
(setf (symbol-function predicate)
#'(lambda (candidate)
(declare (type T candidate))
(and
(listp candidate)
(evenp (length (the list candidate)))
(loop
for (indicator value)
of-type (T T)
on (the list candidate)
by #'cddr
always
(and (typep indicator 'keyword)
(typep value T))))))
`(satisfies ,predicate)))
;;; -------------------------------------------------------
(deftype node-list ()
"The ``node-list'' type defines a list of zero or more ``Node''
objects."
'(list-of Node))
;;; -------------------------------------------------------
(deftype set-operator ()
"The ``set-operator'' type enumerates the recognized binary set
operations."
'(member
:union
:intersection
:left-difference
:right-difference))
;;; -------------------------------------------------------
(deftype set-relationship ()
"The ``set-relationship'' type enumerates the recognized relationship
betwixt two sets, most commonly employed in the indagation of a
loop's continuation predicate."
'(member
:subset
:proper-subset
:not-subset
:superset
:proper-superset
:not-superset
:equal))
;;; -------------------------------------------------------
(deftype destination ()
"The ``destination'' type defines a sink for output operations,
enumerating, among others, the functions ``format'' and
``write-char''."
'(or null (eql T) stream string))
;;; -------------------------------------------------------
(deftype natural-number ()
"The ``natural-number'' type defines a positive integer with no upper
bourne, that is, a commorant of the range [1, +infinity], most
commonly employed in the context of set members."
'(integer 1 *))
;;; -------------------------------------------------------
(deftype number-list ()
"The ``number-list'' type defines a list of zero or more natural
numbers, that is, positive integers."
'(list-of natural-number))
| null | https://raw.githubusercontent.com/KavehYousefi/Esoteric-programming-languages/86116d6045f426dbe74f881b92944ad76df59c68/SOAP/SOAP_001/types.lisp | lisp |
This file serves in the declaration of the globally significant
types.
-- Declaration of types. -- ;;
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
------------------------------------------------------- |
(deftype list-of (&optional (element-type T))
"The ``list-of'' type defines a list of zero or more elements, each
member of which conforms to the ELEMENT-TYPE, defaulting to the
comprehensive ``T''."
(let ((predicate (gensym)))
(declare (type symbol predicate))
(setf (symbol-function predicate)
#'(lambda (candidate)
(declare (type T candidate))
(and
(listp candidate)
(every
#'(lambda (element)
(declare (type T element))
(typep element element-type))
(the list candidate)))))
`(satisfies ,predicate)))
(deftype hash-table-of (&optional (key-type T) (value-type T))
"The ``hash-table-of'' type defines a hash table of zero or more
entries, each key of which conforms to the KEY-TYPE and associates
with a value of the VALUE-TYPE, both defaulting to the comprehensive
``T''."
(let ((predicate (gensym)))
(declare (type symbol predicate))
(setf (symbol-function predicate)
#'(lambda (candidate)
(declare (type T candidate))
(and
(hash-table-p candidate)
(loop
for key
of-type T
being the hash-keys in (the hash-table candidate)
using
(hash-value value)
always
(and (typep key key-type)
(typep value value-type))))))
`(satisfies ,predicate)))
(deftype attribute-map ()
"The ``attribute-map'' type defines a collection of node attributes in
the form of a hash table mapping which associated keyword symbol
attribute names to arbitrary values."
'(hash-table-of keyword T))
(deftype attribute-list ()
"The ``attribute-list'' type defines a list of node attributes in
terms of a property list, or plist, with each attribute name (key or
indicator) immediately succeeded by its associated attribute value
(property value), the former of which must be a keyword symbol,
whereas the latter may assume the generic type ``T''."
(let ((predicate (gensym)))
(setf (symbol-function predicate)
#'(lambda (candidate)
(declare (type T candidate))
(and
(listp candidate)
(evenp (length (the list candidate)))
(loop
for (indicator value)
of-type (T T)
on (the list candidate)
by #'cddr
always
(and (typep indicator 'keyword)
(typep value T))))))
`(satisfies ,predicate)))
(deftype node-list ()
"The ``node-list'' type defines a list of zero or more ``Node''
objects."
'(list-of Node))
(deftype set-operator ()
"The ``set-operator'' type enumerates the recognized binary set
operations."
'(member
:union
:intersection
:left-difference
:right-difference))
(deftype set-relationship ()
"The ``set-relationship'' type enumerates the recognized relationship
betwixt two sets, most commonly employed in the indagation of a
loop's continuation predicate."
'(member
:subset
:proper-subset
:not-subset
:superset
:proper-superset
:not-superset
:equal))
(deftype destination ()
"The ``destination'' type defines a sink for output operations,
enumerating, among others, the functions ``format'' and
``write-char''."
'(or null (eql T) stream string))
(deftype natural-number ()
"The ``natural-number'' type defines a positive integer with no upper
bourne, that is, a commorant of the range [1, +infinity], most
commonly employed in the context of set members."
'(integer 1 *))
(deftype number-list ()
"The ``number-list'' type defines a list of zero or more natural
numbers, that is, positive integers."
'(list-of natural-number))
|
65ee4e700d62c98cfe275cfc749fa26906769e8a07e90b9c30fd72f70fd27002 | dizengrong/erlang_game | sales_test.erl | -module (sales_test).
-include ("sales.hrl").
-include_lib ("amnesia/include/amnesia.hrl").
-compile ([export_all]).
populate() ->
amnesia:open({local, sales}, sales),
{ok, Cust1} = amnesia:add_new (sales,
#customer {customer_code = 102341,
name = "John",
address = "XXXXX"}),
{ok, Cust2} = amnesia:add_new (sales,
#customer {customer_code = 394021,
name = "Corrado",
address = "YYYYYY",
email = "corrado@yyy"}),
{ok, Cust3} = amnesia:add_new (sales,
#customer {customer_code = 102391,
name = "Dave",
address = "Dave's home",
email = "dave@zzz"}),
{ok, P1} = amnesia:add_new (sales,
#product { product_code = "001",
description = "CPU Intel",
price = 231.10 }),
{ok, P2} = amnesia:add_new (sales,
#product { product_code = "002",
description = "Compact Flash 4G",
price = 57.90 }),
{ok, P3} = amnesia:add_new (sales,
#product { product_code = "003",
description = "Hard Disk 500G",
price = 190.77 }),
{ok, Order} = amnesia:add_new (sales,
#orders { order_number = 30,
order_date = {2008, 7, 17},
customer = Cust2 }),
amnesia:add_new (sales, #order_line { orders = Order,
product = P2,
quantity = 3 }),
amnesia:add_new (sales, #order_line { orders = Order,
product = P1,
quantity = 10 }),
amnesia:add_new (sales,
[#product { product_code = "004",
description = "Data Server",
price = 5200.00 },
#orders { order_number = 31,
customer = Cust1},
#order_line { orders = '$2',
product = P3,
quantity = 2} ,
#order_line { orders = '$2',
product = '$1',
quantity = 11 }
]),
ok.
test_join () ->
amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line]).
test_join (Pid) ->
amnesia:fetch (Pid, [customer, ?JOIN, orders, ?JOIN, order_line]).
test_connections () ->
{ok, [Order]} = amnesia:fetch (sales, orders, {"order_number = 31", []}),
io:format ("Order #31 is: ~p~n", [Order]),
{ok, OrderWithCust} = amnesia:load_referenced (sales, Order),
io:format ("Order #31 with customer explicited is: ~p~n", [OrderWithCust]),
{ok, OrderLines} = amnesia:load_referenced (sales, Order, order_line),
io:format ("The items of order #31 are: ~p~n", [OrderLines]),
OrderLinesWithProduct =
lists:map (fun (Line) ->
{ok, LineWithProduct} =
amnesia:load_referenced (sales, Line),
LineWithProduct
end, OrderLines),
io:format ("The items of order #31, with products explicited, are:~n~p~n",
[OrderLinesWithProduct]),
ok.
test_fetch () ->
{ok, X1} = amnesia:fetch (sales, customer),
io:format ("SIMPLE FETCH = ~p~n~n", [X1]),
{ok, X2} = amnesia:fetch (sales,
[customer, ?JOIN, orders, ?JOIN, order_line]),
io:format ("FETCH WITH JOINS = ~p~n~n", [X2]),
{ok, X3} = amnesia:fetch (sales, orders, {"order_number = $1", [30]}),
io:format ("SIMPLE FETCH WITH SELECTION = ~p~n~n", [X3]),
{ok, X4} = amnesia:fetch (sales,
[customer, ?JOIN, orders, ?JOIN, order_line],
{"name = $1", ["Corrado"]}),
io:format ("FETCH WITH JOINS AND SELECTION = ~p~n~n", [X4]),
{ok, X5} = amnesia:fetch (sales, customer,
{}, [{order_by, name}]),
io:format ("SIMPLE FETCH WITH ORDERING = ~p~n~n", [X5]),
{ok, X6} = amnesia:fetch (sales,
[customer, ?JOIN, orders],
{}, [{order_by, order_number}]),
io:format ("FETCH WITH JOINS AND ORDERING = ~p~n~n", [X6]),
ok.
test_aggregate() ->
{ok, X1} = amnesia:fetch (sales, customer, {},
[{aggregate, "count(*)", integer}]),
io:format ("SIMPLE COUNT = ~p~n~n", [X1]),
{ok, X2} = amnesia:fetch (sales, product, {},
[{aggregate, "max(price)", decimal}]),
io:format ("SIMPLE MAX = ~p~n~n", [X2]),
{ok, X3} = amnesia:fetch (sales, product, {},
[{aggregate, "count(*)",
integer, product_code}]),
io:format ("COUNT WITH AGGREGATION (GROUP BY) = ~p~n~n", [X3]),
{ok, X4} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},
[{aggregate, "sum(quantity)",
integer, product_code}]),
io:format ("COUNT WITH AGGREGATION (GROUP BY) AND JOIN = ~p~n~n", [X4]),
{ok, X5} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},
[{aggregate, "sum(quantity)",
integer, product_code,
{"__aggregated_data__ > $1", [5]}}]),
io:format ("COUNT WITH AGGREGATION (GROUP BY), JOIN AND HAVING= ~p~n~n",
[X5]),
{ok, X6} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},
[{aggregate, "sum(quantity)",
integer, product_code},
{order_by, '__aggregated_data__', desc}]),
io:format ("COUNT WITH AGGREGATION (GROUP BY), JOIN AND ORDERING= ~p~n~n",
[X6]),
{ok, X7} = amnesia:fetch (sales,
[product, ?JOIN, order_line, ?JOIN, orders], {},
[{aggregate, "sum(quantity * price)",
decimal, order_number}]),
io:format ("~p~n~n",
[X7]),
X7.
test_cursor () ->
{ok, CursorID} =
amnesia:create_cursor (
sales,
amnesia:fetch (sales,
[customer, ?JOIN, orders, ?JOIN, order_line] )),
io:format ("CURSOR ID = ~p~n~n", [CursorID]),
show_cursor_data (CursorID, 1).
show_cursor_data (CursorID, N) ->
case amnesia:nth (sales, CursorID, N) of
{end_of_data} -> amnesia:delete_cursor (sales, CursorID);
{ok, X} ->
io:format ("Item #~p = ~p~n~n", [N, X]),
show_cursor_data (CursorID, N + 1)
end.
| null | https://raw.githubusercontent.com/dizengrong/erlang_game/4598f97daa9ca5eecff292ac401dd8f903eea867/gerl/lib/amnesia/examples/sales_test.erl | erlang | -module (sales_test).
-include ("sales.hrl").
-include_lib ("amnesia/include/amnesia.hrl").
-compile ([export_all]).
populate() ->
amnesia:open({local, sales}, sales),
{ok, Cust1} = amnesia:add_new (sales,
#customer {customer_code = 102341,
name = "John",
address = "XXXXX"}),
{ok, Cust2} = amnesia:add_new (sales,
#customer {customer_code = 394021,
name = "Corrado",
address = "YYYYYY",
email = "corrado@yyy"}),
{ok, Cust3} = amnesia:add_new (sales,
#customer {customer_code = 102391,
name = "Dave",
address = "Dave's home",
email = "dave@zzz"}),
{ok, P1} = amnesia:add_new (sales,
#product { product_code = "001",
description = "CPU Intel",
price = 231.10 }),
{ok, P2} = amnesia:add_new (sales,
#product { product_code = "002",
description = "Compact Flash 4G",
price = 57.90 }),
{ok, P3} = amnesia:add_new (sales,
#product { product_code = "003",
description = "Hard Disk 500G",
price = 190.77 }),
{ok, Order} = amnesia:add_new (sales,
#orders { order_number = 30,
order_date = {2008, 7, 17},
customer = Cust2 }),
amnesia:add_new (sales, #order_line { orders = Order,
product = P2,
quantity = 3 }),
amnesia:add_new (sales, #order_line { orders = Order,
product = P1,
quantity = 10 }),
amnesia:add_new (sales,
[#product { product_code = "004",
description = "Data Server",
price = 5200.00 },
#orders { order_number = 31,
customer = Cust1},
#order_line { orders = '$2',
product = P3,
quantity = 2} ,
#order_line { orders = '$2',
product = '$1',
quantity = 11 }
]),
ok.
test_join () ->
amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line]).
test_join (Pid) ->
amnesia:fetch (Pid, [customer, ?JOIN, orders, ?JOIN, order_line]).
test_connections () ->
{ok, [Order]} = amnesia:fetch (sales, orders, {"order_number = 31", []}),
io:format ("Order #31 is: ~p~n", [Order]),
{ok, OrderWithCust} = amnesia:load_referenced (sales, Order),
io:format ("Order #31 with customer explicited is: ~p~n", [OrderWithCust]),
{ok, OrderLines} = amnesia:load_referenced (sales, Order, order_line),
io:format ("The items of order #31 are: ~p~n", [OrderLines]),
OrderLinesWithProduct =
lists:map (fun (Line) ->
{ok, LineWithProduct} =
amnesia:load_referenced (sales, Line),
LineWithProduct
end, OrderLines),
io:format ("The items of order #31, with products explicited, are:~n~p~n",
[OrderLinesWithProduct]),
ok.
test_fetch () ->
{ok, X1} = amnesia:fetch (sales, customer),
io:format ("SIMPLE FETCH = ~p~n~n", [X1]),
{ok, X2} = amnesia:fetch (sales,
[customer, ?JOIN, orders, ?JOIN, order_line]),
io:format ("FETCH WITH JOINS = ~p~n~n", [X2]),
{ok, X3} = amnesia:fetch (sales, orders, {"order_number = $1", [30]}),
io:format ("SIMPLE FETCH WITH SELECTION = ~p~n~n", [X3]),
{ok, X4} = amnesia:fetch (sales,
[customer, ?JOIN, orders, ?JOIN, order_line],
{"name = $1", ["Corrado"]}),
io:format ("FETCH WITH JOINS AND SELECTION = ~p~n~n", [X4]),
{ok, X5} = amnesia:fetch (sales, customer,
{}, [{order_by, name}]),
io:format ("SIMPLE FETCH WITH ORDERING = ~p~n~n", [X5]),
{ok, X6} = amnesia:fetch (sales,
[customer, ?JOIN, orders],
{}, [{order_by, order_number}]),
io:format ("FETCH WITH JOINS AND ORDERING = ~p~n~n", [X6]),
ok.
test_aggregate() ->
{ok, X1} = amnesia:fetch (sales, customer, {},
[{aggregate, "count(*)", integer}]),
io:format ("SIMPLE COUNT = ~p~n~n", [X1]),
{ok, X2} = amnesia:fetch (sales, product, {},
[{aggregate, "max(price)", decimal}]),
io:format ("SIMPLE MAX = ~p~n~n", [X2]),
{ok, X3} = amnesia:fetch (sales, product, {},
[{aggregate, "count(*)",
integer, product_code}]),
io:format ("COUNT WITH AGGREGATION (GROUP BY) = ~p~n~n", [X3]),
{ok, X4} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},
[{aggregate, "sum(quantity)",
integer, product_code}]),
io:format ("COUNT WITH AGGREGATION (GROUP BY) AND JOIN = ~p~n~n", [X4]),
{ok, X5} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},
[{aggregate, "sum(quantity)",
integer, product_code,
{"__aggregated_data__ > $1", [5]}}]),
io:format ("COUNT WITH AGGREGATION (GROUP BY), JOIN AND HAVING= ~p~n~n",
[X5]),
{ok, X6} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},
[{aggregate, "sum(quantity)",
integer, product_code},
{order_by, '__aggregated_data__', desc}]),
io:format ("COUNT WITH AGGREGATION (GROUP BY), JOIN AND ORDERING= ~p~n~n",
[X6]),
{ok, X7} = amnesia:fetch (sales,
[product, ?JOIN, order_line, ?JOIN, orders], {},
[{aggregate, "sum(quantity * price)",
decimal, order_number}]),
io:format ("~p~n~n",
[X7]),
X7.
test_cursor () ->
{ok, CursorID} =
amnesia:create_cursor (
sales,
amnesia:fetch (sales,
[customer, ?JOIN, orders, ?JOIN, order_line] )),
io:format ("CURSOR ID = ~p~n~n", [CursorID]),
show_cursor_data (CursorID, 1).
show_cursor_data (CursorID, N) ->
case amnesia:nth (sales, CursorID, N) of
{end_of_data} -> amnesia:delete_cursor (sales, CursorID);
{ok, X} ->
io:format ("Item #~p = ~p~n~n", [N, X]),
show_cursor_data (CursorID, N + 1)
end.
|
|
eafead00353d62cf3f87cb2ce301404270a1b20635a5474485f77056f0028da7 | SuzanneSoy/anaphoric | acond-test.rkt | #lang racket
(require anaphoric/acond
rackunit)
(define lst '(x y z a b c))
(define seen 0)
;; With else branch
(check-equal? (acond
[(member 'a lst) (set! seen (add1 seen))
(check-equal? it '(a b c))
'seen-01]
[(member 'b lst) (fail "acond selected wrong branch")]
[else (fail "acond selected wrong branch")])
'seen-01)
(check-equal? seen 1) ;; multiple body statements
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'b lst) (begin (check-equal? it '(b c))
'seen-02)]
[else (fail "acond selected wrong branch")])
'seen-02)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'absent2 lst) (fail "acond selected wrong branch")]
[else 'seen-03])
'seen-03)
;; Just else branch
(check-equal? (acond
[else 'seen-04])
'seen-04)
;; Multiple body statements
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'absent2 lst) (fail "acond selected wrong branch")]
[else (set! seen (add1 seen))
'seen-05])
'seen-05)
(check-equal? seen 2)
;; Without else branch
(check-equal? (acond
[(member 'a lst) (set! seen (add1 seen))
(check-equal? it '(a b c))
'seen-06]
[(member 'b lst) (fail "acond selected wrong branch")])
'seen-06)
(check-equal? seen 3)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'b lst) (begin (check-equal? it '(b c))
'seen-07)])
'seen-07)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'absent2 lst) (fail "acond selected wrong branch")])
(void))
;; No branch
(check-equal? (acond)
(void))
;; Single branch
(check-equal? (acond
[(member 'a lst) (begin (check-equal? it '(a b c))
'seen-09)])
'seen-09)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")])
(void)) | null | https://raw.githubusercontent.com/SuzanneSoy/anaphoric/c648ec2aad6d2b2ec72acc729143454d1e855cf6/test/acond-test.rkt | racket | With else branch
multiple body statements
Just else branch
Multiple body statements
Without else branch
No branch
Single branch | #lang racket
(require anaphoric/acond
rackunit)
(define lst '(x y z a b c))
(define seen 0)
(check-equal? (acond
[(member 'a lst) (set! seen (add1 seen))
(check-equal? it '(a b c))
'seen-01]
[(member 'b lst) (fail "acond selected wrong branch")]
[else (fail "acond selected wrong branch")])
'seen-01)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'b lst) (begin (check-equal? it '(b c))
'seen-02)]
[else (fail "acond selected wrong branch")])
'seen-02)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'absent2 lst) (fail "acond selected wrong branch")]
[else 'seen-03])
'seen-03)
(check-equal? (acond
[else 'seen-04])
'seen-04)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'absent2 lst) (fail "acond selected wrong branch")]
[else (set! seen (add1 seen))
'seen-05])
'seen-05)
(check-equal? seen 2)
(check-equal? (acond
[(member 'a lst) (set! seen (add1 seen))
(check-equal? it '(a b c))
'seen-06]
[(member 'b lst) (fail "acond selected wrong branch")])
'seen-06)
(check-equal? seen 3)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'b lst) (begin (check-equal? it '(b c))
'seen-07)])
'seen-07)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")]
[(member 'absent2 lst) (fail "acond selected wrong branch")])
(void))
(check-equal? (acond)
(void))
(check-equal? (acond
[(member 'a lst) (begin (check-equal? it '(a b c))
'seen-09)])
'seen-09)
(check-equal? (acond
[(member 'absent lst) (fail "acond selected wrong branch")])
(void)) |
235f8bad04cb0fa799bd2d2e5e0ee94427f26199188a1fdbedbbe8c0b24e937e | dyzsr/ocaml-selectml | t330-compact-2.ml | TEST
include tool - ocaml - lib
flags = " -w -a "
ocaml_script_as_argument = " true "
* setup - ocaml - build - env
* *
include tool-ocaml-lib
flags = "-w -a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)
open Lib;;
Gc.compact ();;
let _ = Pervasives.do_at_exit();;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4 , 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 End_of_file
80 MAKEBLOCK1 0
82 RAISE
83
84 PUSHACC1
85
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2 , 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL " really_input "
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4 , 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454
455 " input "
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL < 0>(0 , < 0>(6 , 0 ) )
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL < 0>(0 , < 0>(7 , 0 ) )
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528
529 BRANCHIFNOT 536
531 GETGLOBAL " output "
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(6 , 0 ) ) ) )
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(7 , 0 ) ) ) )
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL " % .12 g "
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL " % d "
595 C_CALL2 format_int
597 RETURN 1
599 " false "
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 " true "
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 " bool_of_string "
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 " true "
629 RETURN 1
631 " false "
633 RETURN 1
635
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 " char_of_int "
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0 , 740
749 PUSH
750 CLOSURE 0 , 734
753 PUSHGETGLOBAL " Pervasives . Exit "
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL " Pervasives . Assert_failure "
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0 , 720
765 PUSH
766 CLOSURE 0 , 705
769 PUSH
770 CLOSURE 0 , 692
773 PUSH
774 CLOSURE 0 , 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0 , 655
800 PUSHACC 9
802 CLOSURE 1 , 635
805 PUSH
806 CLOSURE 0 , 624
809 PUSHACC 11
811 CLOSURE 1 , 599
814 PUSH
815 CLOSURE 0 , 592
818 PUSH
819 CLOSURE 0 , 585
822 PUSH
823 CLOSUREREC 0 , 12
827
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0 , 574
840 PUSHACC0
841 CLOSURE 1 , 565
844 PUSHACC1
845 CLOSURE 1 , 557
848 PUSH
849 CLOSURE 0 , 545
852 PUSHACC 22
854 CLOSURE 1 , 515
857 PUSH
858 CLOSURE 0 , 505
861 PUSH
862 CLOSURE 0 , 496
865 PUSH
866 CLOSURE 0 , 485
869 PUSHACC0
870 CLOSURE 1 , 477
873 PUSHACC1
874 CLOSURE 1 , 470
877 PUSHACC 28
879 CLOSURE 1 , 441
882 PUSH
883 CLOSUREREC 0 , 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2 , 411
893 PUSHACC 22
895 CLOSUREREC 1 , 70
899 ACC 15
901 CLOSURE 1 , 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2 , 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3 , 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3 , 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2 , 374
936 PUSHACC 20
938 CLOSURE 1 , 364
941 PUSHACC 20
943 CLOSURE 1 , 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2 , 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3 , 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3 , 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2 , 325
978 PUSHACC 25
980 CLOSURE 1 , 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3 , 308
992 PUSHACC0
993 CLOSURE 1 , 301
996 PUSHACC1
997 CLOSURE 1 , 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2 , 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1 , 275
1013 PUSHACC1
1014 CLOSURE 1 , 263
1017 PUSHACC0
1018 CLOSURE 1 , 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0 , 247
1030 PUSH
1031 CLOSURE 0 , 241
1034 PUSH
1035 CLOSURE 0 , 236
1038 PUSH
1039 CLOSURE 0 , 231
1042 PUSH
1043 CLOSURE 0 , 223
1046 PUSH
1047 CLOSURE 0 , 217
1050 PUSH
1051 CLOSURE 0 , 212
1054 PUSH
1055 CLOSURE 0 , 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0 , 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0 , 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0 , 188
1084 PUSH
1085 CLOSURE 0 , 183
1088 PUSH
1089 CLOSURE 0 , 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0 , 166
1098 PUSH
1099 CLOSURE 0 , 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0 , 148
1110 PUSH
1111 CLOSURE 0 , 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69 , 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 CONST0
1198 C_CALL1 gc_compaction
1200 CONST0
1201 PUSHGETGLOBALFIELD Pervasives , 68
1204 APPLY1
1205 ATOM0
1206 SETGLOBAL T330 - compact-2
1208 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39 CONST0
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 GETGLOBAL End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4, 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 GETGLOBAL End_of_file
80 MAKEBLOCK1 0
82 RAISE
83 CONST0
84 PUSHACC1
85 GTINT
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118 CONST0
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2, 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL "really_input"
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4, 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454 GTINT
455 BRANCHIFNOT 462
457 GETGLOBAL "input"
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL <0>(0, <0>(6, 0))
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL <0>(0, <0>(7, 0))
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528 GTINT
529 BRANCHIFNOT 536
531 GETGLOBAL "output"
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL "%.12g"
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL "%d"
595 C_CALL2 format_int
597 RETURN 1
599 GETGLOBAL "false"
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 GETGLOBAL "true"
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 GETGLOBAL "bool_of_string"
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 GETGLOBAL "true"
629 RETURN 1
631 GETGLOBAL "false"
633 RETURN 1
635 CONST0
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640 CONSTINT 255
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 GETGLOBAL "char_of_int"
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 CONSTINT -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692 CONST0
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735 PUSHGETGLOBAL Invalid_argument
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0, 740
749 PUSH
750 CLOSURE 0, 734
753 PUSHGETGLOBAL "Pervasives.Exit"
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL "Pervasives.Assert_failure"
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0, 720
765 PUSH
766 CLOSURE 0, 705
769 PUSH
770 CLOSURE 0, 692
773 PUSH
774 CLOSURE 0, 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0, 655
800 PUSHACC 9
802 CLOSURE 1, 635
805 PUSH
806 CLOSURE 0, 624
809 PUSHACC 11
811 CLOSURE 1, 599
814 PUSH
815 CLOSURE 0, 592
818 PUSH
819 CLOSURE 0, 585
822 PUSH
823 CLOSUREREC 0, 12
827 CONST0
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0, 574
840 PUSHACC0
841 CLOSURE 1, 565
844 PUSHACC1
845 CLOSURE 1, 557
848 PUSH
849 CLOSURE 0, 545
852 PUSHACC 22
854 CLOSURE 1, 515
857 PUSH
858 CLOSURE 0, 505
861 PUSH
862 CLOSURE 0, 496
865 PUSH
866 CLOSURE 0, 485
869 PUSHACC0
870 CLOSURE 1, 477
873 PUSHACC1
874 CLOSURE 1, 470
877 PUSHACC 28
879 CLOSURE 1, 441
882 PUSH
883 CLOSUREREC 0, 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2, 411
893 PUSHACC 22
895 CLOSUREREC 1, 70
899 ACC 15
901 CLOSURE 1, 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2, 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3, 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3, 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2, 374
936 PUSHACC 20
938 CLOSURE 1, 364
941 PUSHACC 20
943 CLOSURE 1, 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2, 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3, 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3, 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2, 325
978 PUSHACC 25
980 CLOSURE 1, 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3, 308
992 PUSHACC0
993 CLOSURE 1, 301
996 PUSHACC1
997 CLOSURE 1, 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2, 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1, 275
1013 PUSHACC1
1014 CLOSURE 1, 263
1017 PUSHACC0
1018 CLOSURE 1, 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0, 247
1030 PUSH
1031 CLOSURE 0, 241
1034 PUSH
1035 CLOSURE 0, 236
1038 PUSH
1039 CLOSURE 0, 231
1042 PUSH
1043 CLOSURE 0, 223
1046 PUSH
1047 CLOSURE 0, 217
1050 PUSH
1051 CLOSURE 0, 212
1054 PUSH
1055 CLOSURE 0, 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0, 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0, 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0, 188
1084 PUSH
1085 CLOSURE 0, 183
1088 PUSH
1089 CLOSURE 0, 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0, 166
1098 PUSH
1099 CLOSURE 0, 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0, 148
1110 PUSH
1111 CLOSURE 0, 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69, 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 CONST0
1198 C_CALL1 gc_compaction
1200 CONST0
1201 PUSHGETGLOBALFIELD Pervasives, 68
1204 APPLY1
1205 ATOM0
1206 SETGLOBAL T330-compact-2
1208 STOP
**)
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/testsuite/tests/tool-ocaml/t330-compact-2.ml | ocaml | TEST
include tool - ocaml - lib
flags = " -w -a "
ocaml_script_as_argument = " true "
* setup - ocaml - build - env
* *
include tool-ocaml-lib
flags = "-w -a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)
open Lib;;
Gc.compact ();;
let _ = Pervasives.do_at_exit();;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4 , 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 End_of_file
80 MAKEBLOCK1 0
82 RAISE
83
84 PUSHACC1
85
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2 , 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL " really_input "
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4 , 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454
455 " input "
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL < 0>(0 , < 0>(6 , 0 ) )
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL < 0>(0 , < 0>(7 , 0 ) )
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528
529 BRANCHIFNOT 536
531 GETGLOBAL " output "
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(6 , 0 ) ) ) )
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(7 , 0 ) ) ) )
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL " % .12 g "
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL " % d "
595 C_CALL2 format_int
597 RETURN 1
599 " false "
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 " true "
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 " bool_of_string "
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 " true "
629 RETURN 1
631 " false "
633 RETURN 1
635
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 " char_of_int "
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0 , 740
749 PUSH
750 CLOSURE 0 , 734
753 PUSHGETGLOBAL " Pervasives . Exit "
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL " Pervasives . Assert_failure "
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0 , 720
765 PUSH
766 CLOSURE 0 , 705
769 PUSH
770 CLOSURE 0 , 692
773 PUSH
774 CLOSURE 0 , 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0 , 655
800 PUSHACC 9
802 CLOSURE 1 , 635
805 PUSH
806 CLOSURE 0 , 624
809 PUSHACC 11
811 CLOSURE 1 , 599
814 PUSH
815 CLOSURE 0 , 592
818 PUSH
819 CLOSURE 0 , 585
822 PUSH
823 CLOSUREREC 0 , 12
827
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0 , 574
840 PUSHACC0
841 CLOSURE 1 , 565
844 PUSHACC1
845 CLOSURE 1 , 557
848 PUSH
849 CLOSURE 0 , 545
852 PUSHACC 22
854 CLOSURE 1 , 515
857 PUSH
858 CLOSURE 0 , 505
861 PUSH
862 CLOSURE 0 , 496
865 PUSH
866 CLOSURE 0 , 485
869 PUSHACC0
870 CLOSURE 1 , 477
873 PUSHACC1
874 CLOSURE 1 , 470
877 PUSHACC 28
879 CLOSURE 1 , 441
882 PUSH
883 CLOSUREREC 0 , 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2 , 411
893 PUSHACC 22
895 CLOSUREREC 1 , 70
899 ACC 15
901 CLOSURE 1 , 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2 , 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3 , 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3 , 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2 , 374
936 PUSHACC 20
938 CLOSURE 1 , 364
941 PUSHACC 20
943 CLOSURE 1 , 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2 , 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3 , 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3 , 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2 , 325
978 PUSHACC 25
980 CLOSURE 1 , 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3 , 308
992 PUSHACC0
993 CLOSURE 1 , 301
996 PUSHACC1
997 CLOSURE 1 , 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2 , 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1 , 275
1013 PUSHACC1
1014 CLOSURE 1 , 263
1017 PUSHACC0
1018 CLOSURE 1 , 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0 , 247
1030 PUSH
1031 CLOSURE 0 , 241
1034 PUSH
1035 CLOSURE 0 , 236
1038 PUSH
1039 CLOSURE 0 , 231
1042 PUSH
1043 CLOSURE 0 , 223
1046 PUSH
1047 CLOSURE 0 , 217
1050 PUSH
1051 CLOSURE 0 , 212
1054 PUSH
1055 CLOSURE 0 , 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0 , 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0 , 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0 , 188
1084 PUSH
1085 CLOSURE 0 , 183
1088 PUSH
1089 CLOSURE 0 , 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0 , 166
1098 PUSH
1099 CLOSURE 0 , 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0 , 148
1110 PUSH
1111 CLOSURE 0 , 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69 , 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 CONST0
1198 C_CALL1 gc_compaction
1200 CONST0
1201 PUSHGETGLOBALFIELD Pervasives , 68
1204 APPLY1
1205 ATOM0
1206 SETGLOBAL T330 - compact-2
1208 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39 CONST0
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 GETGLOBAL End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4, 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 GETGLOBAL End_of_file
80 MAKEBLOCK1 0
82 RAISE
83 CONST0
84 PUSHACC1
85 GTINT
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118 CONST0
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2, 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL "really_input"
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4, 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454 GTINT
455 BRANCHIFNOT 462
457 GETGLOBAL "input"
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL <0>(0, <0>(6, 0))
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL <0>(0, <0>(7, 0))
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528 GTINT
529 BRANCHIFNOT 536
531 GETGLOBAL "output"
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL "%.12g"
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL "%d"
595 C_CALL2 format_int
597 RETURN 1
599 GETGLOBAL "false"
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 GETGLOBAL "true"
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 GETGLOBAL "bool_of_string"
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 GETGLOBAL "true"
629 RETURN 1
631 GETGLOBAL "false"
633 RETURN 1
635 CONST0
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640 CONSTINT 255
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 GETGLOBAL "char_of_int"
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 CONSTINT -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692 CONST0
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735 PUSHGETGLOBAL Invalid_argument
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0, 740
749 PUSH
750 CLOSURE 0, 734
753 PUSHGETGLOBAL "Pervasives.Exit"
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL "Pervasives.Assert_failure"
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0, 720
765 PUSH
766 CLOSURE 0, 705
769 PUSH
770 CLOSURE 0, 692
773 PUSH
774 CLOSURE 0, 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0, 655
800 PUSHACC 9
802 CLOSURE 1, 635
805 PUSH
806 CLOSURE 0, 624
809 PUSHACC 11
811 CLOSURE 1, 599
814 PUSH
815 CLOSURE 0, 592
818 PUSH
819 CLOSURE 0, 585
822 PUSH
823 CLOSUREREC 0, 12
827 CONST0
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0, 574
840 PUSHACC0
841 CLOSURE 1, 565
844 PUSHACC1
845 CLOSURE 1, 557
848 PUSH
849 CLOSURE 0, 545
852 PUSHACC 22
854 CLOSURE 1, 515
857 PUSH
858 CLOSURE 0, 505
861 PUSH
862 CLOSURE 0, 496
865 PUSH
866 CLOSURE 0, 485
869 PUSHACC0
870 CLOSURE 1, 477
873 PUSHACC1
874 CLOSURE 1, 470
877 PUSHACC 28
879 CLOSURE 1, 441
882 PUSH
883 CLOSUREREC 0, 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2, 411
893 PUSHACC 22
895 CLOSUREREC 1, 70
899 ACC 15
901 CLOSURE 1, 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2, 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3, 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3, 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2, 374
936 PUSHACC 20
938 CLOSURE 1, 364
941 PUSHACC 20
943 CLOSURE 1, 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2, 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3, 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3, 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2, 325
978 PUSHACC 25
980 CLOSURE 1, 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3, 308
992 PUSHACC0
993 CLOSURE 1, 301
996 PUSHACC1
997 CLOSURE 1, 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2, 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1, 275
1013 PUSHACC1
1014 CLOSURE 1, 263
1017 PUSHACC0
1018 CLOSURE 1, 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0, 247
1030 PUSH
1031 CLOSURE 0, 241
1034 PUSH
1035 CLOSURE 0, 236
1038 PUSH
1039 CLOSURE 0, 231
1042 PUSH
1043 CLOSURE 0, 223
1046 PUSH
1047 CLOSURE 0, 217
1050 PUSH
1051 CLOSURE 0, 212
1054 PUSH
1055 CLOSURE 0, 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0, 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0, 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0, 188
1084 PUSH
1085 CLOSURE 0, 183
1088 PUSH
1089 CLOSURE 0, 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0, 166
1098 PUSH
1099 CLOSURE 0, 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0, 148
1110 PUSH
1111 CLOSURE 0, 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69, 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 CONST0
1198 C_CALL1 gc_compaction
1200 CONST0
1201 PUSHGETGLOBALFIELD Pervasives, 68
1204 APPLY1
1205 ATOM0
1206 SETGLOBAL T330-compact-2
1208 STOP
**)
|
|
1066710e01cdda0f4ce9c743f62c21693a49348e3eff5e199fadcac5b51e17b5 | shirok/WiLiKi | rss.scm | ;;;
wiliki / rss - an ad - hoc RSS generation routine for WiLiKi
;;;
Copyright ( c ) 2000 - 2009 < >
;;;
;;; Permission is hereby granted, free of charge, to any person
;;; obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without restriction ,
;;; including without limitation the rights to use, copy, modify,
;;; merge, publish, distribute, sublicense, and/or sell copies of
the Software , and to permit persons to whom the Software is
;;; furnished to do so, subject to the following conditions:
;;;
;;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
;;; OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN
;;; AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
;;; OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
;;; IN THE SOFTWARE.
;;;
;; In future, this might be rewritten to use proper XML framework.
;; for now, I use an ad-hoc approach.
(define-module wiliki.rss
(use file.util)
(use text.html-lite)
(use text.tree)
(use util.match)
(use wiliki.core)
(export rss-page rss-item-count rss-item-description rss-item-extra-elements
rss-partial-content-lines rss-source rss-url-format))
(select-module wiliki.rss)
;; Parameters
# of items included in the RSS
(define rss-item-count (make-parameter 15))
;; What to include in the 'rdf:description' of each item.
;; none - omit rdf:description
;; raw - raw wiki-marked up text.
;; html - html rendered text. (heavy)
(define rss-item-description (make-parameter 'none))
;; # of maximum lines in the original wiki format to be included
;; in the partial content (raw-partial, html-partial).
(define rss-partial-content-lines (make-parameter 20))
;; A procedure that takes maximum # of entries, and returns a list
of entries to be included in the RSS . The returned list should be
;; in the following form:
;; <entries> : (<entry> ...)
;; <entry> : (<key> . <timestamp>) | ((<key> . <title>) . <timestamp>)
(define rss-source
(make-parameter (cut wiliki:recent-changes-alist :length <>)))
Whether the url in RSS should be in the format of url?key or url / key
(define rss-url-format (make-parameter 'query))
;; If not #f, this is inserted as is into each <item>...</item>
(define rss-item-extra-elements (make-parameter #f))
;; Main entry
(define (rss-page :key
(count (rss-item-count))
(item-description #f))
(rss-format ((rss-source) count)
(case (or item-description (rss-item-description))
[(raw) (cut raw-content <> #f)]
[(raw-partial) (cut raw-content <> #t)]
[(html) (cut html-content <> #f)]
[(html-partial) (cut html-content <> #t)]
[else (^_ "")])))
(define (rss-format entries item-description-proc)
(let* ([self (wiliki)]
[full-url (wiliki:url :full)])
`("Content-type: text/xml\n\n"
"<?xml version=\"1.0\" encoding=\"" ,(wiliki:output-charset) "\" ?>\n"
"<rdf:RDF
xmlns:rdf=\"-rdf-syntax-ns#\"
xmlns=\"/\"
xmlns:dc=\"/\"
xmlns:content=\"/\"
>\n"
,(rdf-channel
(wiliki:url :full)
(rdf-title (ref (wiliki)'title))
(rdf-link full-url)
(rdf-description (ref (wiliki)'description))
(rdf-items-seq (map (^e (rdf-li (entry->url e))) entries)))
,(map (^e (let1 url (entry->url e)
(rdf-item url
(rdf-title (entry->title e))
(rdf-link url)
(item-description-proc (entry->key e))
(dc-date (entry->timestamp e))
(or (rss-item-extra-elements) "")
)))
entries)
"</rdf:RDF>\n")))
(define (raw-content entry partial?)
(if-let1 page (wiliki:db-get entry)
(rdf-description (trim-content (ref page 'content) partial?))
""))
(define (html-content entry partial?)
(if-let1 page (wiliki:db-get entry)
($ rdf-content $ tree->string $ map wiliki:sxml->stree
$ wiliki:format-content $ trim-content (~ page'content) partial?)
""))
(define (trim-content raw-text partial?)
(if partial?
(string-join (take* (string-split raw-text "\n")
(rss-partial-content-lines))
"\n")
raw-text))
(define (entry->url entry)
(case (rss-url-format)
[(query) (wiliki:url :full "~a" (entry->key entry))]
[(path) (build-path (wiliki:url :full) (entry->key entry))]
[else (wiliki:url :full "config-error:invalid-rss-url-format")]))
(define (entry->title entry)
(match entry [((key . title) . _) title] [(key . _) key]))
(define (entry->key entry)
(match entry [((key . title) . _) key] [(key . _) key]))
(define (entry->timestamp entry) (cdr entry))
RDF rendering utilities .
NB : these should be implemented within xml framework
(define (rdf-channel about . content)
`("<channel rdf:about=\"" ,(html-escape-string about) "\">"
,@content
"\n</channel>\n"))
(define (rdf-li resource)
`("<rdf:li rdf:resource=\"" ,(html-escape-string resource) "\" />\n"))
(define (rdf-simple tag . content)
`("<" ,tag ">" ,@content "</" ,tag ">\n"))
(define (rdf-item about . content)
`("<item rdf:about=\"" ,(html-escape-string about) "\">"
,@content
"</item>\n"))
(define (rdf-items-seq . items)
`("<items><rdf:Seq>" ,@items "</rdf:Seq></items>\n"))
(define (rdf-simple-1 tag content)
`("<" ,tag ">" ,(html-escape-string content) "</" ,tag ">\n"))
(define (rdf-title title) (rdf-simple-1 "title" title))
(define (rdf-link link) (rdf-simple-1 "link" link))
(define (rdf-description desc) (rdf-simple-1 "description" desc))
(define (rdf-content content)
`("<content:encoded><![CDATA["
,(regexp-replace-all #/\]\]>/ content "]]]]><![CDATA[>")
"]]></content:encoded>"))
(define (dc-date secs)
(rdf-simple-1 "dc:date"
(sys-strftime "%Y-%m-%dT%H:%M:%S+00:00" (sys-gmtime secs))))
| null | https://raw.githubusercontent.com/shirok/WiLiKi/c910d5d936c833887f7c7bc99e0e681e262b5334/src/wiliki/rss.scm | scheme |
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
In future, this might be rewritten to use proper XML framework.
for now, I use an ad-hoc approach.
Parameters
What to include in the 'rdf:description' of each item.
none - omit rdf:description
raw - raw wiki-marked up text.
html - html rendered text. (heavy)
# of maximum lines in the original wiki format to be included
in the partial content (raw-partial, html-partial).
A procedure that takes maximum # of entries, and returns a list
in the following form:
<entries> : (<entry> ...)
<entry> : (<key> . <timestamp>) | ((<key> . <title>) . <timestamp>)
If not #f, this is inserted as is into each <item>...</item>
Main entry | wiliki / rss - an ad - hoc RSS generation routine for WiLiKi
Copyright ( c ) 2000 - 2009 < >
files ( the " Software " ) , to deal in the Software without restriction ,
the Software , and to permit persons to whom the Software is
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN
(define-module wiliki.rss
(use file.util)
(use text.html-lite)
(use text.tree)
(use util.match)
(use wiliki.core)
(export rss-page rss-item-count rss-item-description rss-item-extra-elements
rss-partial-content-lines rss-source rss-url-format))
(select-module wiliki.rss)
# of items included in the RSS
(define rss-item-count (make-parameter 15))
(define rss-item-description (make-parameter 'none))
(define rss-partial-content-lines (make-parameter 20))
of entries to be included in the RSS . The returned list should be
(define rss-source
(make-parameter (cut wiliki:recent-changes-alist :length <>)))
Whether the url in RSS should be in the format of url?key or url / key
(define rss-url-format (make-parameter 'query))
(define rss-item-extra-elements (make-parameter #f))
(define (rss-page :key
(count (rss-item-count))
(item-description #f))
(rss-format ((rss-source) count)
(case (or item-description (rss-item-description))
[(raw) (cut raw-content <> #f)]
[(raw-partial) (cut raw-content <> #t)]
[(html) (cut html-content <> #f)]
[(html-partial) (cut html-content <> #t)]
[else (^_ "")])))
(define (rss-format entries item-description-proc)
(let* ([self (wiliki)]
[full-url (wiliki:url :full)])
`("Content-type: text/xml\n\n"
"<?xml version=\"1.0\" encoding=\"" ,(wiliki:output-charset) "\" ?>\n"
"<rdf:RDF
xmlns:rdf=\"-rdf-syntax-ns#\"
xmlns=\"/\"
xmlns:dc=\"/\"
xmlns:content=\"/\"
>\n"
,(rdf-channel
(wiliki:url :full)
(rdf-title (ref (wiliki)'title))
(rdf-link full-url)
(rdf-description (ref (wiliki)'description))
(rdf-items-seq (map (^e (rdf-li (entry->url e))) entries)))
,(map (^e (let1 url (entry->url e)
(rdf-item url
(rdf-title (entry->title e))
(rdf-link url)
(item-description-proc (entry->key e))
(dc-date (entry->timestamp e))
(or (rss-item-extra-elements) "")
)))
entries)
"</rdf:RDF>\n")))
(define (raw-content entry partial?)
(if-let1 page (wiliki:db-get entry)
(rdf-description (trim-content (ref page 'content) partial?))
""))
(define (html-content entry partial?)
(if-let1 page (wiliki:db-get entry)
($ rdf-content $ tree->string $ map wiliki:sxml->stree
$ wiliki:format-content $ trim-content (~ page'content) partial?)
""))
(define (trim-content raw-text partial?)
(if partial?
(string-join (take* (string-split raw-text "\n")
(rss-partial-content-lines))
"\n")
raw-text))
(define (entry->url entry)
(case (rss-url-format)
[(query) (wiliki:url :full "~a" (entry->key entry))]
[(path) (build-path (wiliki:url :full) (entry->key entry))]
[else (wiliki:url :full "config-error:invalid-rss-url-format")]))
(define (entry->title entry)
(match entry [((key . title) . _) title] [(key . _) key]))
(define (entry->key entry)
(match entry [((key . title) . _) key] [(key . _) key]))
(define (entry->timestamp entry) (cdr entry))
RDF rendering utilities .
NB : these should be implemented within xml framework
(define (rdf-channel about . content)
`("<channel rdf:about=\"" ,(html-escape-string about) "\">"
,@content
"\n</channel>\n"))
(define (rdf-li resource)
`("<rdf:li rdf:resource=\"" ,(html-escape-string resource) "\" />\n"))
(define (rdf-simple tag . content)
`("<" ,tag ">" ,@content "</" ,tag ">\n"))
(define (rdf-item about . content)
`("<item rdf:about=\"" ,(html-escape-string about) "\">"
,@content
"</item>\n"))
(define (rdf-items-seq . items)
`("<items><rdf:Seq>" ,@items "</rdf:Seq></items>\n"))
(define (rdf-simple-1 tag content)
`("<" ,tag ">" ,(html-escape-string content) "</" ,tag ">\n"))
(define (rdf-title title) (rdf-simple-1 "title" title))
(define (rdf-link link) (rdf-simple-1 "link" link))
(define (rdf-description desc) (rdf-simple-1 "description" desc))
(define (rdf-content content)
`("<content:encoded><![CDATA["
,(regexp-replace-all #/\]\]>/ content "]]]]><![CDATA[>")
"]]></content:encoded>"))
(define (dc-date secs)
(rdf-simple-1 "dc:date"
(sys-strftime "%Y-%m-%dT%H:%M:%S+00:00" (sys-gmtime secs))))
|
c690364e591a5769521efd47dd5cff6c7cde5e8d612327964e51d04b0da6ea9d | tezos-checker/checker | testChecker.ml | open Ctok
open Kit
open Tok
open Lqt
open Burrow
open OUnit2
open TestLib
open CheckerTypes
open Fa2Interface
open Fa2Ledger
open Fa2Implementation
open Error
open Ptr
open LiquidationAuctionTypes
open LiquidationAuction
(* Number of cases to generate for each property-based (QCheck) test. *)
let property_test_count = 10000
(* Lift a QCheck test into an OUnit2 test so it can live in this suite. *)
let qcheck_to_ounit t = OUnit.ounit2_of_ounit1 @@ QCheck_ounit.to_ounit_test t
(* Map keyed by AVL pointers, using the project's pointer comparison. *)
module PtrMap = Map.Make(struct type t = ptr let compare = compare_ptr end)
(* The checker contract's own address (the mocked "self" address in tests). *)
let checker_address = !Ligo.Tezos.self_address
(* A fresh checker state wired to the dummy external contract addresses
 * (ctok FA2, ctez cfmm, oracle, collateral FA2) provided by TestLib. *)
let empty_checker =
  initial_checker
    { ctok_fa2 = ctok_fa2_addr;
      ctez_cfmm = ctez_cfmm_addr;
      oracle = oracle_addr;
      collateral_fa2 = collateral_fa2_addr;
    }
(* The starting checker state should satisfy the invariants to begin with. *)
let _ = Checker.assert_checker_invariants empty_checker
(* Enhance the initial checker state with a populated cfmm in a consistent way. *)
let empty_checker_with_cfmm (cfmm: CfmmTypes.cfmm) =
  (* Subtract one smallest-denomination unit of kit and lqt from the cfmm
     totals before issuing them to checker's own fa2 ledger, so that the
     circulating amounts stay consistent with the cfmm contents. *)
  let checker_kit = kit_sub cfmm.kit (kit_of_denomination (Ligo.nat_from_literal "1n")) in
  let checker_liquidity = lqt_sub cfmm.lqt (lqt_of_denomination (Ligo.nat_from_literal "1n")) in
  let checker =
    { empty_checker with
      parameters = { empty_checker.parameters with circulating_kit = checker_kit };
      cfmm = cfmm;
      fa2_state =
        let fa2_state = initial_fa2_state in
        let fa2_state = ledger_issue_lqt (fa2_state, !Ligo.Tezos.self_address, checker_liquidity) in
        let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, checker_kit) in
        fa2_state;
    } in
  (* The constructed state must itself satisfy the checker invariants. *)
  Checker.assert_checker_invariants checker;
  checker
(* Produces a checker state with burrows.
 * Returns a list of the liquidatable burrow ids, underburrowed burrow ids, and the contract state.
*)
(* Build a checker state holding one healthy burrow and many liquidatable ones. *)
let checker_with_liquidatable_burrows () =
  let checker = empty_checker in
  (* Create some burrows and mint some kit *)
  let alice_burrow_1 = Ligo.nat_from_literal "0n" in
  let alice_burrow_nos = List.init 20 (fun i -> Ligo.nat_from_int64 (Int64.of_int (i+1))) in
  let bob_burrow_1 = Ligo.nat_from_literal "0n" in
  (* Alice burrow 1. Will NOT be liquidatable: only a small amount of kit is
     minted against it (see the fixture assertions below). *)
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:2 ~sender:alice_addr ~amount:Common.tez_zero;
  let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal "2_000_000n"))) in
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:3 ~sender:alice_addr ~amount:Common.tez_zero;
  let _, checker = Checker.entrypoint_mint_kit (checker, (alice_burrow_1, (kit_of_denomination (Ligo.nat_from_literal "100n")))) in
  (* Alice burrows 2..N. Will be liquidatable: each mints its maximum mintable kit. *)
  let checker = List.fold_left (
      fun checker alice_burrow_no ->
        Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
        let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "2_000_000n"))) in
        Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
        let _, checker =
          let max_kit = (Checker.view_burrow_max_mintable_kit ((alice_addr, alice_burrow_no), checker)) in
          Checker.entrypoint_mint_kit (checker, (alice_burrow_no, max_kit)) in
        checker
    )
      checker
      alice_burrow_nos
  in
  (* Bob burrow 1. Will be liquidatable: it mints its maximum mintable kit. *)
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:Common.tez_zero;
  let _, checker = Checker.entrypoint_create_burrow (checker, (bob_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal "20_000_000n"))) in
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker =
    let max_kit = (Checker.view_burrow_max_mintable_kit ((bob_addr, bob_burrow_1), checker)) in
    Checker.entrypoint_mint_kit (checker, (bob_burrow_1, max_kit)) in
  (* Increase the value of kit, to make some of the burrows liquidatable, by touching checker. *)
  (* Note: setting the transaction to far in the future to ensure that the protected_index will become adequately high
   * for the burrows to be liquidatable.
  *)
  Ligo.Tezos.new_transaction ~seconds_passed:10_000_000 ~blocks_passed:100_000 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_100_000n")) in
  (* Touch burrows *)
  Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_1)) in
  Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker = Checker.entrypoint_touch_burrow (checker, (bob_addr, bob_burrow_1)) in
  let checker = List.fold_left (
      fun checker alice_burrow_no ->
        Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
        let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_no)) in
        checker
    )
      checker
      alice_burrow_nos
  in
  (* Check the expected properties of this test fixture *)
  assert_bool "alice_burrow_1 was liquidatable but it is expected to not be"
    (not (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_1) checker.burrows))));
  assert_bool "bob_burrow_1 was not liquidatable but it is expected to be"
    (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (bob_addr, bob_burrow_1) checker.burrows)));
  List.fold_left (
    fun _ alice_burrow_no ->
      assert_bool ("alice_burrow_" ^ (Ligo.string_of_nat alice_burrow_no) ^ " was not liquidatable but it is expected to be")
        (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_no) checker.burrows))))
    ()
    alice_burrow_nos;
  Checker.assert_checker_invariants checker;
  let liquidatable_burrow_ids = List.append (List.map (fun x -> (alice_addr, x)) alice_burrow_nos) [(bob_addr, bob_burrow_1)] in
  let underburrowed_burrow_ids = [(alice_addr, alice_burrow_1)] in
  liquidatable_burrow_ids, underburrowed_burrow_ids, checker
(* Produces a checker state with liquidation slices in the queue but no current auction.
* Returns a list of details for queued slices related to a Close liquidation,
* a list of details for all other slices in the queue, and the contract state.
*)
let checker_with_queued_liquidation_slices () =
  let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in
  (* Mark the burrows for liquidation. This will add slices to the queue. *)
  let checker, close_slice_details, other_slice_details = List.fold_left
      (fun (checker, close_liquidation_slices, other_liquidation_slices) burrow_id ->
         Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
         let _, checker = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
         (* The freshly-added slice is the youngest element of this burrow's slice list. *)
         let new_slice = Option.get (SliceList.slice_list_youngest (SliceList.slice_list_from_auction_state checker.liquidation_auctions burrow_id) checker.liquidation_auctions) in
         let slice_ptr = SliceList.slice_list_element_ptr new_slice in
         let slice_tok = (SliceList.slice_list_element_contents new_slice).tok in
         (* A burrow that became inactive was closed by the liquidation. *)
         let is_burrow_now_closed = not (burrow_active (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))) in
         let close_liquidation_slices, other_liquidation_slices =
           if is_burrow_now_closed then
             (List.append close_liquidation_slices [(burrow_id, slice_ptr, slice_tok)]), other_liquidation_slices
           else
             close_liquidation_slices, (List.append other_liquidation_slices [(burrow_id, slice_ptr, slice_tok)])
         in
         checker, close_liquidation_slices, other_liquidation_slices
      )
      (checker, [], [])
      liquidatable_burrow_ids
  in
  assert_bool
    "liquidation auction queue was empty, but it was expected to have some slices"
    (Option.is_some (Avl.avl_peek_front checker.liquidation_auctions.avl_storage checker.liquidation_auctions.queued_slices));
  (* The fixture must produce both kinds of slices for its callers to be meaningful. *)
  assert (List.length close_slice_details > 0);
  assert (List.length other_slice_details > 0);
  close_slice_details, other_slice_details, checker
(* Produces a checker state with an active liquidation auction *)
let checker_with_active_auction () =
  let _, _, checker = checker_with_queued_liquidation_slices () in
  (* Touch checker to start an auction *)
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker = Checker.entrypoint_touch (checker, ()) in
  assert_bool "a current liquidation auction should have been started but was not" (Option.is_some checker.liquidation_auctions.current_auction);
  checker
(* Produces a checker state with a completed liquidation auction *)
let checker_with_completed_auction () =
  let checker = checker_with_active_auction () in
  (* Get the current auction minimum bid *)
  let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in
  (* Mint enough kit to bid *)
  let bidder = alice_addr in
  let new_burrow_no = Ligo.nat_from_literal "100n" in
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero;
  let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in
  (* Place a bid *)
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid)) in
  (* Wait until enough time has passed for the auction to be completable then touch checker *)
  (* Touch checker to complete the auction. *)
  Ligo.Tezos.new_transaction ~seconds_passed:1202 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker = Checker.entrypoint_touch (checker, ()) in
  assert_bool
    "there was not a completed liquidation auction but one should exist"
    (Option.is_some checker.liquidation_auctions.completed_auctions);
  bidder, checker
(* Helper for creating new burrows and extracting their ID from the corresponding Ligo Ops *)
let newly_created_burrow (checker: checker) (burrow_no: string) (collateral: tok) : burrow_id * checker =
  (* Use [burrow_no] for the creation call as well as the returned id. The
     creation side previously hard-coded "0n", which would silently return a
     mismatching burrow id for any caller passing a different number (all
     current callers pass "0n", so behavior is unchanged for them). *)
  let _ops, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal burrow_no, None, collateral)) in
  ((!Ligo.Tezos.sender, Ligo.nat_from_literal burrow_no), checker)
(* Query checker's FA2 balance-of entrypoint for [addr]'s balance of token
 * [tok], and extract the single balance from the emitted response
 * transaction. Fails if the entrypoint emits anything other than exactly
 * one single-entry FA2 balance response. *)
let get_balance_of (checker: checker) (addr: Ligo.address) (tok: fa2_token_id): Ligo.nat =
  let ops, _checker = Checker.strict_entrypoint_balance_of (checker, { requests = [{ owner=addr; token_id=tok }]; callback=Ligo.contract_of_address addr}) in
  match ops with
  | [ Transaction (FA2BalanceOfResponseTransactionValue [ { request = _; balance = kit } ], _, _) ] -> kit
  | _ -> failwith ("Unexpected fa2 response, got: " ^ show_operation_list ops)
let suite =
"Checker tests" >::: [
("initial touch (noop)" >::
fun _ ->
Ligo.Tezos.reset ();
let checker1 = empty_checker in
let ops, checker2 = Checker.touch_with_index checker1 (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "0n")) in
assert_operation_list_equal ~expected:[] ~real:ops;
assert_equal checker1 checker2; (* NOTE: we really want them to be identical here, hence the '='. *)
()
);
("create_burrow - updates checker storage" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let burrow_id, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "1_000_000n")) in
assert_bool
"No matching burrow found after calling create_burrow"
(Option.is_some (Ligo.Big_map.find_opt burrow_id checker.burrows));
assert_bool
"The burrow existed before calling create_burrow"
(Option.is_none (Ligo.Big_map.find_opt burrow_id empty_checker.burrows))
);
("create_burrow - collateral in burrow representation does not include creation deposit" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let burrow_id, checker = newly_created_burrow empty_checker "0n" Constants.creation_deposit in
let expected_collateral = tok_zero in
match Ligo.Big_map.find_opt burrow_id checker.burrows with
| Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)
| None -> assert_failure "Expected a burrow representation to exist but none was found"
);
("create_burrow - fails when transaction amount is one mutez below creation deposit" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = tok_sub Constants.creation_deposit (tok_of_denomination (Ligo.nat_from_literal "1n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
assert_raises
(Failure (Ligo.string_of_int error_InsufficientFunds))
(fun () -> Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)))
);
("create_burrow - passes when transaction amount is exactly the creation deposit" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let burrow_id, checker = newly_created_burrow empty_checker "0n" Constants.creation_deposit in
match Ligo.Big_map.find_opt burrow_id checker.burrows with
| Some burrow ->
assert_tok_equal ~expected:tok_zero ~real:(burrow_collateral burrow)
| None -> assert_failure "Expected a burrow representation to exist but none was found"
);
("deposit_collateral - owner can deposit" >::
fun _ ->
Ligo.Tezos.reset ();
let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in
let deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in
let expected_collateral = tok_add deposit (tok_sub initial_deposit Constants.creation_deposit) in
(* Create the burrow *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let (_, burrow_no) as burrow_id, checker = newly_created_burrow empty_checker "0n" initial_deposit in
(* Make a deposit *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, deposit)) in
match Ligo.Big_map.find_opt burrow_id checker.burrows with
| Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)
| None -> assert_failure "Expected a burrow representation to exist but none was found"
);
("deposit_collateral - non-owner cannot deposit" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n"))in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;
assert_raises
(Failure (Ligo.string_of_int error_NonExistentBurrow))
(fun () -> Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal "0n", tok_of_denomination (Ligo.nat_from_literal "1_000_000n"))))
);
("withdraw_collateral - owner can withdraw" >::
fun _ ->
Ligo.Tezos.reset ();
let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in
let withdrawal = tok_of_denomination (Ligo.nat_from_literal "1_000_000n") in
let expected_collateral = tok_sub initial_deposit (tok_add Constants.creation_deposit withdrawal) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let burrow_id, checker = newly_created_burrow empty_checker "0n" initial_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", withdrawal)) in
match Ligo.Big_map.find_opt burrow_id checker.burrows with
| Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)
| None -> assert_failure "Expected a burrow representation to exist but none was found"
);
("withdraw_collateral - non-owner cannot withdraw" >::
fun _ ->
Ligo.Tezos.reset ();
let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in
let withdrawal = tok_of_denomination (Ligo.nat_from_literal "1_000_000n") in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = newly_created_burrow empty_checker "0n" initial_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
assert_raises
(Failure (Ligo.string_of_int error_NonExistentBurrow))
(fun () -> Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", withdrawal)))
);
("entrypoint_activate_burrow - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
(* Create a burrow and deactivate it *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in
(* Then activate it *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let ops, _ = Checker.entrypoint_activate_burrow (checker, (burrow_no, Constants.creation_deposit)) in
let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
let expected_ops = [
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.(
{ from_ = alice_addr;
txs = [
{ to_ = burrow_address burrow;
token_id = TokenMetadata.tok_token_id;
amount = Ligo.nat_from_literal "1_000_000n";
};
];
}
)
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
("entrypoint_add_liquidity - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
(* Create a burrow and mint some kit *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_add_liquidity
(checker,
(* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *)
( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n")
, kit_of_denomination (Ligo.nat_from_literal "5_000_000n")
, lqt_of_denomination (Ligo.nat_from_literal "5_000_000n")
, Ligo.timestamp_from_seconds_literal 999
)
) in
let expected_ops = [
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.{
from_ = alice_addr;
txs = [
{ to_ = checker_address;
token_id = TokenMetadata.ctok_token_id;
amount = Ligo.nat_from_literal "5_000_000n";
}
]
}
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
("entrypoint_burn_kit - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
(* Create a burrow and mint some kit *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
(* Then burn the kit *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
assert_operation_list_equal ~expected:[] ~real:ops
);
("entrypoint_create_burrow - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let amnt = tok_of_denomination (Ligo.nat_from_literal "100_000_000n") in
let ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amnt)) in
match ops with
(* Note: it's not really possible to check the first parameter of the contract here, which is the
 * function which defines the contract's logic.
*)
| [ (CreateBurrowContract (_, delegate, tez, storage)) ;
(Transaction (FA2TransferTransactionValue _, _, _)) as op;
] ->
(* burrow creation values *)
assert_key_hash_option_equal ~expected:None ~real:delegate;
assert_tez_equal ~expected:Common.tez_zero ~real:tez;
assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) storage;
(* collateral initialization values *)
let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, (Ligo.nat_from_literal "0n")) checker.burrows) in
assert_operation_equal
~expected:(
LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.(
{ from_ = alice_addr;
txs = [
{ to_ = burrow_address burrow;
token_id = TokenMetadata.tok_token_id;
amount = tok_to_denomination_nat amnt;
};
];
}
)
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
)
~real:op
| _ -> failwith ("Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops)
);
("entrypoint_deactivate_burrow - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
(* Create a burrow and deactivate it *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "100_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in
let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
let expected_ops = [
(LigoOp.Tezos.address_nat_transaction
(alice_addr, (Ligo.nat_from_literal "100_000_000n"))
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow)))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
("entrypoint_deposit_collateral - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
(* Create the burrow *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in
(* Make a deposit *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let ops, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal "3_000_000n"))) in
let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
let expected_ops = [
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.(
{ from_ = alice_addr;
txs = [
{ to_ = burrow_address burrow;
token_id = TokenMetadata.tok_token_id;
amount = Ligo.nat_from_literal "3_000_000n";
};
];
}
)
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
("entrypoint_liquidation_auction_place_bid - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = checker_with_active_auction () in
(* Lookup the current minimum bid *)
let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in
(* Mint some kit to be able to bid *)
let new_burrow_no = Ligo.nat_from_literal "100n" in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in
(* Place a bid *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _checker = Checker.entrypoint_liquidation_auction_place_bid
(checker,
((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid))
in
assert_operation_list_equal ~expected:[] ~real:ops
);
("entrypoint_mark_for_liquidation - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
(* Use a checker state already containing some liquidatable burrows *)
let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in
let burrow_id = List.nth liquidatable_burrow_ids 0 in
let sender = bob_addr in
(* Mark one of the liquidatable burrows for liquidation *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
let burrow = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in
let expected_ops = [
(LigoOp.Tezos.address_nat_transaction
(sender, (Ligo.nat_from_literal "1_001_000n"))
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow)))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
("entrypoint_cancel_liquidation_slice - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
(* Use a checker state already containing some liquidatable burrows *)
(* Note: using a non-closed burrow for this test so we don't have to also re-activate the burrow *)
let _, slice_details, checker = checker_with_queued_liquidation_slices () in
let ((burrow_owner, burrow_no), slice_ptr, _) = List.nth slice_details 0 in
(* Deposit some extra collateral to one of the burrows with slices in the auction queue *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal "4_000_000n"))) in
(* Now cancel one of the burrow's liquidation slices *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_cancel_liquidation_slice (checker, slice_ptr) in
assert_operation_list_equal ~expected:[] ~real:ops
);
("entrypoint_liquidation_auction_claim_win - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let winning_bidder, checker = checker_with_completed_auction () in
let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in
let sold_tok = (Option.get (Avl.avl_root_data checker.liquidation_auctions.avl_storage auction_ptr)).sold_tok in
let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in
(* Touch the remaining slices so the bid can be claimed. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in
(* Claim the winning bid *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:winning_bidder ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_ptr) in
let expected_ops = [
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.(
{ from_ = !Ligo.Tezos.self_address;
txs = [
{ to_ = winning_bidder;
token_id = TokenMetadata.tok_token_id;
amount = tok_to_denomination_nat sold_tok;
};
];
}
)
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
("entrypoint_mint_kit - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
(* Create a burrow and mint some kit *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
assert_operation_list_equal ~expected:[] ~real:ops
);
("entrypoint_set_burrow_delegate - emits expected operations" >::
fun _ ->
(* NOTE: In a collateral=FA2 deployment this would actually fail. *)
Ligo.Tezos.reset ();
(* Create the burrow with no delegate *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in
(* Then set the burrow's delegate *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, checker = Checker.entrypoint_set_burrow_delegate (checker, (burrow_no, Some charles_key_hash)) in
let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
let expected_ops = [
(LigoOp.Tezos.opt_key_hash_transaction
(Some charles_key_hash)
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowSetDelegate" (burrow_address burrow)))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
("entrypoint_receive_price - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:(checker.external_contracts.oracle) ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_receive_price (checker, (Ligo.nat_from_literal "42n", Tok.tok_scaling_factor_nat)) in
assert_operation_list_equal ~expected:[] ~real:ops
);
    ("entrypoint_remove_liquidity - emits expected operations" >::
     fun _ ->
       (* Removing cfmm liquidity should emit exactly one FA2 transfer moving
        * the redeemed ctok from checker back to the liquidity provider. *)
       Ligo.Tezos.reset ();
       let checker = empty_checker in
       (* Create a burrow and mint some kit *)
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
       let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
       (* Add some liquidity to the contract *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_add_liquidity
           (checker,
            (* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *)
            ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , kit_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , lqt_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , Ligo.timestamp_from_seconds_literal 999
            )
           ) in
       (* Now remove the liquidity *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, _ = Checker.entrypoint_remove_liquidity
           (checker,
            (* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *)
            ( lqt_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , ctok_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , kit_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , Ligo.timestamp_from_seconds_literal 999
            )
           ) in
       let expected_ops = [
         (LigoOp.Tezos.fa2_transfer_transaction
            [ Fa2Interface.{
                  from_ = checker_address;
                  txs = [
                    { to_ = alice_addr;
                      token_id = TokenMetadata.ctok_token_id;
                      amount = Ligo.nat_from_literal "5_000_000n";
                    }
                  ]
                }
            ]
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
    (* FIXME: Operations differ between the FA2 deployment and the TEZ deployment
( " entrypoint_touch - emits expected operations when checker needs to be touched " > : :
fun _ - >
Ligo.Tezos.reset ( ) ;
let checker = empty_checker in
Ligo . Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender : alice_addr ~amount:(Ligo.tez_from_literal " 0mutez " ) ;
let ops , _ = Checker.entrypoint_touch ( checker , ( ) ) in
let expected_ops = [
( LigoOp . Tezos.nat_contract_transaction
( Option.get ( LigoOp . Tezos.get_entrypoint_opt " % receive_price " ! . ) )
( Ligo.tez_from_literal " 0mutez " )
( CheckerTypes.get_oracle_entrypoint checker.external_contracts )
) ;
( LigoOp . Tezos.nat_nat_contract_transaction
( Option.get ( LigoOp . Tezos.get_entrypoint_opt " % receive_ctez_marginal_price " ! . ) )
( Ligo.tez_from_literal " 0mutez " )
( CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts )
) ;
] in
assert_operation_list_equal ~expected : expected_ops ~real : ops
) ;
("entrypoint_touch - emits expected operations when checker needs to be touched" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_touch (checker, ()) in
let expected_ops = [
(LigoOp.Tezos.nat_contract_transaction
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%receive_price" !Ligo.Tezos.self_address))
(Ligo.tez_from_literal "0mutez")
(CheckerTypes.get_oracle_entrypoint checker.external_contracts)
);
(LigoOp.Tezos.nat_nat_contract_transaction
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%receive_ctez_marginal_price" !Ligo.Tezos.self_address))
(Ligo.tez_from_literal "0mutez")
(CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts)
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
*)
("entrypoint_touch - emits expected operations when checker has already been touched" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_touch (checker, ()) in
assert_operation_list_equal ~expected:[] ~real:ops
);
    ("entrypoint_touch_liquidation_slices - emits expected operations" >::
     fun _ ->
       (* Touching the slices of a completed auction should emit one
        * %burrowTransfer operation per slice, moving each slice's collateral
        * amount to checker's address. *)
       Ligo.Tezos.reset ();
       let _, checker = checker_with_completed_auction () in
       let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in
       let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in
       let slices = List.map (fun ptr -> Avl.avl_read_leaf checker.liquidation_auctions.avl_storage ptr) slice_ptrs in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, _ = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in
       (* Note: opening LiquidationAuctionPrimitiveTypes locally here since we have overloaded
        * the "contents" record accessor in LiquidationAuctionTypes
       *)
       let expected_ops = let open LiquidationAuctionPrimitiveTypes in
         List.rev (List.map (
             fun slice ->
               let burrow = Option.get (Ligo.Big_map.find_opt slice.contents.burrow checker.burrows) in
               LigoOp.Tezos.address_nat_transaction
                 (checker_address, tok_to_denomination_nat slice.contents.tok)
                 (Ligo.tez_from_literal "0mutez")
                 (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow)))
           ) slices) in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
("entrypoint_touch_burrow - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
(* Create the burrow *)
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
(* Then touch it *)
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal "0n")) in
assert_operation_list_equal ~expected:[] ~real:ops
);
    ("entrypoint_withdraw_collateral - emits expected operations" >::
     fun _ ->
       (* Withdrawing collateral should emit a single %burrowTransfer operation
        * sending the withdrawn amount from the burrow to its owner. *)
       Ligo.Tezos.reset ();
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in
       (* Try to withdraw some tez from the untouched burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", tok_of_denomination (Ligo.nat_from_literal "1_000_000n"))) in
       let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
       let expected_ops = [
         (LigoOp.Tezos.address_nat_transaction
            (alice_addr, (Ligo.nat_from_literal "1_000_000n"))
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow)))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
    ("calculate_touch_reward - expected result for last_touched 2s ago" >::
     fun _ ->
       (* The division in this case should return a remainder < 1/2 *)
       Ligo.Tezos.reset ();
       let time_delta = 2 in
       (* remainder: 12000 / 36000 *)
       let expected_reward = Ligo.int_from_literal "3333" in
       let last_touched = Ligo.timestamp_from_seconds_literal 0 in
       Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in
       assert_int_equal ~expected:expected_reward ~real:actual_reward;
    );
    ("calculate_touch_reward - expected result for last_touched 3s ago" >::
     fun _ ->
       (* The division in this case should produce no remainder *)
       Ligo.Tezos.reset ();
       let time_delta = 3 in
       (* remainder: 0 *)
       let expected_reward = Ligo.int_from_literal "5000" in
       let last_touched = Ligo.timestamp_from_seconds_literal 0 in
       Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in
       assert_int_equal ~expected:expected_reward ~real:actual_reward;
    );
    ("calculate_touch_reward - expected result for last_touched 4s ago" >::
     fun _ ->
       (* The division in this case should return a remainder > 1/2 *)
       Ligo.Tezos.reset ();
       let time_delta = 4 in
       (* remainder: 24000 / 36000 *)
       let expected_reward = Ligo.int_from_literal "6666" in
       let last_touched = Ligo.timestamp_from_seconds_literal 0 in
       Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in
       assert_int_equal ~expected:expected_reward ~real:actual_reward;
    );
    ("burn_kit - owner can burn" >::
     fun _ ->
       (* The burrow owner mints kit and must be able to burn the whole
        * balance back against the same burrow. *)
       Ligo.Tezos.reset ();
       let sender = alice_addr in
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero;
       let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "10_000_000n")) in
       (* Mint as much kit as possible *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
       let (ops, checker) =
         Checker.entrypoint_mint_kit
           ( checker
           , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n"))
           ) in
       (* There should be no operations emitted. *)
       assert_operation_list_equal ~expected:[] ~real:ops;
       (* The owner should be able to burn it back. *)
       let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender)) in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
       let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_token)) in
       ()
    );
    ("burn_kit - non-owner cannot burn" >::
     fun _ ->
       (* Burning kit against someone else's burrow must fail with
        * error_NonExistentBurrow (the burrow id is scoped by owner address). *)
       Ligo.Tezos.reset ();
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "10_000_000n")) in
       (* Mint as much kit as possible *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let (ops, checker) =
         Checker.entrypoint_mint_kit
           ( checker
           , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n"))
           ) in
       (* There should be no operations emitted. *)
       assert_operation_list_equal ~expected:[] ~real:ops;
       (* Have the wrong person try to burn it back; this should fail. *)
       assert_raises
         (Failure (Ligo.string_of_int error_NonExistentBurrow))
         (fun () ->
            let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, bob_addr)) in
            Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
            Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_token))
         );
       ()
    );
    (
      (* Property: buy_kit transfers the provided ctok into checker via a
       * single FA2 transfer and credits the sender with at least
       * min_kit_expected kit. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_buy_kit_respects_min_kit_expected"
        ~count:property_test_count
        make_inputs_for_buy_kit_to_succeed
      @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->
      let sender = alice_addr in
      let checker = empty_checker_with_cfmm cfmm in
      let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *)
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in
      let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *)
      begin match ops with
        | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->
          assert_fa2_transfer_list_equal
            ~expected:[
              Fa2Interface.{
                from_ = sender;
                txs = [
                  { to_ = checker_address;
                    token_id = TokenMetadata.ctok_token_id;
                    amount = ctok_to_denomination_nat ctok_amount;
                  }
                ]
              }
            ]
            ~real:transfer
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops)
      end;
      Ligo.geq_nat_nat
        senders_new_kit
        (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_kit_expected))
    );
    (
      (* Property: the total kit held by the cfmm and the buyer together is
       * conserved by buy_kit. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_buy_kit_preserves_kit"
        ~count:property_test_count
        make_inputs_for_buy_kit_to_succeed
      @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->
      let checker = empty_checker_with_cfmm cfmm in
      let sender = alice_addr in
      let checker_cfmm_old_kit = kit_to_denomination_nat checker.cfmm.kit in
      let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *)
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in
      let checker_cfmm_new_kit = kit_to_denomination_nat checker.cfmm.kit in
      let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *)
      begin match ops with
        | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->
          assert_fa2_transfer_list_equal
            ~expected:[
              Fa2Interface.{
                from_ = sender;
                txs = [
                  { to_ = checker_address;
                    token_id = TokenMetadata.ctok_token_id;
                    amount = ctok_to_denomination_nat ctok_amount;
                  }
                ]
              }
            ]
            ~real:transfer
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops)
      end;
      Ligo.eq_nat_nat
        (Ligo.add_nat_nat checker_cfmm_old_kit senders_old_kit)
        (Ligo.add_nat_nat checker_cfmm_new_kit senders_new_kit)
    );
    (
      (* Property: all the ctok paid in by the buyer ends up in the cfmm. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_buy_kit_preserves_tez"
        ~count:property_test_count
        make_inputs_for_buy_kit_to_succeed
      @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->
      let checker = empty_checker_with_cfmm cfmm in
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
      let _, new_checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in
      ctok_add checker.cfmm.ctok ctok_amount = new_checker.cfmm.ctok
    );
    (
      (* Property: sell_kit pays the seller, via a single FA2 transfer from
       * checker, at least min_ctok_expected ctok. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_sell_kit_respects_min_tez_expected"
        ~count:property_test_count
        make_inputs_for_sell_kit_to_succeed
      @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->
      let sender = alice_addr in
      (* Give the seller the kit to sell, keeping circulating_kit consistent. *)
      let checker =
        let checker = empty_checker_with_cfmm cfmm in
        { checker with
          parameters =
            { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };
          fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);
        } in
      Checker.assert_checker_invariants checker;
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in
      let bought_muctok = match ops with
        | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->
          begin
            assert_address_equal ~expected:checker_address ~real:from_address;
            assert_address_equal ~expected:sender ~real:tx.to_;
            tx.amount
          end
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops)
      in
      ctok_of_denomination bought_muctok >= min_ctok_expected
    );
    (
      (* Property: the kit sold by the seller ends up in the cfmm. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_sell_kit_preserves_kit"
        ~count:property_test_count
        make_inputs_for_sell_kit_to_succeed
      @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->
      let sender = alice_addr in
      (* Give the seller the kit to sell, keeping circulating_kit consistent. *)
      let checker =
        let checker = empty_checker_with_cfmm cfmm in
        { checker with
          parameters =
            { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };
          fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);
        } in
      Checker.assert_checker_invariants checker;
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let _, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in
      kit_add checker.cfmm.kit kit_amount = new_checker.cfmm.kit
    );
    (
      (* Property: the ctok paid out to the seller is exactly what left the
       * cfmm (ctok is conserved between cfmm and seller). *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_sell_kit_preserves_tez"
        ~count:property_test_count
        make_inputs_for_sell_kit_to_succeed
      @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->
      let sender = alice_addr in
      (* Give the seller the kit to sell, keeping circulating_kit consistent. *)
      let checker =
        let checker = empty_checker_with_cfmm cfmm in
        { checker with
          parameters =
            { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };
          fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);
        } in
      Checker.assert_checker_invariants checker;
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in
      let bought_muctok = match ops with
        | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->
          begin
            assert_address_equal ~expected:checker_address ~real:from_address;
            assert_address_equal ~expected:sender ~real:tx.to_;
            tx.amount
          end
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops)
      in
      ctok_add new_checker.cfmm.ctok (ctok_of_denomination bought_muctok) = checker.cfmm.ctok
    );
    (
      (* Property: given a tez amount computed to be sufficient for
       * min_expected_kit (plus a random surplus), buy_kit returns at least
       * min_expected_kit kit. *)
      let cfmm_kit = Ligo.nat_from_literal ("1_000n") in
      let cfmm_ctok = ctok_of_denomination (Ligo.nat_from_literal ("1_000n")) in
      (* The maximum amount of kit that you can buy with a finite amount of tez is
       * (1 - fee) * cfmm.kit - 1
      *)
      let max_buyable_kit = 997 in
      let arb_kit = QCheck.map (fun x -> kit_of_denomination (Ligo.nat_from_literal (string_of_int x ^ "n"))) QCheck.(1 -- max_buyable_kit) in
      let arb_tez = TestArbitrary.arb_small_positive_tez in
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"buy_kit - returns geq min_kit_expected kit for transactions with sufficient tez"
        ~count:property_test_count
        (QCheck.pair arb_kit arb_tez)
      @@ fun (min_expected_kit, additional_tez) ->
      Ligo.Tezos.reset();
      let sender = alice_addr in
      (* Populate cfmm with initial liquidity *)
      let open Ratio in
      let checker =
        empty_checker_with_cfmm
          { empty_checker.cfmm with
            ctok = cfmm_ctok;
            kit = kit_of_denomination cfmm_kit;
          } in
      (* Calculate minimum tez to get the min_expected kit given the state of the cfmm defined above *)
      let ratio_minimum_tez = div_ratio
          (ratio_of_nat cfmm_kit)
          (
            sub_ratio
              (div_ratio (ratio_of_nat (Ligo.nat_from_literal "998n")) (ratio_of_nat (kit_to_denomination_nat min_expected_kit)))
              (ratio_of_nat (Ligo.nat_from_literal "1n"))
          ) in
      let minimum_tez = Ligo.mul_nat_tez (Ligo.abs (Common.cdiv_int_int ratio_minimum_tez.num ratio_minimum_tez.den)) (Ligo.tez_from_literal "1mutez") in
      (* Adjust transaction by a random amount of extra tez *)
      let ctok_provided = Ctok.ctok_of_denomination (Common.tez_to_mutez_nat (Ligo.add_tez_tez minimum_tez additional_tez)) in (* UNSAFE CAST *)
      let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *)
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_provided, min_expected_kit, Ligo.timestamp_from_seconds_literal 1)) in
      begin match ops with
        | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->
          assert_fa2_transfer_list_equal
            ~expected:[
              Fa2Interface.{
                from_ = sender;
                txs = [
                  { to_ = checker_address;
                    token_id = TokenMetadata.ctok_token_id;
                    amount = Ctok.ctok_to_denomination_nat ctok_provided;
                  }
                ]
              }
            ]
            ~real:transfer
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops)
      end;
      let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *)
      Ligo.geq_nat_nat
        senders_new_kit
        (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_expected_kit))
      (* FIXME: This test only rarely evaluates the 'eq' part of 'geq'. Reducing the range of possible `additional_tez` or increasing the
       * number of QCheck samples may improve this.
      *)
    );
    (* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have
     * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *)
(* ("buy_kit - returns expected kit" >::
fun _ ->
Ligo.Tezos.reset ();
(* Populate the cfmm with some liquidity *)
let checker =
empty_checker_with_cfmm
{ empty_checker.cfmm with
ctok = ctok_of_denomination (Ligo.nat_from_literal "2n");
kit = kit_of_denomination (Ligo.nat_from_literal "2n");
} in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_of_denomination (Ligo.nat_from_literal "1_000_000n"), kit_of_denomination (Ligo.nat_from_literal "1n"), Ligo.timestamp_from_seconds_literal 1)) in
let kit = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
let expected_ops = [
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.{
from_ = alice_addr;
txs = [
{ to_ = checker_address;
token_id = TokenMetadata.ctok_token_id;
amount = Ligo.nat_from_literal "1_000_000n";
}
]
}
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2))
);
] in
assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:kit;
assert_operation_list_equal ~expected:expected_ops ~real:ops
); *)
    (* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have
     * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *)
    (* ("sell_kit - returns expected tez" >::
fun _ ->
Ligo.Tezos.reset ();
let kit_to_sell = kit_of_denomination (Ligo.nat_from_literal "1_000_000n") in
let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal "1n") in
let checker =
let checker =
empty_checker_with_cfmm
{ empty_checker.cfmm with
ctok = ctok_of_denomination (Ligo.nat_from_literal "2n");
kit = kit_of_denomination (Ligo.nat_from_literal "2n");
lqt = lqt_of_denomination (Ligo.nat_from_literal "1n");
} in
{ checker with
parameters =
{ checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_to_sell };
fa2_state = ledger_issue_kit (checker.fa2_state, alice_addr, kit_to_sell);
} in
Checker.assert_checker_invariants checker;
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_expected, Ligo.timestamp_from_seconds_literal 1)) in
let expected_ops = [
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.{
from_ = checker_address;
txs = [
{ to_ = alice_addr;
token_id = TokenMetadata.ctok_token_id;
amount = Ligo.nat_from_literal "1n";
}
]
}
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
); *)
    ("remove_liquidity - returns expected kit and tez" >::
     fun _ ->
       (* With a carefully crafted 2-ctok/2-kit/2-lqt cfmm, burning 1 lqt
        * should pay out exactly 1 kit (credited on the ledger) and 1 ctok
        * (via an FA2 transfer from checker to the sender). *)
       Ligo.Tezos.reset ();
       let min_kit_expected = kit_of_denomination (Ligo.nat_from_literal "1n") in
       let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal "1n") in
       let my_liquidity_tokens = lqt_of_denomination (Ligo.nat_from_literal "1n") in
       let sender = alice_addr in
       (* Populate the cfmm with some liquidity (carefully crafted) *)
       let checker =
         { empty_checker with
           parameters = { empty_checker.parameters with circulating_kit = kit_of_denomination (Ligo.nat_from_literal "1n")};
           cfmm =
             { empty_checker.cfmm with
               ctok = ctok_of_denomination (Ligo.nat_from_literal "2n");
               kit = kit_of_denomination (Ligo.nat_from_literal "2n");
               lqt = lqt_of_denomination (Ligo.nat_from_literal "2n");
             };
           fa2_state =
             let fa2_state = initial_fa2_state in
             let fa2_state = ledger_issue_lqt (fa2_state, sender, my_liquidity_tokens) in
             let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, kit_of_denomination (Ligo.nat_from_literal "1n")) in
             fa2_state;
         } in
       Checker.assert_checker_invariants checker;
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, checker = Checker.entrypoint_remove_liquidity (checker, (my_liquidity_tokens, min_ctok_expected, min_kit_expected, Ligo.timestamp_from_seconds_literal 1)) in
       let ctok = match ops with
         | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->
           begin
             assert_address_equal ~expected:checker_address ~real:from_address;
             assert_address_equal ~expected:sender ~real:tx.to_;
             tx.amount
           end
         | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops)
       in
       let kit = get_balance_of checker sender TokenMetadata.kit_token_id in
       assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:kit;
       assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:ctok;
       ()
    );
(* ************************************************************************* *)
(** FA2 *)
(* ************************************************************************* *)
("fa2 scenario" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
let initial_addr = Ligo.address_of_string "INIT_ADDR" in
(* mint some kit *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
let max_kit = Checker.view_burrow_max_mintable_kit ((initial_addr, Ligo.nat_from_literal "0n"), checker) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", max_kit)) in
(* get some liquidity *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker =
Checker.entrypoint_add_liquidity
( checker,
( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n")
, kit_of_denomination (Ligo.nat_from_literal "5_000_000n")
, lqt_of_denomination (Ligo.nat_from_literal "5n")
, Ligo.timestamp_from_seconds_literal 999
)
) in
initialize alice , and leena accounts
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.strict_entrypoint_transfer (checker, [
{ from_ = initial_addr;
txs = [
{ to_ = alice_addr; token_id = TokenMetadata.kit_token_id; amount = Ligo.nat_from_literal "5n" };
{ to_ = bob_addr; token_id = TokenMetadata.lqt_token_id; amount = Ligo.nat_from_literal "5n" }
];
}]) in
let balance chk addr tok = Checker.view_get_balance ((addr, tok), chk) in
(* you can see the initial balances here for reference *)
assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "5n");
assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "0n");
assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "0n");
assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "5n");
assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "0n");
assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "0n");
(* make leena an operator of bob for kit *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_update_operators (checker, [
(Add_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in
assert_equal true (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.kit_token_id)), checker));
assert_equal false (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.lqt_token_id)), checker));
assert_equal false (Checker.view_is_operator ((leena_addr, (bob_addr, TokenMetadata.kit_token_id)), checker));
(* alice can transfer some kit to bob *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.strict_entrypoint_transfer (checker, [
{ from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id;amount=Ligo.nat_from_literal "2n"}]}]) in
assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "3n");
assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "2n");
(* but she can not transfer more than she has *)
assert_raises
(Failure "FA2_INSUFFICIENT_BALANCE")
(fun () -> Checker.strict_entrypoint_transfer (checker, [
{ from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "10n"}]}]));
(* and leena can send some of that kit back to alice *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.strict_entrypoint_transfer (checker, [
{ from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "1n"}]}]) in
assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "4n");
assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "1n");
    (* but leena cannot even send a single kit from bob's account when he's not an operator anymore *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_update_operators (checker, [
(Remove_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal "0mutez");
assert_raises
(Failure "FA2_NOT_OPERATOR")
(fun () -> Checker.strict_entrypoint_transfer (checker, [
{ from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "1n"}]}]));
()
);
("view_total_supply (FA2) - initial kit supply" >::
fun _ ->
Ligo.Tezos.reset ();
let total_kit_amount = Checker.view_total_supply (TokenMetadata.kit_token_id, empty_checker) in
assert_nat_equal ~expected:(Ligo.nat_from_literal "0n") ~real:total_kit_amount;
()
);
("view_total_supply (FA2) - initial lqt supply" >::
fun _ ->
Ligo.Tezos.reset ();
let total_lqt_amount = Checker.view_total_supply (TokenMetadata.lqt_token_id, empty_checker) in
assert_nat_equal ~expected:(Ligo.nat_from_literal "0n") ~real:total_lqt_amount;
()
);
("view_total_supply (FA2) - undefined token id" >::
fun _ ->
assert_raises
(Failure "FA2_TOKEN_UNDEFINED")
(fun () -> Checker.view_total_supply (Ligo.nat_from_literal "3n", empty_checker))
);
("view_all_tokens (FA2)" >::
fun _ ->
Ligo.Tezos.reset ();
let all_tokens = Checker.view_all_tokens ((), empty_checker) in
assert_nat_list_equal
~expected:[ TokenMetadata.kit_token_id; TokenMetadata.lqt_token_id ]
~real:all_tokens;
()
);
(* ************************************************************************* *)
(** LiquidationAuctions *)
(* ************************************************************************* *)
    ("entrypoint_liquidation_auction_place_bid: should only allow the current auction" >::
     fun _ ->
       Ligo.Tezos.reset ();
       (* Start from a known index so that touching computes prices deterministically. *)
       let checker = { empty_checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) } in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_touch (checker, ()) in
       (* Create a burrow and mint the maximum amount of kit it allows. *)
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
       let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "200_000_000n"))) in
       let max_kit = Checker.view_burrow_max_mintable_kit ((alice_addr, Ligo.nat_from_literal "0n"), checker) in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", max_kit)) in
       (* Raise the index sharply and let a lot of time pass so the burrow becomes a liquidation candidate. *)
       let checker = { checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "10_000_000n")) } in
       let _, checker = Checker.entrypoint_touch (checker, ()) in
       Ligo.Tezos.new_transaction ~seconds_passed:1_000_000 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_touch (checker, ()) in
       (* Mark the burrow for liquidation and touch so an auction gets started. *)
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal "0n")) in
       let _, checker = Checker.entrypoint_mark_for_liquidation (checker, (alice_addr, Ligo.nat_from_literal "0n")) in
       let _, checker = Checker.entrypoint_touch (checker, ()) in
       let res = Checker.view_current_liquidation_auction_details ((), checker) in
       (* Bidding on any AVL pointer other than the current auction's must be rejected. *)
       let other_ptr = match res.auction_id with AVLPtr i -> Ptr.ptr_next i in
       assert_raises
         (Failure (Ligo.string_of_int error_InvalidLiquidationAuction))
         (fun () -> Checker.entrypoint_liquidation_auction_place_bid (checker, (AVLPtr other_ptr, res.minimum_bid)));
    );
    ("can complete a liquidation auction" >::
     fun _ ->
       Ligo.Tezos.reset ();
       let checker = empty_checker in
       (* mint some kit to convert to liquidity *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "200_000_000n"))) in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _lqt_minted_ret_kit_ops, checker =
         Checker.entrypoint_add_liquidity
           ( checker
           , ( ctok_of_denomination (Ligo.nat_from_literal "1_000_000n")
             , kit_one
             , lqt_of_denomination (Ligo.nat_from_literal "1n")
             , Ligo.timestamp_from_seconds_literal 1
             )
           ) in (* barely on time *)
       (* Activation/deactivation tests *)
       let () =
         (* Creation/deactivation does not incur any costs. *)
         let tez = tok_of_denomination (Ligo.nat_from_literal "12_345_678n") in (* NOTE: tez is a misnomer; it's tok really *)
         Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;
         let (ops, checker0) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tez)) in
         (* created burrow should be deposited (incl. the creation deposit) *)
         let burrow_addr =
           burrow_address
             (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal "0n") checker0.burrows)) in
         let () = match ops with
           | [ CreateBurrowContract (_, cb_delegate, cb_tez, cb_storage) ;
               (Transaction (FA2TransferTransactionValue _, _, _)) as op ;
             ] ->
             (* burrow creation values *)
             assert_key_hash_option_equal ~expected:None ~real:cb_delegate;
             assert_tez_equal ~expected:Common.tez_zero ~real:cb_tez;
             assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) cb_storage;
             (* collateral initialization values *)
             assert_operation_equal
               ~expected:(
                 LigoOp.Tezos.fa2_transfer_transaction
                   [ Fa2Interface.(
                         { from_ = bob_addr;
                           txs = [
                             { to_ = burrow_addr;
                               token_id = TokenMetadata.tok_token_id;
                               amount = tok_to_denomination_nat tez;
                             };
                           ];
                         }
                       )
                   ]
                   (Ligo.tez_from_literal "0mutez")
                   (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
               )
               ~real:op
           | _ -> assert_failure ("Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) in
         Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
         let (ops, checker1) = Checker.entrypoint_deactivate_burrow (checker0, (Ligo.nat_from_literal "0n", alice_addr)) in
         assert_operation_list_equal
           ~expected:[
             LigoOp.Tezos.address_nat_transaction
               (alice_addr, tok_to_denomination_nat tez)
               (Ligo.tez_from_literal "0mutez")
               (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" burrow_addr))
           ]
           ~real:ops;
         (* deactivation/activation = identity (if conditions are met ofc). *)
         Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;
         let _ops, checker2 = Checker.entrypoint_activate_burrow (checker1, (Ligo.nat_from_literal "0n", tez)) in
         (* FIXME: cfmm contains a ratio, which cannot be compared for equality using (=). So, the next line can give false positives. *)
         assert_equal checker0 checker2;
         () in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;
       let (_, checker) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in
       let burrow_id = (bob_addr, Ligo.nat_from_literal "0n") in
       let burrow_addr =
         burrow_address
           (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal "0n") checker.burrows)) in
       (* Mint as much kit as possible *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let (_ops, checker) =
         Checker.entrypoint_mint_kit
           ( checker
           , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n"))
           ) in
       let kit = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
       assert_nat_equal ~expected:(Ligo.nat_from_literal "4_285_714n") ~real:kit;
       assert_bool
         "should not be overburrowed right after minting"
         (not
          @@ burrow_is_overburrowed
            checker.parameters
            (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))
         );
       (* Minting another kit should fail *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
       assert_raises
         (Failure (Ligo.string_of_int error_MintKitFailure))
         (fun () ->
            Checker.entrypoint_mint_kit
              ( checker
              , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))
              )
         );
       (* Over time the burrows with outstanding kit should be overburrowed
        * (NOTE: even if the index stays where it was before, but that would
        * take more time I guess). *)
       Ligo.Tezos.new_transaction ~seconds_passed:60 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_001n")) in
       let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in
       assert_operation_list_equal ~expected:[] ~real:ops;
       assert_bool
         "if the index goes up, then burrows should become overburrowed"
         (burrow_is_overburrowed
            checker.parameters
            (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))
         );
       (* If enough time passes and the index remains up, then the burrow is even liquidatable. *)
       Ligo.Tezos.new_transaction ~seconds_passed:(211*60) ~blocks_passed:211 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in
       let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
       let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in
       let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in
       assert_operation_list_equal ~expected:[] ~real:ops;
       assert_int_equal
         ~expected:(Ligo.int_from_literal "202_000_000") (* wow, high reward, many blocks have passed. *)
         ~real:touch_reward;
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let (ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
       assert_operation_list_equal
         ~expected:[
           LigoOp.Tezos.address_nat_transaction
             (alice_addr, Ligo.nat_from_literal "1_009_000n")
             (Ligo.tez_from_literal "0mutez")
             (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" burrow_addr))
         ]
         ~real:ops;
       let slice =
         (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices)
         |> Option.get
         |> fun i -> i.youngest_slice in
       (* We shouldn't be able to cancel the liquidation of this slice if the
        * prices don't change, even if it's not in an auction yet. *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
       assert_raises
         (Failure (Ligo.string_of_int error_UnwarrantedCancellation))
         (fun () -> Checker.entrypoint_cancel_liquidation_slice (checker, slice));
       (* Trying to cancel a liquidation using an invalid pointer should fail. *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
       assert_raises
         (Failure (Ligo.string_of_int error_InvalidLeafPtr))
         (fun () ->
            let undefined_slice = LiquidationAuctionPrimitiveTypes.LeafPtr (ptr_next checker.liquidation_auctions.avl_storage.last_ptr) in
            Checker.entrypoint_cancel_liquidation_slice (checker, undefined_slice)
         );
       Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
       assert_raises
         (Failure (Ligo.string_of_int error_NoOpenAuction))
         (fun () -> Checker.view_current_liquidation_auction_details ((), checker));
       let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in
       let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
       let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in
       assert_bool "should start an auction"
         (Option.is_some checker.liquidation_auctions.current_auction);
       assert_int_equal
         ~expected:(Ligo.int_from_literal "500_000")
         ~real:touch_reward;
       Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in
       let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
       let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in
       let min_bid = Checker.view_current_liquidation_auction_details ((), checker) in
       let auction_id =
         min_bid.auction_id in
       assert_kit_equal
         ~expected:(kit_of_denomination (Ligo.nat_from_literal "2_709_183n"))
         ~real:min_bid.minimum_bid;
       (* Bid the minimum first *)
       let (ops, checker) =
         Checker.entrypoint_liquidation_auction_place_bid (checker, (auction_id, min_bid.minimum_bid)) in
       assert_operation_list_equal ~expected:[] ~real:ops;
       (* Same person increases the bid *)
       let (ops, checker) =
         Checker.entrypoint_liquidation_auction_place_bid
           ( checker
           , (auction_id, kit_of_denomination (Ligo.nat_from_literal "4_200_000n"))
           ) in
       let auction_id =
         match checker.liquidation_auctions.current_auction with
         | None -> assert_failure "entrypoint_liquidation_auction_place_bid should have succeeded"
         | Some current_auction -> current_auction.contents in
       assert_operation_list_equal ~expected:[] ~real:ops;
       assert_int_equal
         ~expected:(Ligo.int_from_literal "500_000")
         ~real:touch_reward;
       Ligo.Tezos.new_transaction ~seconds_passed:(30*60) ~blocks_passed:30 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
       let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in
       let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
       let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in
       assert_bool "auction should be completed"
         (Option.is_none checker.liquidation_auctions.current_auction);
       assert_int_equal
         ~expected:(Ligo.int_from_literal "21_000_000")
         ~real:touch_reward;
       (* FIXME: Operations differ between the FA2 deployment and the TEZ deployment
          (* Check that all the requests for burrows to send tez come _before_ the
           * request to the oracle to update the index. *)
          begin match ops with
          | [
            Transaction (AddressNatTransactionValue _, _, _); (* send tez requests *)
            Transaction (NatContractTransactionValue _, _, _); (* oracle call *)
            call
          ] -> ()
          | _ -> assert_failure ("Unexpected operations/operation order: " ^ show_operation_list ops)
          end;
       *)
       (* We don't need to touch the slice on this test case since
        * Checker.entrypoint_touch_with_index already touches the oldest 5
        * slices. *)
       assert_raises
         (Failure (Ligo.string_of_int error_InvalidLeafPtr))
         (fun () -> Checker.entrypoint_touch_liquidation_slices (checker, [slice]));
       assert_bool "burrow should have no liquidation slices"
         (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices= None);
       let result = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in
       assert_tok_equal
         ~expected:tok_zero
         ~real:(burrow_collateral_at_auction result);
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let (ops, checker) = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id) in
       assert_operation_list_equal
         ~expected:[
           (LigoOp.Tezos.fa2_transfer_transaction
              [ Fa2Interface.(
                    { from_ = checker_address;
                      txs = [
                        { to_ = alice_addr;
                          token_id = TokenMetadata.tok_token_id;
                          amount = Ligo.nat_from_literal "3_156_446n";
                        };
                      ];
                    }
                  )
              ]
              (Ligo.tez_from_literal "0mutez")
              (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
           );
         ]
         ~real:ops;
       (* This should fail; shouldn't be able to claim the win twice. *)
       assert_raises
         (Failure (Ligo.string_of_int error_InvalidAvlPtr))
         (fun () -> Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id));
       ()
    );
("entrypoint_mark_for_liquidation - should not create empty slices" >::
fun _ ->
(* Setup. *)
Ligo.Tezos.reset ();
let sender = alice_addr in
let checker = empty_checker in
(* Create a burrow with a very little tez in it. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero;
let (_, burrow_no) as burrow_id, checker = newly_created_burrow checker "0n" (tok_of_denomination (Ligo.nat_from_literal "2_001_001n")) in
CALCULATIONS
~~~~~~~~~~~~
Tez in the burrow is ( 1_001_001mutez + 1tez ) so the reward is
( 1tez + 1_001mutez = 1_001_001 ) . This means that
- The slice we WOULD send to auctions is empty .
- The burrow remains is empty so the next liquidation WOULD create another empty slice to auctions .
~~~~~~~~~~~~
Tez in the burrow is (1_001_001mutez + 1tez) so the reward is
(1tez + 1_001mutez = 1_001_001). This means that
- The slice we WOULD send to auctions is empty.
- The burrow remains is empty so the next liquidation WOULD create another empty slice to auctions.
*)
Mint as much kit as possible .
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
let (_ops, checker) = Checker.entrypoint_mint_kit (checker, (burrow_no, kit_of_denomination (Ligo.nat_from_literal "476_667n"))) in
(* Let some time pass. Over time the burrows with outstanding kit should
* become overburrowed, and eventually liquidatable. Note that this
* could be because of the index, but also it can happen because of the
* fees alone if the index remains the same. *)
NOTE : I am a little surprised / worried about this being again 211 ...
Ligo.Tezos.new_transaction ~seconds_passed:(60*blocks_passed) ~blocks_passed:blocks_passed ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_105_283n")) in (* sup *)
let _ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in
Ensure that the burrow is .
begin match Ligo.Big_map.find_opt burrow_id checker.burrows with
| None -> assert_failure "bug"
| Some burrow -> assert_bool "burrow needs to be liquidatable for the test to be potent." (Burrow.burrow_is_liquidatable checker.parameters burrow);
end;
Let 's mark the burrow for liquidation now ( first pass : leaves it empty but active ) .
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
Checker.assert_checker_invariants checker; (* Ensures no empty slices in the queue. *)
Let 's mark the burrow for liquidation now ( second pass : deactivates it ) .
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
Checker.assert_checker_invariants checker; (* Ensures no empty slices in the queue. *)
()
);
    ("deposit_collateral - does not fail on untouched burrows" >::
     fun _ ->
       Ligo.Tezos.reset ();
       let amount = Constants.creation_deposit in
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
       (* Touch checker, so that its timestamp is ahead of the burrow's. *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
       (* Try to deposit some tez to the untouched burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ = Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal "0n", amount)) in
       ()
    );
    ("entrypoint_withdraw_collateral - does not fail on untouched burrows" >::
     fun _ ->
       Ligo.Tezos.reset ();
       let amount = tok_add Constants.creation_deposit Constants.creation_deposit in
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
       (* Touch checker, so that its timestamp is ahead of the burrow's. *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
       (* Try to withdraw some tez from the untouched burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", Constants.creation_deposit)) in
       ()
    );
    ("entrypoint_mint_kit - does not fail on untouched burrows" >::
     fun _ ->
       Ligo.Tezos.reset ();
       (* Create a burrow *)
       let amount = tok_add Constants.creation_deposit Constants.creation_deposit in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
       (* Touch checker, so that its timestamp is ahead of the burrow's. *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
       (* Try to mint some kit out of the untouched burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in
       ()
    );
    ("entrypoint_burn_kit - does not fail on untouched burrows" >::
     fun _ ->
       Ligo.Tezos.reset ();
       let amount = tok_add Constants.creation_deposit Constants.creation_deposit in
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
       (* Mint some kit out of the burrow, so there is something to burn back. *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _ops, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in
       (* Touch checker, so that its timestamp is ahead of the burrow's. *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
       (* Try to burn some kit into the untouched burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in
       ()
    );
    ("entrypoint_activate_burrow - does not fail on untouched burrows" >::
     fun _ ->
       Ligo.Tezos.reset ();
       let amount = Constants.creation_deposit in
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
       (* Deactivate the burrow, so that activating it again is meaningful. *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _ops, checker = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal "0n", !Ligo.Tezos.sender)) in
       (* Touch checker, so that its timestamp is ahead of the burrow's. *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
       (* Try to activate the untouched burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ = Checker.entrypoint_activate_burrow (checker, (Ligo.nat_from_literal "0n", amount)) in
       ()
    );
    ("entrypoint_deactivate_burrow - does not fail on untouched burrows" >::
     fun _ ->
       Ligo.Tezos.reset ();
       let amount = Constants.creation_deposit in
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
       (* Touch checker, so that its timestamp is ahead of the burrow's. *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
       (* Try to deactivate the untouched burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _ = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal "0n", !Ligo.Tezos.sender)) in
       ()
    );
    ("entrypoint_mark_for_liquidation - does not fail on untouched burrows" >::
     fun _ ->
       Ligo.Tezos.reset ();
       let amount = Constants.creation_deposit in
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
       let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in
       (* Touch checker, so that its timestamp is ahead of the burrow's. *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
       (* Try to mark the untouched burrow for liquidation *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       (* TODO: Would be nice to create the conditions for entrypoint_mark_for_liquidation
        * to really succeed instead of failing for another reason. *)
       assert_raises
         (Failure (Ligo.string_of_int error_NotLiquidationCandidate))
         (fun () -> Checker.entrypoint_mark_for_liquidation (checker, burrow_id));
    );
(* TODO: Add test "entrypoint_cancel_liquidation_slice - fails on untouched burrows" *)
    ("entrypoint_set_burrow_delegate - does not fail on untouched burrows" >::
     fun _ ->
       (* NOTE: In a collateral=FA2 deployment this would actually fail. *)
       Ligo.Tezos.reset ();
       let amount = Constants.creation_deposit in
       (* Create a burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
       (* Touch checker, so that its timestamp is ahead of the burrow's. *)
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
       (* Try to set the delegate of the untouched burrow *)
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _ = Checker.entrypoint_set_burrow_delegate (checker, (Ligo.nat_from_literal "0n", None)) in
       ()
    );
("cfmm views" >:::
let
with_cfmm_setup f =
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
let burrow_id = Ligo.nat_from_literal "42n" in
(* Create a burrow *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (checker, (burrow_id, None, tok_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in
(* Mint some kit *)
Ligo.Tezos.new_transaction ~seconds_passed:62 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ops, checker = Checker.entrypoint_mint_kit (checker, (burrow_id, kit_one)) in
(* Add some liquidity *)
Ligo.Tezos.new_transaction ~seconds_passed:121 ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ctok_to_give = Ctok.ctok_of_denomination (Ligo.nat_from_literal "400_000n") in
let kit_to_give = Kit.kit_of_denomination (Ligo.nat_from_literal "400_000n") in
let min_lqt_to_mint = Lqt.lqt_of_denomination (Ligo.nat_from_literal "5n") in
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
let _ops, checker = Checker.entrypoint_add_liquidity (checker, (ctok_to_give, kit_to_give, min_lqt_to_mint, deadline)) in
Ligo.Tezos.new_transaction ~seconds_passed:59 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = f checker in ()
in
[
"view_buy_kit_min_kit_expected" >:: with_cfmm_setup
(fun checker ->
let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal "100_000n") in
let min_kit_to_buy = Checker.view_buy_kit_min_kit_expected (ctok_to_sell, checker) in
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
(* must succeed, otherwise view_buy_kit_min_kit_expected overapproximated *)
Checker.entrypoint_buy_kit (checker, (ctok_to_sell, min_kit_to_buy, deadline)));
"view_buy_kit_min_kit_expected - fail if no ctok is given" >:: with_cfmm_setup
(fun checker ->
assert_raises
(Failure (Ligo.string_of_int error_BuyKitNoCtokGiven))
(fun () -> Checker.view_buy_kit_min_kit_expected (Ctok.ctok_zero, checker))
);
"view_sell_kit_min_ctok_expected" >:: with_cfmm_setup
(fun checker ->
let kit_to_sell = Kit.kit_of_denomination (Ligo.nat_from_literal "100_000n") in
let min_ctok_to_buy = Checker.view_sell_kit_min_ctok_expected (kit_to_sell, checker) in
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
(* must succeed, otherwise view_sell_kit_min_ctok_expected overapproximated *)
Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_to_buy, deadline)));
"view_sell_kit_min_ctok_expected - fail if no kit is given" >:: with_cfmm_setup
(fun checker ->
assert_raises
(Failure (Ligo.string_of_int error_SellKitNoKitGiven))
(fun () -> Checker.view_sell_kit_min_ctok_expected (Kit.kit_zero, checker))
);
        (* Adding liquidity involves two views; both of their reported bounds
         * must be honored by entrypoint_add_liquidity simultaneously. *)
        "view_add_liquidity_max_kit_deposited / view_add_liquidity_min_lqt_minted" >:: with_cfmm_setup
          (fun checker ->
             let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal "100_000n") in
             let max_kit_to_sell = Checker.view_add_liquidity_max_kit_deposited (ctok_to_sell, checker) in
             let min_lqt_to_buy = Checker.view_add_liquidity_min_lqt_minted (ctok_to_sell, checker) in
             let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
             (* must succeed, otherwise
              * view_add_liquidity_max_kit_deposited underapproximated or
              * view_add_liquidity_min_lqt_minted overapproximated (or both of them did) *)
             Checker.entrypoint_add_liquidity (checker, (ctok_to_sell, max_kit_to_sell, min_lqt_to_buy, deadline)));
        (* Both views must reject a zero ctok amount just like the entrypoint does. *)
        "view_add_liquidity_max_kit_deposited - fail if no ctok is given" >:: with_cfmm_setup
          (fun checker ->
             assert_raises
               (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven))
               (fun () -> Checker.view_add_liquidity_max_kit_deposited (Ctok.ctok_zero, checker))
          );
        "view_add_liquidity_min_lqt_minted - fail if no ctok is given" >:: with_cfmm_setup
          (fun checker ->
             assert_raises
               (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven))
               (fun () -> Checker.view_add_liquidity_min_lqt_minted (Ctok.ctok_zero, checker))
          );
        (* Removing liquidity involves two views; both of their reported bounds
         * must be honored by entrypoint_remove_liquidity simultaneously. *)
        "view_remove_liquidity_min_ctok_withdrawn / view_remove_liquidity_min_kit_withdrawn" >:: with_cfmm_setup
          (fun checker ->
             let lqt_to_sell = Lqt.lqt_of_denomination (Ligo.nat_from_literal "5n") in
             let min_ctok_to_buy = Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_sell, checker) in
             let min_kit_to_buy = Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_sell, checker) in
             let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
             (* must succeed, otherwise
              * view_remove_liquidity_min_ctok_withdrawn overapproximated or
              * view_remove_liquidity_min_kit_withdrawn overapproximated (or both of them did) *)
             Checker.entrypoint_remove_liquidity (checker, (lqt_to_sell, min_ctok_to_buy, min_kit_to_buy, deadline)));
        (* Both views must reject a zero lqt amount just like the entrypoint does. *)
        "view_remove_liquidity_min_ctok_withdrawn - fail if no liquidity is given" >:: with_cfmm_setup
          (fun checker ->
             assert_raises
               (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned))
               (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (Lqt.lqt_zero, checker))
          );
        (* Withdrawing all of the pool's liquidity (or more) must be rejected;
         * both the "equal" and the "more than" boundary are exercised. *)
        "view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (equal)" >:: with_cfmm_setup
          (fun checker ->
             let lqt_to_withdraw = checker.cfmm.lqt in
             assert_raises
               (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))
               (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker))
          );
        "view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (more than)" >:: with_cfmm_setup
          (fun checker ->
             let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal "1n")) in
             assert_raises
               (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))
               (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker))
          );
        "view_remove_liquidity_min_kit_withdrawn - fail if no liquidity is given" >:: with_cfmm_setup
          (fun checker ->
             assert_raises
               (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned))
               (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (Lqt.lqt_zero, checker))
          );
        "view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (equal)" >:: with_cfmm_setup
          (fun checker ->
             let lqt_to_withdraw = checker.cfmm.lqt in
             assert_raises
               (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))
               (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker))
          );
        "view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (more than)" >:: with_cfmm_setup
          (fun checker ->
             let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal "1n")) in
             assert_raises
               (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))
               (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker))
          );
      ]
);
("view_burrow_max_mintable_kit - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
(* Create a burrow *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
(* Try to view the max mintable kit from the untouched burrow *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.view_burrow_max_mintable_kit (burrow_id, checker) in
()
);
("view_is_burrow_overburrowed - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
(* Create a burrow *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
(* Try to view whether the untouched burrow is overburrowed *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.view_is_burrow_overburrowed (burrow_id, checker) in
()
);
("view_is_burrow_liquidatable - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
(* Create a burrow *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Try to view whether the untouched burrow is
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.view_is_burrow_liquidatable (burrow_id, checker) in
()
);
    (* With no auction running, the details view must raise error_NoOpenAuction. *)
    ("view_current_liquidation_auction_details - raises error when there is no current auction" >::
       fun _ ->
         Ligo.Tezos.reset ();
         (* A fresh checker state has no current liquidation auction. *)
         let checker = empty_checker in
         assert_raises
           (Failure (Ligo.string_of_int error_NoOpenAuction))
           (fun _ -> Checker.view_current_liquidation_auction_details ((), checker))
    );
    ("view_current_liquidation_auction_details - expected value for descending auction" >::
       fun _ ->
         Ligo.Tezos.reset ();
         let checker = checker_with_active_auction () in
         let auction = Option.get checker.liquidation_auctions.current_auction in
         let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in
         let expected_auction_details = {
           auction_id = auction.contents;
           collateral = tok_of_denomination (Ligo.nat_from_literal "23_669_648n");
           minimum_bid = liquidation_auction_current_auction_minimum_bid auction;
           (* No bid has been placed yet, so the auction is still in its
            * descending phase and reports no bid and no deadline. *)
           current_bid = None;
           remaining_blocks = None;
           remaining_seconds = None;
         }
         in
         assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details
    );
("view_current_liquidation_auction_details - expected value for ascending auction" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = checker_with_active_auction () in
let auction = Option.get checker.liquidation_auctions.current_auction in
Place a bid to turn the descending auction into an ascending one
let bidder = bob_addr in
let bid_amnt = liquidation_auction_current_auction_minimum_bid auction in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "1n", None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "1n", bid_amnt)) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, (auction.contents, bid_amnt)) in
Ligo.Tezos.new_transaction ~seconds_passed:500 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let auction = Option.get checker.liquidation_auctions.current_auction in
let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in
let expected_auction_details = {
auction_id = auction.contents;
collateral = tok_of_denomination (Ligo.nat_from_literal "23_669_648n");
minimum_bid = liquidation_auction_current_auction_minimum_bid auction;
current_bid = Some LiquidationAuctionPrimitiveTypes.({address=bidder; kit=bid_amnt;});
remaining_blocks = Some (Ligo.int_from_literal "-2");
remaining_seconds = Some (Ligo.int_from_literal "700");
}
in
assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details
);
]
(* Entry point: run the whole OUnit2 test suite. *)
let () =
  run_test_tt_main
    suite
| null | https://raw.githubusercontent.com/tezos-checker/checker/e4bd0f16aa14e10e8a62b28e85f8c98c388a0a6a/tests/testChecker.ml | ocaml | The starting checker state should satisfy the invariants to begin with.
Create some burrows and mint some kit
Note: setting the transaction to far in the future to ensure that the protected_index will become adequately high
* for the burrows to be liquidatable.
Touch burrows
Check the expected properties of this test fixture
Produces a checker state with liquidation slices in the queue but no current auction.
* Returns a list of details for queued slices related to a Close liquidation,
* a list of details for all other slices in the queue, and the contract state.
Produces a checker state with an active liquidation auction
Produces a checker state with a completed liquidation auction
Get the current auction minimum bid
Mint enough kit to bid
Place a bid
Wait until enough time has passed for the auction to be completable then touch checker
NOTE: we really want them to be identical here, hence the '='.
Create the burrow
Make a deposit
Create a burrow and deactivate it
Then activate it
Create a burrow and mint some kit
Note: all values here were arbitrarily chosen based on the amount of kit we minted above
Create a burrow and mint some kit
Then burn the kit
burrow creation values
collateral initialization values
Create a burrow and deactivate it
Create the burrow
Make a deposit
Lookup the current minimum bid
Place a bid
Use a checker state already containing some liquidatable burrows
Mark one of the liquidatable burrows for liquidation
Use a checker state already containing some liquidatable burrows
Note: using a non-closed burrow for this test so we don't have to also re-activate the burrow
Touch the remaining slices so the bid can be claimed.
Claim the winning bid
Create a burrow and mint some kit
NOTE: In a collateral=FA2 deployment this would actually fail.
Create the burrow with no delegate
Then set the burrow's delegate
Create a burrow and mint some kit
Add some liquidity to the contract
Note: all values here were arbitrarily chosen based on the amount of kit we minted above
Now remove the liquidity
Note: all values here were arbitrarily chosen based on the amount of kit we minted above
Create the burrow
Then touch it
Create a burrow
Try to withdraw some tez from the untouched burrow
The division in this case should produce no remainder
Create a burrow
There should be no operations emitted.
The owner should be able to burn it back.
Create a burrow
There should be no operations emitted.
Have the wrong person try to burn it back; this should fail.
before
after
before
after
Adjust transaction by a random amount of extra tez
UNSAFE CAST
before
after
FIXME: This test only rarely evaluates the 'eq' part of 'geq'. Reducing the range of possible `additional_tez` or increasing the
* number of QCheck samples may improve this.
("buy_kit - returns expected kit" >::
fun _ ->
Ligo.Tezos.reset ();
(* Populate the cfmm with some liquidity
Populate the cfmm with some liquidity (carefully crafted)
*************************************************************************
* FA2
*************************************************************************
mint some kit
get some liquidity
you can see the initial balances here for reference
make leena an operator of bob for kit
alice can transfer some kit to bob
but she can not transfer more than she has
and leena can send some of that kit back to alice
*************************************************************************
* LiquidationAuctions
*************************************************************************
mint some kit to convert to liquidity
barely on time
Activation/deactivation tests
Creation/deactivation does not incur any costs.
NOTE: tez is a misnomer; it's tok really
created burrow should be deposited (incl. the creation deposit)
burrow creation values
collateral initialization values
deactivation/activation = identity (if conditions are met ofc).
Minting another kit should fail
Over time the burrows with outstanding kit should be overburrowed
* (NOTE: even if the index stays where it was before, but that would
* take more time I guess).
If enough time passes and the index remains up, then the burrow is even liquidatable.
wow, high reward, many blocks have passed.
We shouldn't be able to cancel the liquidation of this slice if the
* prices don't change, even if it's not in an auction yet.
Trying to cancel a liquidation using an invalid pointer should fail.
Bid the minimum first
Same person increases the bid
Check that all the requests for burrows to send tez come _before_ the
* request to the oracle to update the index.
send tez requests
oracle call
This should fail; shouldn't be able to claim the win twice.
Setup.
Create a burrow with a very little tez in it.
Let some time pass. Over time the burrows with outstanding kit should
* become overburrowed, and eventually liquidatable. Note that this
* could be because of the index, but also it can happen because of the
* fees alone if the index remains the same.
sup
Ensures no empty slices in the queue.
Ensures no empty slices in the queue.
Create a burrow
Try to deposit some tez to the untouched burrow
Create a burrow
Try to withdraw some tez from the untouched burrow
Create a burrow
Try to mint some kit out of the untouched burrow
Create a burrow
Try to burn some kit into the untouched burrow
Create a burrow
Deactivate the burrow
Try to activate the untouched burrow
Create a burrow
Try to deactivate the untouched burrow
Create a burrow
Try to mark the untouched burrow for liquidation
TODO: Would be nice to create the conditions for entrypoint_mark_for_liquidation
* to really succeed instead of failing for another reason.
TODO: Add test "entrypoint_cancel_liquidation_slice - fails on untouched burrows"
NOTE: In a collateral=FA2 deployment this would actually fail.
Create a burrow
Try to set the delegate of the untouched burrow
Create a burrow
Mint some kit
Add some liquidity
must succeed, otherwise view_buy_kit_min_kit_expected overapproximated
must succeed, otherwise view_sell_kit_min_ctok_expected overapproximated
must succeed, otherwise
* view_add_liquidity_max_kit_deposited underapproximated or
* view_add_liquidity_min_lqt_minted overapproximated (or both of them did)
must succeed, otherwise
* view_remove_liquidity_min_ctok_withdrawn overapproximated or
* view_remove_liquidity_min_kit_withdrawn overapproximated (or both of them did)
Create a burrow
Try to view the max mintable kit from the untouched burrow
Create a burrow
Try to view whether the untouched burrow is overburrowed
Create a burrow | open Ctok
open Kit
open Tok
open Lqt
open Burrow
open OUnit2
open TestLib
open CheckerTypes
open Fa2Interface
open Fa2Ledger
open Fa2Implementation
open Error
open Ptr
open LiquidationAuctionTypes
open LiquidationAuction
let property_test_count = 10000
let qcheck_to_ounit t = OUnit.ounit2_of_ounit1 @@ QCheck_ounit.to_ounit_test t
module PtrMap = Map.Make(struct type t = ptr let compare = compare_ptr end)
let checker_address = !Ligo.Tezos.self_address
let empty_checker =
initial_checker
{ ctok_fa2 = ctok_fa2_addr;
ctez_cfmm = ctez_cfmm_addr;
oracle = oracle_addr;
collateral_fa2 = collateral_fa2_addr;
}
let _ = Checker.assert_checker_invariants empty_checker
Enhance the initial checker state with a populated cfmm in a consistent way .
let empty_checker_with_cfmm (cfmm: CfmmTypes.cfmm) =
let checker_kit = kit_sub cfmm.kit (kit_of_denomination (Ligo.nat_from_literal "1n")) in
let checker_liquidity = lqt_sub cfmm.lqt (lqt_of_denomination (Ligo.nat_from_literal "1n")) in
let checker =
{ empty_checker with
parameters = { empty_checker.parameters with circulating_kit = checker_kit };
cfmm = cfmm;
fa2_state =
let fa2_state = initial_fa2_state in
let fa2_state = ledger_issue_lqt (fa2_state, !Ligo.Tezos.self_address, checker_liquidity) in
let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, checker_kit) in
fa2_state;
} in
Checker.assert_checker_invariants checker;
checker
Produces a checker state with burrows .
* Returns a list of the liquidatable burrow ids , underburrowed burrow ids , and the contract state
* Returns a list of the liquidatable burrow ids, underburrowed burrow ids, and the contract state
*)
let checker_with_liquidatable_burrows () =
let checker = empty_checker in
let alice_burrow_1 = Ligo.nat_from_literal "0n" in
let alice_burrow_nos = List.init 20 (fun i -> Ligo.nat_from_int64 (Int64.of_int (i+1))) in
let bob_burrow_1 = Ligo.nat_from_literal "0n" in
Alice burrow 1 . Will NOT be
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:2 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal "2_000_000n"))) in
burrow 2 : N. Will be
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:3 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_mint_kit (checker, (alice_burrow_1, (kit_of_denomination (Ligo.nat_from_literal "100n")))) in
let checker = List.fold_left (
fun checker alice_burrow_no ->
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "2_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker =
let max_kit = (Checker.view_burrow_max_mintable_kit ((alice_addr, alice_burrow_no), checker)) in
Checker.entrypoint_mint_kit (checker, (alice_burrow_no, max_kit)) in
checker
)
checker
alice_burrow_nos
in
Bob burrow 1 . Will be .
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (bob_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal "20_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker =
let max_kit = (Checker.view_burrow_max_mintable_kit ((bob_addr, bob_burrow_1), checker)) in
Checker.entrypoint_mint_kit (checker, (bob_burrow_1, max_kit)) in
Increase value of kit to make some of the burrows by touching checker
Ligo.Tezos.new_transaction ~seconds_passed:10_000_000 ~blocks_passed:100_000 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_100_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_1)) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch_burrow (checker, (bob_addr, bob_burrow_1)) in
let checker = List.fold_left (
fun checker alice_burrow_no ->
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_no)) in
checker
)
checker
alice_burrow_nos
in
assert_bool "alice_burrow_1 was liquidatable but it is expected to not be"
(not (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_1) checker.burrows))));
assert_bool "bob_burrow_1 was not liquidatable but it is expected to be"
(Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (bob_addr, bob_burrow_1) checker.burrows)));
List.fold_left (
fun _ alice_burrow_no ->
assert_bool ("alice_burrow_" ^ (Ligo.string_of_nat alice_burrow_no) ^ " was not liquidatable but it is expected to be")
(Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_no) checker.burrows))))
()
alice_burrow_nos;
Checker.assert_checker_invariants checker;
let liquidatable_burrow_ids = List.append (List.map (fun x -> (alice_addr, x)) alice_burrow_nos) [(bob_addr, bob_burrow_1)] in
let underburrowed_burrow_ids = [(alice_addr, alice_burrow_1)] in
liquidatable_burrow_ids, underburrowed_burrow_ids, checker
let checker_with_queued_liquidation_slices () =
let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in
Mark the burrows for liquidation . This will add slices to the queue .
let checker, close_slice_details, other_slice_details = List.fold_left
(fun (checker, close_liquidation_slices, other_liquidation_slices) burrow_id ->
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
let new_slice = Option.get (SliceList.slice_list_youngest (SliceList.slice_list_from_auction_state checker.liquidation_auctions burrow_id) checker.liquidation_auctions) in
let slice_ptr = SliceList.slice_list_element_ptr new_slice in
let slize_tez = (SliceList.slice_list_element_contents new_slice).tok in
let is_burrow_now_closed = not (burrow_active (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))) in
let close_liquidation_slices, other_liquidation_slices =
if is_burrow_now_closed then
(List.append close_liquidation_slices [(burrow_id, slice_ptr, slize_tez)]), other_liquidation_slices
else
close_liquidation_slices, (List.append other_liquidation_slices [(burrow_id, slice_ptr, slize_tez)])
in
checker, close_liquidation_slices, other_liquidation_slices
)
(checker, [], [])
liquidatable_burrow_ids
in
assert_bool
"liquidation auction queue was empty, but it was expected to have some slices"
(Option.is_some (Avl.avl_peek_front checker.liquidation_auctions.avl_storage checker.liquidation_auctions.queued_slices));
assert (List.length close_slice_details > 0);
assert (List.length other_slice_details > 0);
close_slice_details, other_slice_details, checker
let checker_with_active_auction () =
let _, _, checker = checker_with_queued_liquidation_slices () in
Touch checker to start an auction
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch (checker, ()) in
assert_bool "a current liquidation auction should have been started but was not" (Option.is_some checker.liquidation_auctions.current_auction);
checker
let checker_with_completed_auction () =
let checker = checker_with_active_auction () in
let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in
let bidder = alice_addr in
let new_burrow_no = Ligo.nat_from_literal "100n" in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid)) in
Touch checker to start an auction
Ligo.Tezos.new_transaction ~seconds_passed:1202 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch (checker, ()) in
assert_bool
"there was not a completed liquidation auction but one should exist"
(Option.is_some checker.liquidation_auctions.completed_auctions);
bidder, checker
Helper for creating new burrows and extracting their ID from the corresponding Ligo Ops
let newly_created_burrow (checker: checker) (burrow_no: string) (collateral: tok) : burrow_id * checker =
let _ops, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, collateral)) in
((!Ligo.Tezos.sender, Ligo.nat_from_literal burrow_no), checker)
let get_balance_of (checker: checker) (addr: Ligo.address) (tok: fa2_token_id): Ligo.nat =
let ops, _checker = Checker.strict_entrypoint_balance_of (checker, { requests = [{ owner=addr; token_id=tok }]; callback=Ligo.contract_of_address addr}) in
match ops with
| [ Transaction (FA2BalanceOfResponseTransactionValue [ { request = _; balance = kit } ], _, _) ] -> kit
| _ -> failwith ("Unexpected fa2 response, got: " ^ show_operation_list ops)
let suite =
"Checker tests" >::: [
("initial touch (noop)" >::
fun _ ->
Ligo.Tezos.reset ();
let checker1 = empty_checker in
let ops, checker2 = Checker.touch_with_index checker1 (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "0n")) in
assert_operation_list_equal ~expected:[] ~real:ops;
()
);
("create_burrow - updates checker storage" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let burrow_id, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "1_000_000n")) in
assert_bool
"No matching burrow found after calling create_burrow"
(Option.is_some (Ligo.Big_map.find_opt burrow_id checker.burrows));
assert_bool
"The burrow existed before calling create_burrow"
(Option.is_none (Ligo.Big_map.find_opt burrow_id empty_checker.burrows))
);
("create_burrow - collateral in burrow representation does not include creation deposit" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let burrow_id, checker = newly_created_burrow empty_checker "0n" Constants.creation_deposit in
let expected_collateral = tok_zero in
match Ligo.Big_map.find_opt burrow_id checker.burrows with
| Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)
| None -> assert_failure "Expected a burrow representation to exist but none was found"
);
("create_burrow - fails when transaction amount is one mutez below creation deposit" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = tok_sub Constants.creation_deposit (tok_of_denomination (Ligo.nat_from_literal "1n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
assert_raises
(Failure (Ligo.string_of_int error_InsufficientFunds))
(fun () -> Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)))
);
("create_burrow - passes when transaction amount is exactly the creation deposit" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let burrow_id, checker = newly_created_burrow empty_checker "0n" Constants.creation_deposit in
match Ligo.Big_map.find_opt burrow_id checker.burrows with
| Some burrow ->
assert_tok_equal ~expected:tok_zero ~real:(burrow_collateral burrow)
| None -> assert_failure "Expected a burrow representation to exist but none was found"
);
("deposit_collateral - owner can deposit" >::
fun _ ->
Ligo.Tezos.reset ();
let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in
let deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in
let expected_collateral = tok_add deposit (tok_sub initial_deposit Constants.creation_deposit) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let (_, burrow_no) as burrow_id, checker = newly_created_burrow empty_checker "0n" initial_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, deposit)) in
match Ligo.Big_map.find_opt burrow_id checker.burrows with
| Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)
| None -> assert_failure "Expected a burrow representation to exist but none was found"
);
("deposit_collateral - non-owner cannot deposit" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n"))in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;
assert_raises
(Failure (Ligo.string_of_int error_NonExistentBurrow))
(fun () -> Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal "0n", tok_of_denomination (Ligo.nat_from_literal "1_000_000n"))))
);
("withdraw_collateral - owner can withdraw" >::
fun _ ->
Ligo.Tezos.reset ();
let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in
let withdrawal = tok_of_denomination (Ligo.nat_from_literal "1_000_000n") in
let expected_collateral = tok_sub initial_deposit (tok_add Constants.creation_deposit withdrawal) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let burrow_id, checker = newly_created_burrow empty_checker "0n" initial_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", withdrawal)) in
match Ligo.Big_map.find_opt burrow_id checker.burrows with
| Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)
| None -> assert_failure "Expected a burrow representation to exist but none was found"
);
    ("withdraw_collateral - non-owner cannot withdraw" >::
     fun _ ->
       (* Bob tries to withdraw from alice's burrow.  Since burrows are keyed
          by (owner, id), the lookup for (bob, 0n) fails with
          error_NonExistentBurrow. *)
       Ligo.Tezos.reset ();
       let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in
       let withdrawal = tok_of_denomination (Ligo.nat_from_literal "1_000_000n") in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let _, checker = newly_created_burrow empty_checker "0n" initial_deposit in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
       assert_raises
         (Failure (Ligo.string_of_int error_NonExistentBurrow))
         (fun () -> Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", withdrawal)))
    );
    ("entrypoint_activate_burrow - emits expected operations" >::
     fun _ ->
       (* Create a burrow, deactivate it, then re-activate it with the
          creation deposit.  Re-activation must emit exactly one FA2 transfer
          moving the creation deposit (1_000_000 in the collateral token's
          denomination) from the owner to the burrow contract. *)
       Ligo.Tezos.reset ();
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
       let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
       let ops, _ = Checker.entrypoint_activate_burrow (checker, (burrow_no, Constants.creation_deposit)) in
       (* The burrow contract address is stable, so it is fine to read it from
          the pre-activation state here. *)
       let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
       let expected_ops = [
         (LigoOp.Tezos.fa2_transfer_transaction
            [ Fa2Interface.(
                  { from_ = alice_addr;
                    txs = [
                      { to_ = burrow_address burrow;
                        token_id = TokenMetadata.tok_token_id;
                        amount = Ligo.nat_from_literal "1_000_000n";
                      };
                    ];
                  }
                )
            ]
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
    ("entrypoint_add_liquidity - emits expected operations" >::
     fun _ ->
       (* Adding cfmm liquidity must emit exactly one FA2 transfer moving the
          provided ctok from the provider to checker; the kit contribution
          does not show up in the emitted operations. *)
       Ligo.Tezos.reset ();
       let checker = empty_checker in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
       let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, _ = Checker.entrypoint_add_liquidity
           (checker,
            ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , kit_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , lqt_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , Ligo.timestamp_from_seconds_literal 999
            )
           ) in
       let expected_ops = [
         (LigoOp.Tezos.fa2_transfer_transaction
            [ Fa2Interface.{
                  from_ = alice_addr;
                  txs = [
                    { to_ = checker_address;
                      token_id = TokenMetadata.ctok_token_id;
                      amount = Ligo.nat_from_literal "5_000_000n";
                    }
                  ]
                }
            ]
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
("entrypoint_burn_kit - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
assert_operation_list_equal ~expected:[] ~real:ops
);
("entrypoint_create_burrow - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let amnt = tok_of_denomination (Ligo.nat_from_literal "100_000_000n") in
let ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amnt)) in
match ops with
Note : it 's not really possible to check the first parameter of the contract here which is the
* function which defines the contract 's logic .
* function which defines the contract's logic.
*)
| [ (CreateBurrowContract (_, delegate, tez, storage)) ;
(Transaction (FA2TransferTransactionValue _, _, _)) as op;
] ->
assert_key_hash_option_equal ~expected:None ~real:delegate;
assert_tez_equal ~expected:Common.tez_zero ~real:tez;
assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) storage;
let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, (Ligo.nat_from_literal "0n")) checker.burrows) in
assert_operation_equal
~expected:(
LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.(
{ from_ = alice_addr;
txs = [
{ to_ = burrow_address burrow;
token_id = TokenMetadata.tok_token_id;
amount = tok_to_denomination_nat amnt;
};
];
}
)
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
)
~real:op
| _ -> failwith ("Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops)
);
    ("entrypoint_deactivate_burrow - emits expected operations" >::
     fun _ ->
       (* Deactivating a burrow must emit one %burrowTransfer instructing the
          burrow contract to send the full deposited amount (100_000_000n)
          back to the given receiver (alice). *)
       Ligo.Tezos.reset ();
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
       let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "100_000_000n")) in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in
       let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
       let expected_ops = [
         (LigoOp.Tezos.address_nat_transaction
            (alice_addr, (Ligo.nat_from_literal "100_000_000n"))
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow)))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
    ("entrypoint_deposit_collateral - emits expected operations" >::
     fun _ ->
       (* Depositing collateral must emit exactly one FA2 transfer moving the
          deposited tok from the owner to the burrow contract. *)
       Ligo.Tezos.reset ();
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let ops, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal "3_000_000n"))) in
       let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
       let expected_ops = [
         (LigoOp.Tezos.fa2_transfer_transaction
            [ Fa2Interface.(
                  { from_ = alice_addr;
                    txs = [
                      { to_ = burrow_address burrow;
                        token_id = TokenMetadata.tok_token_id;
                        amount = Ligo.nat_from_literal "3_000_000n";
                      };
                    ];
                  }
                )
            ]
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
("entrypoint_liquidation_auction_place_bid - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = checker_with_active_auction () in
let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in
Mint some kit to be able to bid
let new_burrow_no = Ligo.nat_from_literal "100n" in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _checker = Checker.entrypoint_liquidation_auction_place_bid
(checker,
((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid))
in
assert_operation_list_equal ~expected:[] ~real:ops
);
    ("entrypoint_mark_for_liquidation - emits expected operations" >::
     fun _ ->
       (* Marking a liquidatable burrow must emit one %burrowTransfer paying a
          reward to the caller (bob, who is not the burrow owner).
          NOTE(review): the expected amount 1_001_000n is presumably the
          creation deposit plus a reward fraction of the collateral -- confirm
          against the burrow liquidation logic. *)
       Ligo.Tezos.reset ();
       let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in
       let burrow_id = List.nth liquidatable_burrow_ids 0 in
       let sender = bob_addr in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, _ = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
       let burrow = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in
       let expected_ops = [
         (LigoOp.Tezos.address_nat_transaction
            (sender, (Ligo.nat_from_literal "1_001_000n"))
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow)))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
("entrypoint_cancel_liquidation_slice - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let _, slice_details, checker = checker_with_queued_liquidation_slices () in
let ((burrow_owner, burrow_no), slice_ptr, _) = List.nth slice_details 0 in
Deposit some extra collateral to one of the burrows with slices in the auction queue
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal "4_000_000n"))) in
Now cancel one of the burrow 's liquidation slices
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_cancel_liquidation_slice (checker, slice_ptr) in
assert_operation_list_equal ~expected:[] ~real:ops
);
    ("entrypoint_liquidation_auction_claim_win - emits expected operations" >::
     fun _ ->
       (* After a completed auction's slices have been touched, the winning
          bidder claims the win: expect exactly one FA2 transfer of the
          auctioned collateral (sold_tok) from checker to the winner. *)
       Ligo.Tezos.reset ();
       let winning_bidder, checker = checker_with_completed_auction () in
       let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in
       let sold_tok = (Option.get (Avl.avl_root_data checker.liquidation_auctions.avl_storage auction_ptr)).sold_tok in
       let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       (* Touch all of the auction's slices before claiming. *)
       let _, checker = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:winning_bidder ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, _ = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_ptr) in
       let expected_ops = [
         (LigoOp.Tezos.fa2_transfer_transaction
            [ Fa2Interface.(
                  { from_ = !Ligo.Tezos.self_address;
                    txs = [
                      { to_ = winning_bidder;
                        token_id = TokenMetadata.tok_token_id;
                        amount = tok_to_denomination_nat sold_tok;
                      };
                    ];
                  }
                )
            ]
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
("entrypoint_mint_kit - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
assert_operation_list_equal ~expected:[] ~real:ops
);
    ("entrypoint_set_burrow_delegate - emits expected operations" >::
     fun _ ->
       (* Setting a delegate must emit one %burrowSetDelegate call on the
          burrow contract carrying the requested delegate key hash. *)
       Ligo.Tezos.reset ();
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, checker = Checker.entrypoint_set_burrow_delegate (checker, (burrow_no, Some charles_key_hash)) in
       let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
       let expected_ops = [
         (LigoOp.Tezos.opt_key_hash_transaction
            (Some charles_key_hash)
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowSetDelegate" (burrow_address burrow)))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
("entrypoint_receive_price - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:(checker.external_contracts.oracle) ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_receive_price (checker, (Ligo.nat_from_literal "42n", Tok.tok_scaling_factor_nat)) in
assert_operation_list_equal ~expected:[] ~real:ops
);
    ("entrypoint_remove_liquidity - emits expected operations" >::
     fun _ ->
       (* Set up a burrow, mint kit, add liquidity, then remove the same
          amount.  Only the ctok repayment appears in the emitted operations:
          exactly one FA2 transfer from checker back to the provider. *)
       Ligo.Tezos.reset ();
       let checker = empty_checker in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
       let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
       Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let _, checker = Checker.entrypoint_add_liquidity
           (checker,
            ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , kit_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , lqt_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , Ligo.timestamp_from_seconds_literal 999
            )
           ) in
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, _ = Checker.entrypoint_remove_liquidity
           (checker,
            ( lqt_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , ctok_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , kit_of_denomination (Ligo.nat_from_literal "5_000_000n")
            , Ligo.timestamp_from_seconds_literal 999
            )
           ) in
       let expected_ops = [
         (LigoOp.Tezos.fa2_transfer_transaction
            [ Fa2Interface.{
                  from_ = checker_address;
                  txs = [
                    { to_ = alice_addr;
                      token_id = TokenMetadata.ctok_token_id;
                      amount = Ligo.nat_from_literal "5_000_000n";
                    }
                  ]
                }
            ]
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
    (* FIXME: Operations differ between the FA2 deployment and the TEZ deployment
    ("entrypoint_touch - emits expected operations when checker needs to be touched" >::
     fun _ ->
       Ligo.Tezos.reset ();
       let checker = empty_checker in
       Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, _ = Checker.entrypoint_touch (checker, ()) in
       let expected_ops = [
         (LigoOp.Tezos.nat_contract_transaction
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%receive_price" !Ligo.Tezos.self_address))
            (Ligo.tez_from_literal "0mutez")
            (CheckerTypes.get_oracle_entrypoint checker.external_contracts)
         );
         (LigoOp.Tezos.nat_nat_contract_transaction
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%receive_ctez_marginal_price" !Ligo.Tezos.self_address))
            (Ligo.tez_from_literal "0mutez")
            (CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts)
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
    *)
("entrypoint_touch - emits expected operations when checker has already been touched" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_touch (checker, ()) in
assert_operation_list_equal ~expected:[] ~real:ops
);
("entrypoint_touch_liquidation_slices - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
let _, checker = checker_with_completed_auction () in
let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in
let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in
let slices = List.map (fun ptr -> Avl.avl_read_leaf checker.liquidation_auctions.avl_storage ptr) slice_ptrs in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in
Note : opening LiquidationAuctionPrimitiveTypes locally here since we have overloaded
* the " contents " record accessor in LiquidationAuctionTypes
* the "contents" record accessor in LiquidationAuctionTypes
*)
let expected_ops = let open LiquidationAuctionPrimitiveTypes in
List.rev (List.map (
fun slice ->
let burrow = Option.get (Ligo.Big_map.find_opt slice.contents.burrow checker.burrows) in
LigoOp.Tezos.address_nat_transaction
(checker_address, tok_to_denomination_nat slice.contents.tok)
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow)))
) slices) in
assert_operation_list_equal ~expected:expected_ops ~real:ops
);
("entrypoint_touch_burrow - emits expected operations" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal "0n")) in
assert_operation_list_equal ~expected:[] ~real:ops
);
    ("entrypoint_withdraw_collateral - emits expected operations" >::
     fun _ ->
       (* Withdrawing must emit one %burrowTransfer instructing the burrow
          contract to send the withdrawn amount to the owner. *)
       Ligo.Tezos.reset ();
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
       let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in
       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
       let ops, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", tok_of_denomination (Ligo.nat_from_literal "1_000_000n"))) in
       let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in
       let expected_ops = [
         (LigoOp.Tezos.address_nat_transaction
            (alice_addr, (Ligo.nat_from_literal "1_000_000n"))
            (Ligo.tez_from_literal "0mutez")
            (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow)))
         );
       ] in
       assert_operation_list_equal ~expected:expected_ops ~real:ops
    );
("calculate_touch_reward - expected result for last_touched 2s ago" >::
fun _ ->
The division in this case should return a remainder < 1/2
Ligo.Tezos.reset ();
let time_delta = 2 in
remainder : 12000 / 36000
let expected_reward = Ligo.int_from_literal "3333" in
let last_touched = Ligo.timestamp_from_seconds_literal 0 in
Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in
assert_int_equal ~expected:expected_reward ~real:actual_reward;
);
("calculate_touch_reward - expected result for last_touched 3s ago" >::
fun _ ->
Ligo.Tezos.reset ();
let time_delta = 3 in
remainder : 0
let expected_reward = Ligo.int_from_literal "5000" in
let last_touched = Ligo.timestamp_from_seconds_literal 0 in
Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in
assert_int_equal ~expected:expected_reward ~real:actual_reward;
);
("calculate_touch_reward - expected result for last_touched 4s ago" >::
fun _ ->
The division in this case should return a remainder > 1/2
Ligo.Tezos.reset ();
let time_delta = 4 in
remainder : 24000 / 36000
let expected_reward = Ligo.int_from_literal "6666" in
let last_touched = Ligo.timestamp_from_seconds_literal 0 in
Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in
assert_int_equal ~expected:expected_reward ~real:actual_reward;
);
("burn_kit - owner can burn" >::
fun _ ->
Ligo.Tezos.reset ();
let sender = alice_addr in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero;
let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "10_000_000n")) in
Mint as much kit as possible
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
let (ops, checker) =
Checker.entrypoint_mint_kit
( checker
, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n"))
) in
assert_operation_list_equal ~expected:[] ~real:ops;
let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender)) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_token)) in
()
);
("burn_kit - non-owner cannot burn" >::
fun _ ->
Ligo.Tezos.reset ();
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "10_000_000n")) in
Mint as much kit as possible
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (ops, checker) =
Checker.entrypoint_mint_kit
( checker
, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n"))
) in
assert_operation_list_equal ~expected:[] ~real:ops;
assert_raises
(Failure (Ligo.string_of_int error_NonExistentBurrow))
(fun () ->
let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, bob_addr)) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_token))
);
()
);
    (
      (* Property: entrypoint_buy_kit credits the sender with at least
         min_kit_expected kit, and its only emitted operation is a single FA2
         transfer of the provided ctok from the sender to checker. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_buy_kit_respects_min_kit_expected"
        ~count:property_test_count
        make_inputs_for_buy_kit_to_succeed
      @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->
      let sender = alice_addr in
      let checker = empty_checker_with_cfmm cfmm in
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in
      begin match ops with
        | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->
          assert_fa2_transfer_list_equal
            ~expected:[
              Fa2Interface.{
                from_ = sender;
                txs = [
                  { to_ = checker_address;
                    token_id = TokenMetadata.ctok_token_id;
                    amount = ctok_to_denomination_nat ctok_amount;
                  }
                ]
              }
            ]
            ~real:transfer
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops)
      end;
      (* NOTE(review): senders_old_kit / senders_new_kit are not bound anywhere
         visible in this chunk -- presumably FA2 ledger balances captured
         before and after the call; verify their definitions were not lost. *)
      Ligo.geq_nat_nat
        senders_new_kit
        (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_kit_expected))
    );
    (
      (* Property: kit is conserved by a buy -- the sum of the cfmm's kit and
         the sender's kit is unchanged by entrypoint_buy_kit. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_buy_kit_preserves_kit"
        ~count:property_test_count
        make_inputs_for_buy_kit_to_succeed
      @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->
      let checker = empty_checker_with_cfmm cfmm in
      let sender = alice_addr in
      let checker_cfmm_old_kit = kit_to_denomination_nat checker.cfmm.kit in
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in
      let checker_cfmm_new_kit = kit_to_denomination_nat checker.cfmm.kit in
      begin match ops with
        | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->
          assert_fa2_transfer_list_equal
            ~expected:[
              Fa2Interface.{
                from_ = sender;
                txs = [
                  { to_ = checker_address;
                    token_id = TokenMetadata.ctok_token_id;
                    amount = ctok_to_denomination_nat ctok_amount;
                  }
                ]
              }
            ]
            ~real:transfer
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops)
      end;
      (* NOTE(review): senders_old_kit / senders_new_kit are not bound anywhere
         visible in this chunk -- presumably FA2 ledger balances captured
         before and after the call; verify their definitions were not lost. *)
      Ligo.eq_nat_nat
        (Ligo.add_nat_nat checker_cfmm_old_kit senders_old_kit)
        (Ligo.add_nat_nat checker_cfmm_new_kit senders_new_kit)
    );
    (
      (* Property: the cfmm's ctok balance grows by exactly the ctok paid in
         by the buyer. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_buy_kit_preserves_tez"
        ~count:property_test_count
        make_inputs_for_buy_kit_to_succeed
      @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->
      let checker = empty_checker_with_cfmm cfmm in
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
      let _, new_checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in
      ctok_add checker.cfmm.ctok ctok_amount = new_checker.cfmm.ctok
    );
    (
      (* Property: entrypoint_sell_kit pays out at least min_ctok_expected.
         The kit being sold is issued to the sender up front (both in the FA2
         ledger and in circulating_kit) so the sale can proceed. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_sell_kit_respects_min_tez_expected"
        ~count:property_test_count
        make_inputs_for_sell_kit_to_succeed
      @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->
      let sender = alice_addr in
      let checker =
        let checker = empty_checker_with_cfmm cfmm in
        { checker with
          parameters =
            { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };
          fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);
        } in
      Checker.assert_checker_invariants checker;
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in
      (* The only emitted operation is the ctok payout from checker to the
         sender; its tx amount is the bought ctok. *)
      let bought_muctok = match ops with
        | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->
          begin
            assert_address_equal ~expected:checker_address ~real:from_address;
            assert_address_equal ~expected:sender ~real:tx.to_;
            tx.amount
          end
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops)
      in
      ctok_of_denomination bought_muctok >= min_ctok_expected
    );
    (
      (* Property: selling kit adds exactly kit_amount to the cfmm's kit
         pool.  As above, the kit is issued to the sender first so the sale
         can proceed. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_sell_kit_preserves_kit"
        ~count:property_test_count
        make_inputs_for_sell_kit_to_succeed
      @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->
      let sender = alice_addr in
      let checker =
        let checker = empty_checker_with_cfmm cfmm in
        { checker with
          parameters =
            { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };
          fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);
        } in
      Checker.assert_checker_invariants checker;
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let _, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in
      kit_add checker.cfmm.kit kit_amount = new_checker.cfmm.kit
    );
    (
      (* Property: ctok is conserved by a sale -- the new cfmm ctok plus the
         paid-out ctok equals the old cfmm ctok. *)
      Ligo.Tezos.reset();
      qcheck_to_ounit
      @@ QCheck.Test.make
        ~name:"test_sell_kit_preserves_tez"
        ~count:property_test_count
        make_inputs_for_sell_kit_to_succeed
      @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->
      let sender = alice_addr in
      let checker =
        let checker = empty_checker_with_cfmm cfmm in
        { checker with
          parameters =
            { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };
          fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);
        } in
      Checker.assert_checker_invariants checker;
      Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in
      (* Extract the paid-out ctok amount from the single FA2 transfer. *)
      let bought_muctok = match ops with
        | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->
          begin
            assert_address_equal ~expected:checker_address ~real:from_address;
            assert_address_equal ~expected:sender ~real:tx.to_;
            tx.amount
          end
        | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops)
      in
      ctok_add new_checker.cfmm.ctok (ctok_of_denomination bought_muctok) = checker.cfmm.ctok
    );
(
(* Property test: entrypoint_buy_kit delivers at least min_kit_expected kit
   when the caller provides enough ctok (computed from the cfmm state below). *)
let cfmm_kit = Ligo.nat_from_literal ("1_000n") in
let cfmm_ctok = ctok_of_denomination (Ligo.nat_from_literal ("1_000n")) in
(* The maximum amount of kit that you can buy with a finite amount of tez is
 * (1 - fee) * cfmm.kit - 1
*)
let max_buyable_kit = 997 in
let arb_kit = QCheck.map (fun x -> kit_of_denomination (Ligo.nat_from_literal (string_of_int x ^ "n"))) QCheck.(1 -- max_buyable_kit) in
let arb_tez = TestArbitrary.arb_small_positive_tez in
qcheck_to_ounit
@@ QCheck.Test.make
~name:"buy_kit - returns geq min_kit_expected kit for transactions with sufficient tez"
~count:property_test_count
(QCheck.pair arb_kit arb_tez)
@@ fun (min_expected_kit, additional_tez) ->
Ligo.Tezos.reset();
let sender = alice_addr in
(* Populate cfmm with initial liquidity *)
let open Ratio in
let checker =
empty_checker_with_cfmm
{ empty_checker.cfmm with
ctok = cfmm_ctok;
kit = kit_of_denomination cfmm_kit;
} in
(* Calculate minimum tez to get the min_expected kit given the state of the
   cfmm defined above *)
let ratio_minimum_tez = div_ratio
(ratio_of_nat cfmm_kit)
(
sub_ratio
(div_ratio (ratio_of_nat (Ligo.nat_from_literal "998n")) (ratio_of_nat (kit_to_denomination_nat min_expected_kit)))
(ratio_of_nat (Ligo.nat_from_literal "1n"))
) in
let minimum_tez = Ligo.mul_nat_tez (Ligo.abs (Common.cdiv_int_int ratio_minimum_tez.num ratio_minimum_tez.den)) (Ligo.tez_from_literal "1mutez") in
(* NOTE(review): `ctok_provided`, `senders_old_kit` and `senders_new_kit` are
   used below but their definitions (presumably built from `minimum_tez` and
   `additional_tez`, and from FA2 balance reads) are not visible in this copy
   of the file — they appear to have been lost; confirm against upstream. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_provided, min_expected_kit, Ligo.timestamp_from_seconds_literal 1)) in
(* The purchase must emit exactly one FA2 transfer: ctok from the buyer to
   checker. *)
begin match ops with
| [Transaction (FA2TransferTransactionValue transfer, _, _)] ->
assert_fa2_transfer_list_equal
~expected:[
Fa2Interface.{
from_ = sender;
txs = [
{ to_ = checker_address;
token_id = TokenMetadata.ctok_token_id;
amount = Ctok.ctok_to_denomination_nat ctok_provided;
}
]
}
]
~real:transfer
| _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops)
end;
(* The buyer must end up with at least min_expected_kit more kit than before. *)
Ligo.geq_nat_nat
senders_new_kit
(Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_expected_kit))
);
(* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have
 * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *)
(*
let checker =
empty_checker_with_cfmm
{ empty_checker.cfmm with
ctok = ctok_of_denomination (Ligo.nat_from_literal "2n");
kit = kit_of_denomination (Ligo.nat_from_literal "2n");
} in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_of_denomination (Ligo.nat_from_literal "1_000_000n"), kit_of_denomination (Ligo.nat_from_literal "1n"), Ligo.timestamp_from_seconds_literal 1)) in
let kit = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
let expected_ops = [
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.{
from_ = alice_addr;
txs = [
{ to_ = checker_address;
token_id = TokenMetadata.ctok_token_id;
amount = Ligo.nat_from_literal "1_000_000n";
}
]
}
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2))
);
] in
assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:kit;
assert_operation_list_equal ~expected:expected_ops ~real:ops
); *)
(* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have
 * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *)
(*
("sell_kit - returns expected tez" >::
fun _ ->
Ligo.Tezos.reset ();
let kit_to_sell = kit_of_denomination (Ligo.nat_from_literal "1_000_000n") in
let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal "1n") in
let checker =
let checker =
empty_checker_with_cfmm
{ empty_checker.cfmm with
ctok = ctok_of_denomination (Ligo.nat_from_literal "2n");
kit = kit_of_denomination (Ligo.nat_from_literal "2n");
lqt = lqt_of_denomination (Ligo.nat_from_literal "1n");
} in
{ checker with
parameters =
{ checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_to_sell };
fa2_state = ledger_issue_kit (checker.fa2_state, alice_addr, kit_to_sell);
} in
Checker.assert_checker_invariants checker;
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_expected, Ligo.timestamp_from_seconds_literal 1)) in
let expected_ops = [
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.{
from_ = checker_address;
txs = [
{ to_ = alice_addr;
token_id = TokenMetadata.ctok_token_id;
amount = Ligo.nat_from_literal "1n";
}
]
}
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2))
);
] in
assert_operation_list_equal ~expected:expected_ops ~real:ops
); *)
("remove_liquidity - returns expected kit and tez" >::
fun _ ->
(* Seed a cfmm with 2 ctok / 2 kit / 2 lqt, give alice 1 lqt, and check that
   burning that lqt pays out exactly 1 kit and 1 ctok. *)
Ligo.Tezos.reset ();
let min_kit_expected = kit_of_denomination (Ligo.nat_from_literal "1n") in
let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal "1n") in
let my_liquidity_tokens = lqt_of_denomination (Ligo.nat_from_literal "1n") in
let sender = alice_addr in
let checker =
{ empty_checker with
parameters = { empty_checker.parameters with circulating_kit = kit_of_denomination (Ligo.nat_from_literal "1n")};
cfmm =
{ empty_checker.cfmm with
ctok = ctok_of_denomination (Ligo.nat_from_literal "2n");
kit = kit_of_denomination (Ligo.nat_from_literal "2n");
lqt = lqt_of_denomination (Ligo.nat_from_literal "2n");
};
fa2_state =
(* alice holds the lqt to burn; checker itself holds the kit that the
   cfmm will pay out. *)
let fa2_state = initial_fa2_state in
let fa2_state = ledger_issue_lqt (fa2_state, sender, my_liquidity_tokens) in
let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, kit_of_denomination (Ligo.nat_from_literal "1n")) in
fa2_state;
} in
Checker.assert_checker_invariants checker;
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
let ops, checker = Checker.entrypoint_remove_liquidity (checker, (my_liquidity_tokens, min_ctok_expected, min_kit_expected, Ligo.timestamp_from_seconds_literal 1)) in
(* The ctok payout arrives as a single FA2 transfer from checker to the sender. *)
let ctok = match ops with
| [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->
begin
assert_address_equal ~expected:checker_address ~real:from_address;
assert_address_equal ~expected:sender ~real:tx.to_;
tx.amount
end
| _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops)
in
(* The kit payout is credited on checker's own FA2 ledger. *)
let kit = get_balance_of checker sender TokenMetadata.kit_token_id in
assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:kit;
assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:ctok;
()
);
("fa2 scenario" >::
fun _ ->
(* End-to-end FA2 ledger scenario: mint kit and lqt, distribute them to
   alice/bob, then exercise transfer, operator add/remove, and the
   FA2_INSUFFICIENT_BALANCE / FA2_NOT_OPERATOR failure paths. *)
Ligo.Tezos.reset ();
let checker = empty_checker in
let initial_addr = Ligo.address_of_string "INIT_ADDR" in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in
let max_kit = Checker.view_burrow_max_mintable_kit ((initial_addr, Ligo.nat_from_literal "0n"), checker) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", max_kit)) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker =
Checker.entrypoint_add_liquidity
( checker,
( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n")
, kit_of_denomination (Ligo.nat_from_literal "5_000_000n")
, lqt_of_denomination (Ligo.nat_from_literal "5n")
, Ligo.timestamp_from_seconds_literal 999
)
) in
(* initialize alice, bob, and leena accounts *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.strict_entrypoint_transfer (checker, [
{ from_ = initial_addr;
txs = [
{ to_ = alice_addr; token_id = TokenMetadata.kit_token_id; amount = Ligo.nat_from_literal "5n" };
{ to_ = bob_addr; token_id = TokenMetadata.lqt_token_id; amount = Ligo.nat_from_literal "5n" }
];
}]) in
let balance chk addr tok = Checker.view_get_balance ((addr, tok), chk) in
assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "5n");
assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "0n");
assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "0n");
assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "5n");
assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "0n");
assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "0n");
(* bob makes leena an operator for his kit (only). *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_update_operators (checker, [
(Add_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in
assert_equal true (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.kit_token_id)), checker));
assert_equal false (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.lqt_token_id)), checker));
assert_equal false (Checker.view_is_operator ((leena_addr, (bob_addr, TokenMetadata.kit_token_id)), checker));
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.strict_entrypoint_transfer (checker, [
{ from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id;amount=Ligo.nat_from_literal "2n"}]}]) in
assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "3n");
assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "2n");
assert_raises
(Failure "FA2_INSUFFICIENT_BALANCE")
(fun () -> Checker.strict_entrypoint_transfer (checker, [
{ from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "10n"}]}]));
(* leena, as bob's operator, can move bob's kit. *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.strict_entrypoint_transfer (checker, [
{ from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "1n"}]}]) in
assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "4n");
assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "1n");
(* but leena cannot send even a single kit from bob's account once she is not
   an operator anymore *)
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_update_operators (checker, [
(Remove_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal "0mutez");
assert_raises
(Failure "FA2_NOT_OPERATOR")
(fun () -> Checker.strict_entrypoint_transfer (checker, [
{ from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "1n"}]}]));
()
);
("view_total_supply (FA2) - initial kit supply" >::
fun _ ->
(* A freshly-initialized checker must report a zero kit total supply. *)
Ligo.Tezos.reset ();
assert_nat_equal
~expected:(Ligo.nat_from_literal "0n")
~real:(Checker.view_total_supply (TokenMetadata.kit_token_id, empty_checker));
()
);
("view_total_supply (FA2) - initial lqt supply" >::
fun _ ->
(* A freshly-initialized checker must report a zero lqt total supply. *)
Ligo.Tezos.reset ();
assert_nat_equal
~expected:(Ligo.nat_from_literal "0n")
~real:(Checker.view_total_supply (TokenMetadata.lqt_token_id, empty_checker));
()
);
("view_total_supply (FA2) - undefined token id" >::
fun _ ->
(* Querying the supply of a token id checker does not define must raise the
   standard FA2 error. *)
let query () = Checker.view_total_supply (Ligo.nat_from_literal "3n", empty_checker) in
assert_raises (Failure "FA2_TOKEN_UNDEFINED") query
);
("view_all_tokens (FA2)" >::
fun _ ->
(* Checker's FA2 token universe is exactly [kit; lqt], in that order. *)
Ligo.Tezos.reset ();
let expected_token_ids = [ TokenMetadata.kit_token_id; TokenMetadata.lqt_token_id ] in
assert_nat_list_equal
~expected:expected_token_ids
~real:(Checker.view_all_tokens ((), empty_checker));
()
);
("entrypoint_liquidation_auction_place_bid: should only allow the current auction" >::
fun _ ->
(* Drive a burrow into liquidation so an auction starts, then bid with an
   AVL pointer that is NOT the current auction's and expect
   error_InvalidLiquidationAuction. *)
Ligo.Tezos.reset ();
let checker = { empty_checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) } in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch (checker, ()) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "200_000_000n"))) in
let max_kit = Checker.view_burrow_max_mintable_kit ((alice_addr, Ligo.nat_from_literal "0n"), checker) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", max_kit)) in
(* Crash the index so the (maximally-minted) burrow becomes liquidatable. *)
let checker = { checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "10_000_000n")) } in
let _, checker = Checker.entrypoint_touch (checker, ()) in
Ligo.Tezos.new_transaction ~seconds_passed:1_000_000 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch (checker, ()) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal "0n")) in
let _, checker = Checker.entrypoint_mark_for_liquidation (checker, (alice_addr, Ligo.nat_from_literal "0n")) in
let _, checker = Checker.entrypoint_touch (checker, ()) in
let res = Checker.view_current_liquidation_auction_details ((), checker) in
(* A syntactically valid AVL pointer that is not the live auction. *)
let other_ptr = match res.auction_id with AVLPtr i -> Ptr.ptr_next i in
assert_raises
(Failure (Ligo.string_of_int error_InvalidLiquidationAuction))
(fun () -> Checker.entrypoint_liquidation_auction_place_bid (checker, (AVLPtr other_ptr, res.minimum_bid)));
(* NOTE(review): a final `()` expression appears to be missing after the
   trailing semicolon above (lost line?) — confirm against upstream. *)
);
("can complete a liquidation auction" >::
fun _ ->
(* Full liquidation-auction lifecycle: create + overmint a burrow, crash the
   index, mark for liquidation, run the auction (bids, close, touch slices),
   and finally claim the win. Also exercises burrow deactivate/reactivate as
   a sub-scenario. *)
Ligo.Tezos.reset ();
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "200_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _lqt_minted_ret_kit_ops, checker =
Checker.entrypoint_add_liquidity
( checker
, ( ctok_of_denomination (Ligo.nat_from_literal "1_000_000n")
, kit_one
, lqt_of_denomination (Ligo.nat_from_literal "1n")
, Ligo.timestamp_from_seconds_literal 1
)
(* NOTE(review): a closing `) in` (and possibly a `let tez = ... in` binding
   used in the sub-scenario below) appears to be missing here — lines seem
   to have been lost from this copy of the file; confirm against upstream. *)
let () =
(* Sub-scenario: creating a burrow emits an origination plus a collateral
   transfer; deactivating refunds the collateral; reactivating restores the
   original state. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;
let (ops, checker0) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tez)) in
let burrow_addr =
burrow_address
(Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal "0n") checker0.burrows)) in
let () = match ops with
| [ CreateBurrowContract (_, cb_delegate, cb_tez, cb_storage) ;
(Transaction (FA2TransferTransactionValue _, _, _)) as op ;
] ->
assert_key_hash_option_equal ~expected:None ~real:cb_delegate;
assert_tez_equal ~expected:Common.tez_zero ~real:cb_tez;
assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) cb_storage;
assert_operation_equal
~expected:(
LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.(
{ from_ = bob_addr;
txs = [
{ to_ = burrow_addr;
token_id = TokenMetadata.tok_token_id;
amount = tok_to_denomination_nat tez;
};
];
}
)
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
)
~real:op
| _ -> assert_failure ("Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (ops, checker1) = Checker.entrypoint_deactivate_burrow (checker0, (Ligo.nat_from_literal "0n", alice_addr)) in
assert_operation_list_equal
~expected:[
LigoOp.Tezos.address_nat_transaction
(alice_addr, tok_to_denomination_nat tez)
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" burrow_addr))
]
~real:ops;
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;
let _ops, checker2 = Checker.entrypoint_activate_burrow (checker1, (Ligo.nat_from_literal "0n", tez)) in
(* FIXME: cfmm contains a ratio, which cannot be compared for equality
   using (=). So, the next line can give false positives. *)
assert_equal checker0 checker2;
() in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;
let (_, checker) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in
let burrow_id = (bob_addr, Ligo.nat_from_literal "0n") in
let burrow_addr =
burrow_address
(Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal "0n") checker.burrows)) in
(* Mint as much kit as possible *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (_ops, checker) =
Checker.entrypoint_mint_kit
( checker
, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n"))
) in
let kit = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
assert_nat_equal ~expected:(Ligo.nat_from_literal "4_285_714n") ~real:kit;
assert_bool
"should not be overburrowed right after minting"
(not
@@ burrow_is_overburrowed
checker.parameters
(Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))
);
(* One kit above the maximum must fail. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
assert_raises
(Failure (Ligo.string_of_int error_MintKitFailure))
(fun () ->
Checker.entrypoint_mint_kit
( checker
, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))
)
);
Ligo.Tezos.new_transaction ~seconds_passed:60 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_001n")) in
let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in
assert_operation_list_equal ~expected:[] ~real:ops;
assert_bool
"if the index goes up, then burrows should become overburrowed"
(burrow_is_overburrowed
checker.parameters
(Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))
);
(* Jump forward and crash the index so the burrow becomes liquidatable;
   the toucher collects a reward. *)
Ligo.Tezos.new_transaction ~seconds_passed:(211*60) ~blocks_passed:211 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in
let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in
let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in
assert_operation_list_equal ~expected:[] ~real:ops;
(* NOTE(review): the `~expected:...` argument of this assert_int_equal is
   not visible here — a line appears to have been lost; confirm against
   upstream. *)
assert_int_equal
~real:touch_reward;
(* Marking for liquidation pays the liquidation reward to the marker. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
assert_operation_list_equal
~expected:[
LigoOp.Tezos.address_nat_transaction
(alice_addr, Ligo.nat_from_literal "1_009_000n")
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" burrow_addr))
]
~real:ops;
let slice =
(Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices)
|> Option.get
|> fun i -> i.youngest_slice in
(* Cancelling a warranted liquidation must fail. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
assert_raises
(Failure (Ligo.string_of_int error_UnwarrantedCancellation))
(fun () -> Checker.entrypoint_cancel_liquidation_slice (checker, slice));
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
assert_raises
(Failure (Ligo.string_of_int error_InvalidLeafPtr))
(fun () ->
let undefined_slice = LiquidationAuctionPrimitiveTypes.LeafPtr (ptr_next checker.liquidation_auctions.avl_storage.last_ptr) in
Checker.entrypoint_cancel_liquidation_slice (checker, undefined_slice)
);
(* Before the first touch there is no open auction yet. *)
Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
assert_raises
(Failure (Ligo.string_of_int error_NoOpenAuction))
(fun () -> Checker.view_current_liquidation_auction_details ((), checker));
let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in
let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in
let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in
assert_bool "should start an auction"
(Option.is_some checker.liquidation_auctions.current_auction);
assert_int_equal
~expected:(Ligo.int_from_literal "500_000")
~real:touch_reward;
(* Place a minimum bid, then outbid it. *)
Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in
let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in
let min_bid = Checker.view_current_liquidation_auction_details ((), checker) in
let auction_id =
min_bid.auction_id in
assert_kit_equal
~expected:(kit_of_denomination (Ligo.nat_from_literal "2_709_183n"))
~real:min_bid.minimum_bid;
let (ops, checker) =
Checker.entrypoint_liquidation_auction_place_bid (checker, (auction_id, min_bid.minimum_bid)) in
assert_operation_list_equal ~expected:[] ~real:ops;
let (ops, checker) =
Checker.entrypoint_liquidation_auction_place_bid
( checker
, (auction_id, kit_of_denomination (Ligo.nat_from_literal "4_200_000n"))
) in
let auction_id =
match checker.liquidation_auctions.current_auction with
| None -> assert_failure "entrypoint_liquidation_auction_place_bid should have succeeded"
| Some current_auction -> current_auction.contents in
assert_operation_list_equal ~expected:[] ~real:ops;
assert_int_equal
~expected:(Ligo.int_from_literal "500_000")
~real:touch_reward;
(* Half an hour later the auction completes on touch. *)
Ligo.Tezos.new_transaction ~seconds_passed:(30*60) ~blocks_passed:30 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in
let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in
let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in
assert_bool "auction should be completed"
(Option.is_none checker.liquidation_auctions.current_auction);
assert_int_equal
~expected:(Ligo.int_from_literal "21_000_000")
~real:touch_reward;
(* FIXME: Operations differ between the FA2 deployment and the TEZ deployment.
   Check that all the requests for burrows to send tez come _before_ the
   request to the oracle to update the index:
   begin match ops with
   | [ ... call ... ] -> ()
   | _ -> assert_failure ("Unexpected operations/operation order: " ^ show_operation_list ops)
   end;
*)
(* We don't need to touch the slice on this test case since
 * Checker.entrypoint_touch_with_index already touches the oldest 5
 * slices. *)
assert_raises
(Failure (Ligo.string_of_int error_InvalidLeafPtr))
(fun () -> Checker.entrypoint_touch_liquidation_slices (checker, [slice]));
assert_bool "burrow should have no liquidation slices"
(Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices= None);
let result = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in
assert_tok_equal
~expected:tok_zero
~real:(burrow_collateral_at_auction result);
(* The winner claims the auctioned collateral as a single FA2 transfer. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (ops, checker) = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id) in
assert_operation_list_equal
~expected:[
(LigoOp.Tezos.fa2_transfer_transaction
[ Fa2Interface.(
{ from_ = checker_address;
txs = [
{ to_ = alice_addr;
token_id = TokenMetadata.tok_token_id;
amount = Ligo.nat_from_literal "3_156_446n";
};
];
}
)
]
(Ligo.tez_from_literal "0mutez")
(Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2))
);
]
~real:ops;
(* A second claim on the same auction pointer must fail. *)
assert_raises
(Failure (Ligo.string_of_int error_InvalidAvlPtr))
(fun () -> Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id));
()
);
("entrypoint_mark_for_liquidation - should not create empty slices" >::
fun _ ->
Ligo.Tezos.reset ();
let sender = alice_addr in
let checker = empty_checker in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero;
let (_, burrow_no) as burrow_id, checker = newly_created_burrow checker "0n" (tok_of_denomination (Ligo.nat_from_literal "2_001_001n")) in
(* CALCULATIONS
   ~~~~~~~~~~~~
   Tez in the burrow is (1_001_001mutez + 1tez) so the reward is
   (1tez + 1_001mutez = 1_001_001). This means that
   - The slice we WOULD send to auctions is empty.
   - The burrow remains is empty so the next liquidation WOULD create another
     empty slice to auctions.
*)
(* Mint as much kit as possible. *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez");
let (_ops, checker) = Checker.entrypoint_mint_kit (checker, (burrow_no, kit_of_denomination (Ligo.nat_from_literal "476_667n"))) in
(* NOTE: I am a little surprised/worried about this being again 211... *)
(* NOTE(review): `blocks_passed` is used below but its definition (likely a
   `let blocks_passed = ... in`) is not visible here — a line appears to have
   been lost; confirm against upstream. *)
Ligo.Tezos.new_transaction ~seconds_passed:(60*blocks_passed) ~blocks_passed:blocks_passed ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in
(* Ensure that the burrow is liquidatable. *)
begin match Ligo.Big_map.find_opt burrow_id checker.burrows with
| None -> assert_failure "bug"
| Some burrow -> assert_bool "burrow needs to be liquidatable for the test to be potent." (Burrow.burrow_is_liquidatable checker.parameters burrow);
end;
(* Let's mark the burrow for liquidation now (first pass: leaves it empty but
   active). *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
(* Let's mark the burrow for liquidation now (second pass: deactivates it). *)
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in
()
);
("deposit_collateral - does not fail on untouched burrows" >::
fun _ ->
(* Regression: depositing into a burrow whose touch timestamp lags the
   checker's must succeed rather than fail. *)
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
(* Touch checker (but not the burrow) so their timestamps diverge. *)
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ = Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal "0n", amount)) in
()
);
("entrypoint_withdraw_collateral - does not fail on untouched burrows" >::
fun _ ->
(* Regression: withdrawing from a burrow whose touch timestamp lags the
   checker's must succeed rather than fail. *)
Ligo.Tezos.reset ();
let amount = tok_add Constants.creation_deposit Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
(* Touch checker (but not the burrow) so their timestamps diverge. *)
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", Constants.creation_deposit)) in
()
);
("entrypoint_mint_kit - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = tok_add Constants.creation_deposit Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in
()
);
("entrypoint_burn_kit - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = tok_add Constants.creation_deposit Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
Mint some kit out of the burrow
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ops, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in
()
);
("entrypoint_activate_burrow - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ops, checker = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal "0n", !Ligo.Tezos.sender)) in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ = Checker.entrypoint_activate_burrow (checker, (Ligo.nat_from_literal "0n", amount)) in
()
);
("entrypoint_deactivate_burrow - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal "0n", !Ligo.Tezos.sender)) in
()
);
("entrypoint_mark_for_liquidation - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
assert_raises
(Failure (Ligo.string_of_int error_NotLiquidationCandidate))
(fun () -> Checker.entrypoint_mark_for_liquidation (checker, burrow_id));
);
("entrypoint_set_burrow_delegate - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.entrypoint_set_burrow_delegate (checker, (Ligo.nat_from_literal "0n", None)) in
()
);
("cfmm views" >:::
let
with_cfmm_setup f =
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
let burrow_id = Ligo.nat_from_literal "42n" in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (checker, (burrow_id, None, tok_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:62 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ops, checker = Checker.entrypoint_mint_kit (checker, (burrow_id, kit_one)) in
Ligo.Tezos.new_transaction ~seconds_passed:121 ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let ctok_to_give = Ctok.ctok_of_denomination (Ligo.nat_from_literal "400_000n") in
let kit_to_give = Kit.kit_of_denomination (Ligo.nat_from_literal "400_000n") in
let min_lqt_to_mint = Lqt.lqt_of_denomination (Ligo.nat_from_literal "5n") in
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
let _ops, checker = Checker.entrypoint_add_liquidity (checker, (ctok_to_give, kit_to_give, min_lqt_to_mint, deadline)) in
Ligo.Tezos.new_transaction ~seconds_passed:59 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = f checker in ()
in
[
"view_buy_kit_min_kit_expected" >:: with_cfmm_setup
(fun checker ->
let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal "100_000n") in
let min_kit_to_buy = Checker.view_buy_kit_min_kit_expected (ctok_to_sell, checker) in
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
Checker.entrypoint_buy_kit (checker, (ctok_to_sell, min_kit_to_buy, deadline)));
"view_buy_kit_min_kit_expected - fail if no ctok is given" >:: with_cfmm_setup
(fun checker ->
assert_raises
(Failure (Ligo.string_of_int error_BuyKitNoCtokGiven))
(fun () -> Checker.view_buy_kit_min_kit_expected (Ctok.ctok_zero, checker))
);
"view_sell_kit_min_ctok_expected" >:: with_cfmm_setup
(fun checker ->
let kit_to_sell = Kit.kit_of_denomination (Ligo.nat_from_literal "100_000n") in
let min_ctok_to_buy = Checker.view_sell_kit_min_ctok_expected (kit_to_sell, checker) in
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_to_buy, deadline)));
"view_sell_kit_min_ctok_expected - fail if no kit is given" >:: with_cfmm_setup
(fun checker ->
assert_raises
(Failure (Ligo.string_of_int error_SellKitNoKitGiven))
(fun () -> Checker.view_sell_kit_min_ctok_expected (Kit.kit_zero, checker))
);
"view_add_liquidity_max_kit_deposited / view_add_liquidity_min_lqt_minted" >:: with_cfmm_setup
(fun checker ->
let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal "100_000n") in
let max_kit_to_sell = Checker.view_add_liquidity_max_kit_deposited (ctok_to_sell, checker) in
let min_lqt_to_buy = Checker.view_add_liquidity_min_lqt_minted (ctok_to_sell, checker) in
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
Checker.entrypoint_add_liquidity (checker, (ctok_to_sell, max_kit_to_sell, min_lqt_to_buy, deadline)));
"view_add_liquidity_max_kit_deposited - fail if no ctok is given" >:: with_cfmm_setup
(fun checker ->
assert_raises
(Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven))
(fun () -> Checker.view_add_liquidity_max_kit_deposited (Ctok.ctok_zero, checker))
);
"view_add_liquidity_min_lqt_minted - fail if no ctok is given" >:: with_cfmm_setup
(fun checker ->
assert_raises
(Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven))
(fun () -> Checker.view_add_liquidity_min_lqt_minted (Ctok.ctok_zero, checker))
);
"view_remove_liquidity_min_ctok_withdrawn / view_remove_liquidity_min_kit_withdrawn" >:: with_cfmm_setup
(fun checker ->
let lqt_to_sell = Lqt.lqt_of_denomination (Ligo.nat_from_literal "5n") in
let min_ctok_to_buy = Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_sell, checker) in
let min_kit_to_buy = Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_sell, checker) in
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in
Checker.entrypoint_remove_liquidity (checker, (lqt_to_sell, min_ctok_to_buy, min_kit_to_buy, deadline)));
"view_remove_liquidity_min_ctok_withdrawn - fail if no liquidity is given" >:: with_cfmm_setup
(fun checker ->
assert_raises
(Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned))
(fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (Lqt.lqt_zero, checker))
);
"view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (equal)" >:: with_cfmm_setup
(fun checker ->
let lqt_to_withdraw = checker.cfmm.lqt in
assert_raises
(Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))
(fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker))
);
"view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (more than)" >:: with_cfmm_setup
(fun checker ->
let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal "1n")) in
assert_raises
(Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))
(fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker))
);
"view_remove_liquidity_min_kit_withdrawn - fail if no liquidity is given" >:: with_cfmm_setup
(fun checker ->
assert_raises
(Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned))
(fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (Lqt.lqt_zero, checker))
);
"view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (equal)" >:: with_cfmm_setup
(fun checker ->
let lqt_to_withdraw = checker.cfmm.lqt in
assert_raises
(Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))
(fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker))
);
"view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (more than)" >:: with_cfmm_setup
(fun checker ->
let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal "1n")) in
assert_raises
(Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))
(fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker))
);
]
);
("view_burrow_max_mintable_kit - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.view_burrow_max_mintable_kit (burrow_id, checker) in
()
);
("view_is_burrow_overburrowed - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.view_is_burrow_overburrowed (burrow_id, checker) in
()
);
("view_is_burrow_liquidatable - does not fail on untouched burrows" >::
fun _ ->
Ligo.Tezos.reset ();
let amount = Constants.creation_deposit in
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;
let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in
let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in
Touch checker
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in
Try to view whether the untouched burrow is
Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
let _ = Checker.view_is_burrow_liquidatable (burrow_id, checker) in
()
);
("view_current_liquidation_auction_details - raises error when there is no current auction" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = empty_checker in
assert_raises
(Failure (Ligo.string_of_int error_NoOpenAuction))
(fun _ -> Checker.view_current_liquidation_auction_details ((), checker))
);
("view_current_liquidation_auction_details - expected value for descending auction" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = checker_with_active_auction () in
let auction = Option.get checker.liquidation_auctions.current_auction in
let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in
let expected_auction_details = {
auction_id = auction.contents;
collateral = tok_of_denomination (Ligo.nat_from_literal "23_669_648n");
minimum_bid = liquidation_auction_current_auction_minimum_bid auction;
current_bid = None;
remaining_blocks = None;
remaining_seconds = None;
}
in
assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details
);
("view_current_liquidation_auction_details - expected value for ascending auction" >::
fun _ ->
Ligo.Tezos.reset ();
let checker = checker_with_active_auction () in
let auction = Option.get checker.liquidation_auctions.current_auction in
Place a bid to turn the descending auction into an ascending one
let bidder = bob_addr in
let bid_amnt = liquidation_auction_current_auction_minimum_bid auction in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero;
let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "1n", None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "1n", bid_amnt)) in
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, (auction.contents, bid_amnt)) in
Ligo.Tezos.new_transaction ~seconds_passed:500 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez");
let auction = Option.get checker.liquidation_auctions.current_auction in
let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in
let expected_auction_details = {
auction_id = auction.contents;
collateral = tok_of_denomination (Ligo.nat_from_literal "23_669_648n");
minimum_bid = liquidation_auction_current_auction_minimum_bid auction;
current_bid = Some LiquidationAuctionPrimitiveTypes.({address=bidder; kit=bid_amnt;});
remaining_blocks = Some (Ligo.int_from_literal "-2");
remaining_seconds = Some (Ligo.int_from_literal "700");
}
in
assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details
);
]
(* Entry point: run the whole checker test suite under OUnit's
   console test runner. *)
let () =
  run_test_tt_main
    suite
|
0a8f25532cf0971dde3236b9b47e1c109be42b879b02b5a765e5a27342932666 | exercism/common-lisp | affine-cipher.lisp | (defpackage :affine-cipher
(:use :cl)
(:export :encode
:decode))
(in-package :affine-cipher)
(defun mmi (a)
  "Modular multiplicative inverse of A modulo 26, or NIL when A and 26
are not coprime (i.e. no inverse exists)."
  (loop for x from 1 below 26
        when (= 1 (mod (* a x) 26))
          return x))

(defun transcode (text transform)
  "Lower-case TEXT, keep digits unchanged, map each letter's zero-based
alphabet index through TRANSFORM, and drop every other character.
Returns the resulting characters as a string."
  (coerce
   (loop for ch across (string-downcase text)
         when (digit-char-p ch)
           collect ch
         when (alpha-char-p ch)
           collect (code-char
                    (+ (char-code #\a)
                       (funcall transform (- (char-code ch) (char-code #\a))))))
   'string))

(defun group (s size)
  "Join successive SIZE-character chunks of string S with single spaces."
  (format nil "~{~a~^ ~}"
          (loop for start from 0 below (length s) by size
                collect (subseq s start (min (length s) (+ start size))))))

(defun encode (plaintext a b)
  "Affine-encrypt PLAINTEXT with key (A, B): E(x) = (A*x + B) mod 26.
Letters are lower-cased, digits pass through unchanged, all other
characters are dropped, and the ciphertext is returned in
space-separated groups of five.  Signals an error when A has no
inverse modulo 26."
  (unless (mmi a)
    (error "a and m must be coprime."))
  (group (transcode plaintext
                    (lambda (x) (mod (+ (* a x) b) 26)))
         5))

(defun decode (ciphertext a b)
  "Invert ENCODE: D(y) = A^-1 * (y - B) mod 26.  Grouping spaces and
other non-alphanumeric characters are ignored; digits pass through.
Signals an error when A has no inverse modulo 26."
  (let ((inv (mmi a)))
    (unless inv
      (error "a and m must be coprime."))
    (transcode ciphertext
               (lambda (y) (mod (* inv (- y b)) 26)))))
| null | https://raw.githubusercontent.com/exercism/common-lisp/4bf94609c7ef0f9ca7ec0b6dca04cc10314cb598/exercises/practice/affine-cipher/affine-cipher.lisp | lisp | (defpackage :affine-cipher
(:use :cl)
(:export :encode
:decode))
(in-package :affine-cipher)
(defun encode (plaintext a b))
(defun decode (ciphertext a b))
|
|
b7ec84b46d276604ff9a1cdceaf78ec6d78cc1f28205716dd22d9eda14b7ea30 | coq/coq | extend.mli | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
(** Entry keys for constr notations *)
type side = Left | Right
type production_position =
| BorderProd of side * Gramlib.Gramext.g_assoc option
| InternalProd
type production_level =
| NextLevel
| NumLevel of int
| DefaultLevel (** Interpreted differently at the border or inside a rule *)
val production_level_eq : production_level -> production_level -> bool
(** User-level types used to tell how to parse or interpret of the non-terminal *)
type 'a constr_entry_key_gen =
| ETIdent
| ETName
| ETGlobal
| ETBigint
| ETBinder of bool (* open list of binders if true, closed list of binders otherwise *)
| ETConstr of Constrexpr.notation_entry * Notation_term.notation_binder_kind option * 'a
| ETPattern of bool * int option (* true = strict pattern, i.e. not a single variable *)
(** Entries level (left-hand side of grammar rules) *)
type constr_entry_key =
(production_level * production_position) constr_entry_key_gen
val constr_entry_key_eq : constr_entry_key -> constr_entry_key -> bool
* Entries used in productions , vernac side ( e.g. " x bigint " or " x ident " )
type simple_constr_prod_entry_key =
production_level constr_entry_key_gen
(** Entries used in productions (in right-hand-side of grammar rules), to parse non-terminals *)
type binder_target = ForBinder | ForTerm
type binder_entry_kind = ETBinderOpen | ETBinderClosed of constr_prod_entry_key option * (bool * string) list
and constr_prod_entry_key =
as an ident
as a name ( ident or _ )
as a global reference
as an ( unbounded ) integer
as name , or name : type or ' pattern , possibly in closed form
as or pattern , or a subentry of those
as pattern as a binder ( as subpart of a constr )
as non - empty list of constr , or subentries of those
as non - empty list of local binders
* { 5 AST for user - provided entries }
type 'a user_symbol =
| Ulist1 of 'a user_symbol
| Ulist1sep of 'a user_symbol * string
| Ulist0 of 'a user_symbol
| Ulist0sep of 'a user_symbol * string
| Uopt of 'a user_symbol
| Uentry of 'a
| Uentryl of 'a * int
type ('a,'b,'c) ty_user_symbol =
| TUlist1 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol
| TUlist1sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol
| TUlist0 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol
| TUlist0sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol
| TUopt : ('a,'b,'c) ty_user_symbol -> ('a option, 'b option, 'c option) ty_user_symbol
| TUentry : ('a, 'b, 'c) Genarg.ArgT.tag -> ('a,'b,'c) ty_user_symbol
| TUentryl : ('a, 'b, 'c) Genarg.ArgT.tag * int -> ('a,'b,'c) ty_user_symbol
| null | https://raw.githubusercontent.com/coq/coq/f66b58cc7e6a8e245b35c3858989181825c591ce/parsing/extend.mli | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
* Entry keys for constr notations
* Interpreted differently at the border or inside a rule
* User-level types used to tell how to parse or interpret of the non-terminal
open list of binders if true, closed list of binders otherwise
true = strict pattern, i.e. not a single variable
* Entries level (left-hand side of grammar rules)
* Entries used in productions (in right-hand-side of grammar rules), to parse non-terminals | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
type side = Left | Right
type production_position =
| BorderProd of side * Gramlib.Gramext.g_assoc option
| InternalProd
type production_level =
| NextLevel
| NumLevel of int
val production_level_eq : production_level -> production_level -> bool
type 'a constr_entry_key_gen =
| ETIdent
| ETName
| ETGlobal
| ETBigint
| ETConstr of Constrexpr.notation_entry * Notation_term.notation_binder_kind option * 'a
type constr_entry_key =
(production_level * production_position) constr_entry_key_gen
val constr_entry_key_eq : constr_entry_key -> constr_entry_key -> bool
* Entries used in productions , vernac side ( e.g. " x bigint " or " x ident " )
type simple_constr_prod_entry_key =
production_level constr_entry_key_gen
type binder_target = ForBinder | ForTerm
type binder_entry_kind = ETBinderOpen | ETBinderClosed of constr_prod_entry_key option * (bool * string) list
and constr_prod_entry_key =
as an ident
as a name ( ident or _ )
as a global reference
as an ( unbounded ) integer
as name , or name : type or ' pattern , possibly in closed form
as or pattern , or a subentry of those
as pattern as a binder ( as subpart of a constr )
as non - empty list of constr , or subentries of those
as non - empty list of local binders
* { 5 AST for user - provided entries }
type 'a user_symbol =
| Ulist1 of 'a user_symbol
| Ulist1sep of 'a user_symbol * string
| Ulist0 of 'a user_symbol
| Ulist0sep of 'a user_symbol * string
| Uopt of 'a user_symbol
| Uentry of 'a
| Uentryl of 'a * int
type ('a,'b,'c) ty_user_symbol =
| TUlist1 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol
| TUlist1sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol
| TUlist0 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol
| TUlist0sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol
| TUopt : ('a,'b,'c) ty_user_symbol -> ('a option, 'b option, 'c option) ty_user_symbol
| TUentry : ('a, 'b, 'c) Genarg.ArgT.tag -> ('a,'b,'c) ty_user_symbol
| TUentryl : ('a, 'b, 'c) Genarg.ArgT.tag * int -> ('a,'b,'c) ty_user_symbol
|
3bf1a899432101bb7482989a15f22c893ae938fa93801a4d53a8da84702149d2 | emanjavacas/cosycat | results_frame.cljs | (ns cosycat.review.components.results-frame
(:require [reagent.core :as reagent]
[re-frame.core :as re-frame]
[react-bootstrap.components :as bs]
[cosycat.components :refer [error-panel throbbing-panel]]
[cosycat.app-utils :refer [parse-hit-id]]
[cosycat.snippet :refer [snippet-modal]]
[cosycat.annotation.components.annotation-component
:refer [annotation-component]]))
(defn highlight-fn
  "Build a highlight predicate for a hit: given a token map, return true
  when the token's :_id appears among the hit's annotations stored under
  [:meta :anns]."
  [hit-map]
  (let [anns (get-in hit-map [:meta :anns])]
    (fn [token]
      (contains? anns (:_id token)))))
(defn hit-row
  "Form-2 reagent component rendering one review hit identified by `hit-id`.
  Subscribes once (at mount) to the hit map and to the per-user color map;
  shows a placeholder while the hit is still loading ([:meta :throbbing?]),
  otherwise delegates rendering to `annotation-component`."
  [hit-id]
  (let [hit-map (re-frame/subscribe [:project-session :review :results :results-by-id hit-id])
        color-map (re-frame/subscribe [:project-users-colors])]
    (fn [hit-id]
      [:div.row
       (if (get-in @hit-map [:meta :throbbing?])
         ;; hit data not yet fetched
         "loading..."
         [annotation-component @hit-map color-map
          :db-path :review
          :corpus (get-in @hit-map [:meta :corpus])
          :editable? true
          ;; highlight only the tokens that carry annotations (see highlight-fn)
          :highlight-fn (highlight-fn @hit-map)
          :show-match? false
          :show-hit-id? true])])))
(defn sort-by-doc
  "Order `hit-ids` by their document id first and, within the same
  document, by the hit's start offset."
  [hit-ids]
  (letfn [(doc-position [hit-id]
            (let [{:keys [doc-id hit-start]} (parse-hit-id hit-id)]
              [doc-id hit-start]))]
    (sort-by doc-position hit-ids)))
(defn results-frame
  "Top-level review results panel.  Renders one `hit-row` per annotated
  hit, ordered by document id and hit position; shows an error panel when
  no annotations were found and a throbber while results are loading.
  Also mounts the snippet modal for the :review panel."
  []
  (let [results (re-frame/subscribe [:project-session :review :results :results-by-id])
        throbbing? (re-frame/subscribe [:throbbing? :review-frame])]
    (fn []
      [:div.container-fluid
       (cond
         (empty? @results) [:div.row [error-panel {:status "Ooops! Found zero annotations"}]]
         @throbbing? [:div.row [throbbing-panel :throbber :horizontal-loader]]
         ;; doall forces the lazy seq inside the reactive render
         :else [:div.row (doall (for [hit-id (sort-by-doc (keys @results))]
                                  ^{:key (str "review-" hit-id)} [hit-row hit-id]))])
       [snippet-modal :review]])))
| null | https://raw.githubusercontent.com/emanjavacas/cosycat/a7186363d3c0bdc7b714af126feb565f98793a6e/src/cljs/cosycat/review/components/results_frame.cljs | clojure | (ns cosycat.review.components.results-frame
(:require [reagent.core :as reagent]
[re-frame.core :as re-frame]
[react-bootstrap.components :as bs]
[cosycat.components :refer [error-panel throbbing-panel]]
[cosycat.app-utils :refer [parse-hit-id]]
[cosycat.snippet :refer [snippet-modal]]
[cosycat.annotation.components.annotation-component
:refer [annotation-component]]))
(defn highlight-fn [{{:keys [anns]} :meta}]
(fn [{id :_id}]
(contains? anns id)))
(defn hit-row [hit-id]
(let [hit-map (re-frame/subscribe [:project-session :review :results :results-by-id hit-id])
color-map (re-frame/subscribe [:project-users-colors])]
(fn [hit-id]
[:div.row
(if (get-in @hit-map [:meta :throbbing?])
"loading..."
[annotation-component @hit-map color-map
:db-path :review
:corpus (get-in @hit-map [:meta :corpus])
:editable? true
:highlight-fn (highlight-fn @hit-map)
:show-match? false
:show-hit-id? true])])))
(defn sort-by-doc [hit-ids]
(sort-by #(let [{:keys [hit-start doc-id]} (parse-hit-id %)] [doc-id hit-start]) hit-ids))
(defn results-frame []
  ;; Form-2 Reagent component: subscriptions are created once in the
  ;; outer fn and dereferenced inside the render fn so re-frame can
  ;; trigger re-renders when they change.
  (let [results (re-frame/subscribe [:project-session :review :results :results-by-id])
        throbbing? (re-frame/subscribe [:throbbing? :review-frame])]
    (fn []
      [:div.container-fluid
       (cond
         ;; No annotation hits at all: show the error panel.
         (empty? @results) [:div.row [error-panel {:status "Ooops! Found zero annotations"}]]
         ;; Results still loading: show the throbber.
         @throbbing? [:div.row [throbbing-panel :throbber :horizontal-loader]]
         ;; Otherwise one hit-row per hit, sorted by document then offset;
         ;; doall forces the lazy seq inside the reactive render.
         :else [:div.row (doall (for [hit-id (sort-by-doc (keys @results))]
                                  ^{:key (str "review-" hit-id)} [hit-row hit-id]))])
       ;; Snippet modal, scoped to the review panel.
       [snippet-modal :review]])))
|
|
cb9ed9478a4d22f62b2ca09141e35797bb1fe89e08300a8ab41d4b959ce3b598 | kiselgra/c-mera | version.lisp | (in-package :c-mera)
;; Library version string, read from the ASDF system definition so it
;; never drifts from the .asd file.
(defparameter *version* (asdf:component-version (asdf:find-system :c-mera)))
;; Active generator backend; :undefined until something sets it --
;; presumably chosen at startup (not visible in this file).
(defparameter *generator* :undefined)
(defun print-version ()
  ;; Print the bare version string followed by a newline.
  (format t "~a~%" *version*))
| null | https://raw.githubusercontent.com/kiselgra/c-mera/d06ed96d50a40a3fefe188202c8c535d6784f392/src/c-mera/version.lisp | lisp | (in-package :c-mera)
(defparameter *version* (asdf:component-version (asdf:find-system :c-mera)))
(defparameter *generator* :undefined)
(defun print-version ()
(format t "~a~%" *version*))
|
|
a1a25142787e2a42ab680a10e6441fea2092ed0d95338d09d9706830d633b512 | jeffshrager/biobike | doc-objects.lisp | -*- Package : help ; mode : lisp ; base : 10 ; Syntax : Common - Lisp ; -*-
(in-package :help)
;;; +=========================================================================+
| Copyright ( c ) 2002 - 2006 JP , , |
;;; | |
;;; | Permission is hereby granted, free of charge, to any person obtaining |
;;; | a copy of this software and associated documentation files (the |
| " Software " ) , to deal in the Software without restriction , including |
;;; | without limitation the rights to use, copy, modify, merge, publish, |
| distribute , sublicense , and/or sell copies of the Software , and to |
| permit persons to whom the Software is furnished to do so , subject to |
;;; | the following conditions: |
;;; | |
;;; | The above copyright notice and this permission notice shall be included |
| in all copies or substantial portions of the Software . |
;;; | |
| THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , |
;;; | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
;;; | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
;;; | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
| CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |
;;; | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
;;; | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
;;; +=========================================================================+
Authors : JP Massar , .
;;; All the various types of documentation (theoretically) available
;;; in the system.
;;; Arguably, FUNCTION-DOCUMENTATION and SYMBOL-DOC should be merged
;;; Symbols naming every documentation CLOS class the help system can
;;; store.  Commented-out entries are declared below but not yet used.
(defparameter *documentation-types*
  '(
    documentation-file
    function-documentation
    glossary-entry
    ;; macro-documentation
    module
    symbol-doc
    topic
    tutorial
    ;; variable-documentation
    ))
;;; For each documentation type, the hash-table :TEST used to key its
;;; registry: EQ for symbol-keyed types, EQUAL/EQUALP for string keys
;;; (EQUALP makes string lookups case-insensitive).
(defparameter *doc-types-hash-types*
  '(
    (documentation-file equal)
    (function-documentation eq)
    (glossary-entry equalp)
    ;; macro-documentation
    (module equalp)
    (symbol-doc eq)
    (topic equalp)
    (tutorial equalp)
    ;; variable-documentation
    ))
;; Where all documentation objects are stored.
;; Use FIND-DOCUMENTATION to pull something out
(eval-when (:compile-toplevel :load-toplevel :execute)
  (defun create-doc-hash-tables ()
    "Return a fresh registry: a hash table mapping each documentation
type to its own (initially empty) table, keyed as specified by
*DOC-TYPES-HASH-TYPES*."
    (let ((registry (make-hash-table)))
      (dolist (entry *doc-types-hash-types* registry)
        (destructuring-bind (doc-type hash-test) entry
          (setf (gethash doc-type registry)
                (make-hash-table :test hash-test)))))))
;; The global registry: doc type -> inner table of (name -> doc object).
(defvar *documentation* (create-doc-hash-tables))
;; Return the doc object registered under NAME/TYPE, creating and
;; registering a fresh instance if none exists yet.
(defun intern-documentation (name type)
  (or (find-documentation name type)
      (setf (gethash name (gethash type *documentation*))
            (make-instance type :name name))))
;; Drop NAME's entry from TYPE's registry.
;; NOTE(review): the trailing MAKE-INSTANCE builds a fresh, unregistered
;; object purely as the return value; it looks like a copy/paste remnant
;; of INTERN-DOCUMENTATION -- confirm no caller relies on it before removing.
(defun remove-documentation (name type)
  (remhash name (gethash type *documentation*)) (make-instance type :name name))
;; Look up NAME in TYPE's registry; NIL when absent.
(defun find-documentation (name type)
  (gethash name (gethash type *documentation*)))
;; Discard every registered doc object by rebuilding the registry.
(defun clear-documentation ()
  (setf *documentation* (create-doc-hash-tables)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; The hierarchy of documentation classes
;; Root of the documentation class hierarchy: a name, an optional
;; one-line docstring, and a referred-to-by slot (presumably back-links
;; from other doc objects -- confirm against the code that fills it).
(defclass basicdoc ()
  ((name :initarg :name :accessor name)
   ;; AKA 'summary'
   (docstring :initform nil :initarg :docstring :accessor docstring)
   (referred-to-by :initform nil :accessor referred-to-by)
   ))
;; Default readers so every doc object answers the full documentation
;; protocol even when only BASICDOC slots exist.
(defmethod summary ((obj basicdoc)) (docstring obj))
(defmethod text ((obj basicdoc)) nil)
(defmethod keywords ((obj basicdoc)) nil)
(defmethod see-also ((obj basicdoc)) nil)
(defmethod explicitly-documented-p ((obj basicdoc)) nil)
(defmethod author ((obj basicdoc)) nil)
;; Concise printed representation showing name and concrete class.
(defmethod print-object ((obj basicdoc) stream)
  (format stream "<Docobj ~A (~A)>" (help:name obj) (type-of obj)))
;; Full documentation protocol with real storage for each field.
(defclass documented (basicdoc)
  ((text :initform nil :accessor text)
   (keywords :initform nil :accessor keywords)
   (see-also :initform nil :accessor see-also)
   (author :initform nil :accessor author)
   (explicitly-documented-p :initform nil :accessor explicitly-documented-p)))
;; Adds per-user-mode display filtering; defaults to all display modes.
(defclass mode-documented (documented)
  ((display-modes
    :initform (list :all)
    :accessor display-modes
    )))
;; Documentation describing a file shipped with the system.
(defclass documentation-file (mode-documented)
  ((label :initform nil :accessor label)
   (source-file :initform nil :accessor source-file)
   (associated-text-file
    :initform nil
    :accessor associated-text-file
    )
   (matches :initform nil :accessor matches)
   (descriptor :initform nil :accessor descriptor)
   ))
;; the reader methods are defined in document-function.lisp
;; Documentation for a function defined via DOCUMENT-FUNCTION.  Slots
;; are declared with :WRITER only; the readers are implemented by hand
;; in document-function.lisp (per the comment above).
(defclass function-documentation (documented module-element)
  ((parameters :initform nil :writer (setf parameters))
   (return-values :initform nil :writer (setf return-values))
   (syntax :initform nil :writer (setf syntax))
   (vpl-syntax :initform nil :writer (setf vpl-syntax))
   (examples :initform nil :writer (setf examples))
   (examples-package :initform nil :writer (setf examples-package))
   (synonyms :initform nil :writer (setf synonyms))
   (flavor :initform :defun :writer (setf flavor))
   (canonical :initform nil :accessor canonical)
   (aliased :initform nil :accessor aliased)
   ))
(defmethod print-object ((obj function-documentation) stream)
  (print-symbol-docobj obj stream "DocFunc"))
(defclass glossary-entry (documented) ())
;; If/when actually implemented, should become a subtype of DOCUMENTED
(defclass macro-documentation (basicdoc) ())
;; A named grouping of documented functions/variables/macros, possibly
;; containing nested submodules.
(defclass module (mode-documented)
  ((functions :initform nil :accessor functions)
   (variables :initform nil :accessor variables)
   (macros :initform nil :accessor macros)
   (submodules :initform nil :accessor submodules)
   (toplevel? :initform t :accessor toplevel?)
   (alpha-listing? :initform t :accessor alpha-listing?)
   ))
;; Fallback documentation for an exported symbol with no explicit
;; DOCUMENT-FUNCTION entry (see CREATE-SYMBOL-DOC-ENTRIES below).
;; NOTE: the two slot comments below had lost their ";;" markers in this
;; copy, leaving stray tokens inside the slot list; restored here.
(defclass symbol-doc (basicdoc)
  (
   ;; one of :special-operator, :define-function, :macro, :function,
   ;; :constant, :variable, or :type
   (stype :initform nil :initarg :stype :accessor stype)
   ;; one of :function, :variable, or :type
   (dtype :initform nil :initarg :dtype :accessor dtype)))
(defmethod print-object ((obj symbol-doc) stream)
  (print-symbol-docobj obj stream "Symbol"))
(defclass topic (mode-documented) ())
;; A live tutorial; its definer lives in live-tutorial.lisp.
;; NOTE: several of the slot-description comments below had lost their
;; ";;" markers in this copy, making the form unreadable; restored here.
(defclass tutorial (mode-documented)
  ;; :filename -- a string, must be full pathname
  ;; :file-type -- either :html or :lhtml
  ;; :user-mode -- a keyword or a list of keywords
  ;; :sort-order -- an integer
  ;; :description -- a string, this is really the summary
  ;; :section-header -- two strings, a title, and a color
  ;; :lhtml-function -- used only with file type :lhtml, must be a symbol
  ;; :start-function -- used only with file type :lhtml, must be a symbol
  ((filename :initform nil :accessor filename)
   (file-type :initform nil :accessor file-type)
   (user-mode :initform nil :accessor user-mode)
   (sort-order :initform nil :accessor sort-order)
   (description :initform nil :accessor description)
   (lhtml-function :initform nil :accessor lhtml-function)
   (start-function :initform nil :accessor start-function)
   (section-header :initform nil :accessor section-header)
   ))
;; If/when actually implemented, should become a subtype of DOCUMENTED
(defclass variable-documentation (basicdoc) ())
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; The definitions that create the verifiers and parsers for
;;; the definition forms for each documentation object.
;; Each entry below appears to be
;;   (clause-keyword cardinality verifier parser slot-accessor)
;; consumed by DEFINE-DOC-DEFINER (defined elsewhere) to generate the
;; DEF-xxx macro and CREATE-xxx function named here -- confirm against
;; the definer macro's source.
(define-doc-definer
  documentation-file
  def-documentation-file
  create-documentation-file
  ((:summary :one-or-none ddd-string-or-nil identity help:docstring)
   (:keywords :list ddd-all-symbols-or-strings identity help:keywords)
   (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)
   (:author :list ddd-all-strings identity help:author)
   (:descriptor :one-or-none ddd-string-or-nil identity help:descriptor)
   ))
;; function-documentation has no define-doc-definer, its verifier and parser
;; are implemented by hand in document-function.lisp
(define-doc-definer
  glossary-entry
  def-glossary-entry
  create-glossary-entry
  ((:summary :one-or-none ddd-string-or-nil identity help:docstring)
   (:text :non-nil-list ddd-identity identity help:text)
   (:keywords :list ddd-all-symbols-or-strings identity help:keywords)
   (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)
   (:author :list ddd-all-strings identity help:author)
   ))
;; not doing macro-documentation for now since it is not used
;; Definer for MODULE objects; after parsing, the :after-code marks the
;; module as explicitly documented.
(define-doc-definer
  module
  def-module
  create-module
  ((:summary :one-or-none ddd-string-or-nil identity help:docstring)
   (:text :non-nil-list ddd-identity identity help:text)
   (:keywords :list ddd-all-symbols-or-strings identity help:keywords)
   (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)
   (:author :list ddd-all-strings identity help:author)
   (:functions :list ddd-all-symbols identity help:functions)
   (:variables :list ddd-all-symbols identity help:variables)
   (:macros :list ddd-all-symbols identity help:macros)
   (:submodules :list ddd-all-symbols identity help:submodules)
   (:toplevel? :exactly-one ddd-boolean identity help:toplevel?)
   (:alpha-listing? :exactly-one ddd-boolean identity help:alpha-listing?)
   (:display-modes :list ddd-all-symbols identity help:display-modes)
   )
  :after-code (setf (explicitly-documented-p obj) t))
;; Sugar for DEF-MODULE: the docstring becomes the :summary clause.
(defmacro document-module (name &body (docstring &rest meta))
  `(def-module ,(string name) (:summary ,docstring) ,@meta))
(defmacro undocument-module (name &key remove-functions)
  "Remove the module named NAME from the registry.  With :REMOVE-FUNCTIONS
true, also drop the documentation of every function in the module;
otherwise re-home those functions into the canonical Uncategorized module."
  `(progn
     (let ((module (find-documentation ',name 'module)))
       (if ,remove-functions
           (loop for fn in (functions module) do
                 (remove-documentation fn 'function-documentation))
           ;; Use the same string key as the canonical Uncategorized module.
           ;; The previous symbol key 'UNCATEGORIZED never matched the
           ;; string "UNCATEGORIZED" under EQUALP, so orphaned functions
           ;; landed in a second, invisible module object.
           (let ((uncategorized (intern-documentation *uncategorized-key* 'module)))
             (loop for fn in (functions module)
                   for fn-doc = (find-documentation fn 'function-documentation)
                   when fn-doc do
                   (setf (module fn-doc) uncategorized)
                   (push fn (functions uncategorized))))))
     (remove-documentation ',name 'module)))
;; All module objects currently registered.
(defun modules () (hash-table-values (gethash 'module *documentation*)))
;; String key of the catch-all module; the module registry is
;; EQUALP-keyed, so lookups on this key are case-insensitive.
(defparameter *uncategorized-key* "UNCATEGORIZED")
;;; Setup the Uncategorized module.
(let ((uncategorized (intern-documentation *uncategorized-key* 'module)))
  (setf (docstring uncategorized)
        "Documented elements not part of any other module."))
;; No symbol-doc creator macro because symbol-doc entries are reserved for
;; those exported symbols which do not have define-function entries. These
;; symbols are to be searched out at the end of the system load and
;; at that point symbol-doc objects are created for each such symbol
;; (using the below function)
(defun create-symbol-doc (symbol &key docstring dtype stype)
  "Build a fresh (unregistered) SYMBOL-DOC record for SYMBOL.
DTYPE is the broad documentation category and STYPE the finer symbol
kind; see MAYBE-CREATE-SYMBOL-DOCS for the values used."
  (make-instance
   'help:symbol-doc
   :name symbol :docstring docstring :dtype dtype :stype stype))
(defun create-symbol-doc-entries (&key (mode :external))
  "Scan every package and register SYMBOL-DOC entries (or alias dummies)
for exported symbols.  MODE is currently ignored.
NOTE: one comment line below had lost its \";;\" marker in this copy,
breaking the form; restored."
  (declare (ignore mode))
  (loop
   with hash = (gethash 'help:symbol-doc *documentation*)
   with packages-not-to-search =
   (remove (find-package :cl-user) cl-user::*startup-packages*)
   with cl-package = (find-package :common-lisp)
   for package in (list-all-packages)
   do
   ;; The startup packages are those that exist at the start
   ;; of our system load. Hence we only look for symbols in
   ;; our own packages, CL, and third party stuff we load, like PPCRE
   (unless (and (member package packages-not-to-search)
                (not (eq package cl-package)))
     (do-external-symbols (symbol package)
       (when (or (eq package cl-package)
                 (not (eq (symbol-package symbol) cl-package)))
         (cond
          ;; Symbols flagged as aliases get a dummy doc object.
          ((get symbol :alias-of) (create-alias-for symbol))
          (t
           (vwhen (docs (maybe-create-symbol-docs symbol))
             (setf (gethash symbol hash) docs)
             ))))))))
;; create a dummy function-documentation object whose only meaningful slots
;; are explicitly-documented-p, which is given the value :alias-of to denote
;; that this is a dummy, and see-also, which contains the real function
;; that the symbol is an alias for.
(defun create-alias-for (symbol)
  ;; Register a dummy FUNCTION-DOCUMENTATION for an alias symbol; the
  ;; :alias-of marker in EXPLICITLY-DOCUMENTED-P denotes the dummy.
  ;; NOTE(review): the header comment above says SEE-ALSO holds the real
  ;; function, but the code leaves it NIL (the expression is commented
  ;; out below) -- comment and code disagree.
  (let ((real-function (get symbol :alias-of))
        (docobj (intern-documentation symbol 'help:function-documentation)))
    (setf (explicitly-documented-p docobj) :alias-of)
    (setf (docstring docobj) (formatn "Alias for ~A" real-function))
    (setf (see-also docobj) nil)
    ;; (list (find-documentation real-function 'help:function-documentation))
    ))
;;; Create a set of HELP:SYMBOL-DOC data structures, for a symbol
(defun maybe-create-symbol-docs (symbol)
  ;; Return a list of 0-3 SYMBOL-DOC records for SYMBOL: one per role it
  ;; plays (function, variable, type).  NILs from skipped roles are
  ;; removed.
  (remove-if
   'null
   (list
    (when (fboundp symbol)
      ;; Don't create an entry if the symbol is already
      ;; documented by DOCUMENT-FUNCTION
      (unless (find-documentation symbol 'help:function-documentation)
        (create-symbol-doc
         symbol
         :docstring (documentation symbol 'function)
         :stype
         (cond
          ((special-operator-p symbol) :special-operator)
          ((define-function-p symbol) :define-function)
          ((macro-function symbol) :macro)
          (t :function))
         :dtype :function
         )))
    (when (boundp symbol)
      (create-symbol-doc
       symbol
       :docstring (documentation symbol 'variable)
       :stype
       (cond
        ((constantp symbol) :constant)
        (t :variable))
       :dtype :variable
       ))
    ;; Figure out if a symbol denotes a type. Not portable.
    ;; This type checking causes the autoloading of the stream goo in ACL.
    (ignore-errors
      (typep nil symbol)
      (create-symbol-doc
       symbol
       :docstring (documentation symbol 'type)
       :stype :type
       :dtype :type
       )))))
;; Definer for TOPIC objects; same clause format as the definers above.
(define-doc-definer
  topic
  def-topic
  create-topic
  ((:summary :one-or-none ddd-string-or-nil identity help:docstring)
   (:text :non-nil-list ddd-identity identity help:text)
   (:keywords :list ddd-all-symbols-or-strings identity help:keywords)
   (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)
   (:author :list ddd-all-strings identity help:author)
   ))
;; The define-doc-definer for tutorials is in live-tutorial.lisp
;; not doing variable-documentation for now since it is not used
;; Disabled (#+not-used): would mark a variable as explicitly documented.
#+not-used
(defmacro document-variable (name docstring)
  `(let ((thing (intern-documentation ',name 'variable-documentation)))
     (setf (explicitly-documented-p thing) t)
     (setf (docstring thing) ,docstring)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Each documentation type has an associated URL which displays
;; the documentation object identified by the parameters of the URL
;; (which are taken from the SEE-ALSO data structure). See-also
;; lists can also contain objects which are not documentation-objects
;; per se (e.g., references, URLs and frames)
;; NOTE: two comment lines in this section had lost their ";;" markers
;; in this copy, breaking the file; restored here.
(wb::define-url&pkg&args
  help-documentation-file-url
  "/new-help/help-documentation-file-url" :name)
;; File documentation doesn't have its own URL because the documentation
;; directory already has its own AllegroServe PUBLISH-DIRECTORY url
(wb::define-url&pkg&args
  help-function-documentation-url
  "/new-help/help-function-documentation-url" :name :package)
(wb::define-url&pkg&args
  help-glossary-entry-url "/new-help/help-glossary-entry-url" :name)
;; not doing macro-documentation because it's not used.
(wb::define-url&pkg&args
  help-module-url "/new-help/help-module-url" :name)
(wb::define-url&pkg&args
  help-symbol-doc-url
  "/new-help/help-symbol-doc-url" :name :package :type)
(wb::define-url&pkg&args
  help-topic-url "/new-help/help-topic-url" :name)
(wb::define-url&pkg&args
  help-tutorial-url "/new-help/help-tutorial-url" :name)
;; not doing variable-documentation because it's not used.
;; URLs don't have their own URL because they are already a URL!
;; Frames don't have their own URL here because one exists already.
;;; A page which lists all the glossary entries
(wb::define-url&pkg&args
  help-glossary-url "/new-help/help-glossary-url")
;;; A page which lists all the pertinent modules
(wb::define-url&pkg&args help-modules-url "/help/modules")
| null | https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/Help/doc-objects.lisp | lisp | mode : lisp ; base : 10 ; Syntax : Common - Lisp ; -*-
+=========================================================================+
| |
| Permission is hereby granted, free of charge, to any person obtaining |
| a copy of this software and associated documentation files (the |
| without limitation the rights to use, copy, modify, merge, publish, |
| the following conditions: |
| |
| The above copyright notice and this permission notice shall be included |
| |
| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
| IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
| TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
| SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
+=========================================================================+
All the various types of documentation (theoretically) available
in the system.
Arguably, FUNCTION-DOCUMENTATION and SYMBOL-DOC should be merged
macro-documentation
variable-documentation
macro-documentation
variable-documentation
Where all documentation objects are stored.
Use FIND-DOCUMENTATION to pull something out
The hierarchy of documentation classes
AKA 'summary'
the reader methods are defined in document-function.lisp
If/when actually implemented, should become a subtype of DOCUMENTED
:constant, :variable, or :type
:filename -- a string, must be full pathname
:user-mode -- a keyword or a list of keywords
:sort-order -- an integer
:description -- a string, this is really the summary
If/when actually implemented, should become a subtype of DOCUMENTED
The definitions that create the verifiers and parsers for
the definition forms for each documentation object.
function-documentation has no define-doc-definer, its verifer and parser
are implemented by hand in document-function.lisp
not doing macro-documentation for now since it is not used
Setup the Uncategorized module.
No symbol-doc creator macro because symbol-doc entries are reserved for
those exported symbols which do not have define-function entries. These
symbols are to be searched out at the end of the system load and
at that point symbol-doc objects are created for each such symbol
(using the below function)
The startup packages are those that exist at the start
of our system load. Hence we only look for symbols in
create a dummy function-documentation object whose only meaningful slots
are explicitly-documented-p, which is given the value :alias-of to denote
that this is a dummy, and see-also, which contains the real function
that the symbol is an alias for.
(list (find-documentation real-function 'help:function-documentation))
Create a set of HELP:SYMBOL-DOC data structures, for a symbol
Don't create an entry if the symbol is already
documented by DOCUMENT-FUNCTION
Figure out if a symbol denotes a type. Not portable.
This type checking causes the autoloading of the stream goo in ACL.
not doing variable-documentation for now since it is not used
Each documentation type has an associated URL which displays
the documentation object identified by the parameters of the URL
(which are taken from the SEE-ALSO data structure). See-also
lists can also contain objects which are not documentation-objects
File documentation doesn't have its own URL because the documentation
not doing macro-documentation because it's not used.
not doing variable-documentation because it's not used.
URLs don't have their own URL because they are already a URL!
Frames don't have their own URL here because one exists already.
A page which lists all the glossary entries
A page which lists all the pertinent modules |
(in-package :help)
| Copyright ( c ) 2002 - 2006 JP , , |
| " Software " ) , to deal in the Software without restriction , including |
| distribute , sublicense , and/or sell copies of the Software , and to |
| permit persons to whom the Software is furnished to do so , subject to |
| in all copies or substantial portions of the Software . |
| THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , |
| CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |
Authors : JP Massar , .
(defparameter *documentation-types*
'(
documentation-file
function-documentation
glossary-entry
module
symbol-doc
topic
tutorial
))
(defparameter *doc-types-hash-types*
'(
(documentation-file equal)
(function-documentation eq)
(glossary-entry equalp)
(module equalp)
(symbol-doc eq)
(topic equalp)
(tutorial equalp)
))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defun create-doc-hash-tables ()
(let ((ht (make-hash-table)))
(loop for (doc-type hash-test) in *doc-types-hash-types* do
(setf (gethash doc-type ht) (make-hash-table :test hash-test)))
ht
)))
(defvar *documentation* (create-doc-hash-tables))
(defun intern-documentation (name type)
(or (find-documentation name type)
(setf (gethash name (gethash type *documentation*))
(make-instance type :name name))))
(defun remove-documentation (name type)
(remhash name (gethash type *documentation*)) (make-instance type :name name))
(defun find-documentation (name type)
(gethash name (gethash type *documentation*)))
(defun clear-documentation ()
(setf *documentation* (create-doc-hash-tables)))
(defclass basicdoc ()
((name :initarg :name :accessor name)
(docstring :initform nil :initarg :docstring :accessor docstring)
(referred-to-by :initform nil :accessor referred-to-by)
))
(defmethod summary ((obj basicdoc)) (docstring obj))
(defmethod text ((obj basicdoc)) nil)
(defmethod keywords ((obj basicdoc)) nil)
(defmethod see-also ((obj basicdoc)) nil)
(defmethod explicitly-documented-p ((obj basicdoc)) nil)
(defmethod author ((obj basicdoc)) nil)
(defmethod print-object ((obj basicdoc) stream)
(format stream "<Docobj ~A (~A)>" (help:name obj) (type-of obj)))
(defclass documented (basicdoc)
((text :initform nil :accessor text)
(keywords :initform nil :accessor keywords)
(see-also :initform nil :accessor see-also)
(author :initform nil :accessor author)
(explicitly-documented-p :initform nil :accessor explicitly-documented-p)))
(defclass mode-documented (documented)
((display-modes
:initform (list :all)
:accessor display-modes
)))
(defclass documentation-file (mode-documented)
((label :initform nil :accessor label)
(source-file :initform nil :accessor source-file)
(associated-text-file
:initform nil
:accessor associated-text-file
)
(matches :initform nil :accessor matches)
(descriptor :initform nil :accessor descriptor)
))
(defclass function-documentation (documented module-element)
((parameters :initform nil :writer (setf parameters))
(return-values :initform nil :writer (setf return-values))
(syntax :initform nil :writer (setf syntax))
(vpl-syntax :initform nil :writer (setf vpl-syntax))
(examples :initform nil :writer (setf examples))
(examples-package :initform nil :writer (setf examples-package))
(synonyms :initform nil :writer (setf synonyms))
(flavor :initform :defun :writer (setf flavor))
(canonical :initform nil :accessor canonical)
(aliased :initform nil :accessor aliased)
))
(defmethod print-object ((obj function-documentation) stream)
(print-symbol-docobj obj stream "DocFunc"))
(defclass glossary-entry (documented) ())
(defclass macro-documentation (basicdoc) ())
(defclass module (mode-documented)
((functions :initform nil :accessor functions)
(variables :initform nil :accessor variables)
(macros :initform nil :accessor macros)
(submodules :initform nil :accessor submodules)
(toplevel? :initform t :accessor toplevel?)
(alpha-listing? :initform t :accessor alpha-listing?)
))
(defclass symbol-doc (basicdoc)
(
one of : special - operator , : define - function , : macro , : function ,
(stype :initform nil :initarg :stype :accessor stype)
one of : function , : variable , or : type
(dtype :initform nil :initarg :dtype :accessor dtype)))
(defmethod print-object ((obj symbol-doc) stream)
(print-symbol-docobj obj stream "Symbol"))
(defclass topic (mode-documented) ())
(defclass tutorial (mode-documented)
: file - type -- either : html or :
: section - header -- two strings , a title , and a color
: lhtml - function -- used only wth file type lhtml , must be a symbol
: start - function -- used only with file type : lhtml , must be a symbol
((filename :initform nil :accessor filename)
(file-type :initform nil :accessor file-type)
(user-mode :initform nil :accessor user-mode)
(sort-order :initform nil :accessor sort-order)
(description :initform nil :accessor description)
(lhtml-function :initform nil :accessor lhtml-function)
(start-function :initform nil :accessor start-function)
(section-header :initform nil :accessor section-header)
))
(defclass variable-documentation (basicdoc) ())
(define-doc-definer
documentation-file
def-documentation-file
create-documentation-file
((:summary :one-or-none ddd-string-or-nil identity help:docstring)
(:keywords :list ddd-all-symbols-or-strings identity help:keywords)
(:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)
(:author :list ddd-all-strings identity help:author)
(:descriptor :one-or-none ddd-string-or-nil identity help:descriptor)
))
(define-doc-definer
glossary-entry
def-glossary-entry
create-glossary-entry
((:summary :one-or-none ddd-string-or-nil identity help:docstring)
(:text :non-nil-list ddd-identity identity help:text)
(:keywords :list ddd-all-symbols-or-strings identity help:keywords)
(:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)
(:author :list ddd-all-strings identity help:author)
))
(define-doc-definer
module
def-module
create-module
((:summary :one-or-none ddd-string-or-nil identity help:docstring)
(:text :non-nil-list ddd-identity identity help:text)
(:keywords :list ddd-all-symbols-or-strings identity help:keywords)
(:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)
(:author :list ddd-all-strings identity help:author)
(:functions :list ddd-all-symbols identity help:functions)
(:variables :list ddd-all-symbols identity help:variables)
(:macros :list ddd-all-symbols identity help:macros)
(:submodules :list ddd-all-symbols identity help:submodules)
(:toplevel? :exactly-one ddd-boolean identity help:toplevel?)
(:alpha-listing? :exactly-one ddd-boolean identity help:alpha-listing?)
(:display-modes :list ddd-all-symbols identity help:display-modes)
)
:after-code (setf (explicitly-documented-p obj) t))
(defmacro document-module (name &body (docstring &rest meta))
`(def-module ,(string name) (:summary ,docstring) ,@meta))
(defmacro undocument-module (name &key remove-functions)
`(progn
(let ((module (find-documentation ',name 'module)))
(if ,remove-functions
(loop for fn in (functions module) do
(remove-documentation fn 'function-documentation))
(let ((uncategorized (intern-documentation 'uncategorized 'module)))
(loop for fn in (functions module)
for fn-doc = (find-documentation fn 'function-documentation)
when fn-doc do
(setf (module fn-doc) uncategorized)
(push fn (functions uncategorized))))))
(remove-documentation ',name 'module)))
(defun modules () (hash-table-values (gethash 'module *documentation*)))
(defparameter *uncategorized-key* "UNCATEGORIZED")
(let ((uncategorized (intern-documentation *uncategorized-key* 'module)))
(setf (docstring uncategorized)
"Documented elements not part of any other module."))
(defun create-symbol-doc (symbol &key docstring dtype stype)
(make-instance
'help:symbol-doc
:name symbol :docstring docstring :dtype dtype :stype stype))
(defun create-symbol-doc-entries (&key (mode :external))
(declare (ignore mode))
(loop
with hash = (gethash 'help:symbol-doc *documentation*)
with packages-not-to-search =
(remove (find-package :cl-user) cl-user::*startup-packages*)
with cl-package = (find-package :common-lisp)
for package in (list-all-packages)
do
our own packages , CL , and third party stuff we load , like PPCRE
(unless (and (member package packages-not-to-search)
(not (eq package cl-package)))
(do-external-symbols (symbol package)
(when (or (eq package cl-package)
(not (eq (symbol-package symbol) cl-package)))
(cond
((get symbol :alias-of) (create-alias-for symbol))
(t
(vwhen (docs (maybe-create-symbol-docs symbol))
(setf (gethash symbol hash) docs)
))))))))
(defun create-alias-for (symbol)
(let ((real-function (get symbol :alias-of))
(docobj (intern-documentation symbol 'help:function-documentation)))
(setf (explicitly-documented-p docobj) :alias-of)
(setf (docstring docobj) (formatn "Alias for ~A" real-function))
(setf (see-also docobj) nil)
))
(defun maybe-create-symbol-docs (symbol)
(remove-if
'null
(list
(when (fboundp symbol)
(unless (find-documentation symbol 'help:function-documentation)
(create-symbol-doc
symbol
:docstring (documentation symbol 'function)
:stype
(cond
((special-operator-p symbol) :special-operator)
((define-function-p symbol) :define-function)
((macro-function symbol) :macro)
(t :function))
:dtype :function
)))
(when (boundp symbol)
(create-symbol-doc
symbol
:docstring (documentation symbol 'variable)
:stype
(cond
((constantp symbol) :constant)
(t :variable))
:dtype :variable
))
(ignore-errors
(typep nil symbol)
(create-symbol-doc
symbol
:docstring (documentation symbol 'type)
:stype :type
:dtype :type
)))))
(define-doc-definer
topic
def-topic
create-topic
((:summary :one-or-none ddd-string-or-nil identity help:docstring)
(:text :non-nil-list ddd-identity identity help:text)
(:keywords :list ddd-all-symbols-or-strings identity help:keywords)
(:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)
(:author :list ddd-all-strings identity help:author)
))
The define - doc - definer for tutorials is in live-tutorial.lisp
#+not-used
(defmacro document-variable (name docstring)
`(let ((thing (intern-documentation ',name 'variable-documentation)))
(setf (explicitly-documented-p thing) t)
(setf (docstring thing) ,docstring)))
per se ( e.g. , references , URLs and frames )
(wb::define-url&pkg&args
help-documentation-file-url
"/new-help/help-documentation-file-url" :name)
directory already has its own AllegroServe PUBLISH - DIRECTORY url
(wb::define-url&pkg&args
help-function-documentation-url
"/new-help/help-function-documentation-url" :name :package)
(wb::define-url&pkg&args
help-glossary-entry-url "/new-help/help-glossary-entry-url" :name)
(wb::define-url&pkg&args
help-module-url "/new-help/help-module-url" :name)
(wb::define-url&pkg&args
help-symbol-doc-url
"/new-help/help-symbol-doc-url" :name :package :type)
(wb::define-url&pkg&args
help-topic-url "/new-help/help-topic-url" :name)
(wb::define-url&pkg&args
help-tutorial-url "/new-help/help-tutorial-url" :name)
(wb::define-url&pkg&args
help-glossary-url "/new-help/help-glossary-url")
(wb::define-url&pkg&args help-modules-url "/help/modules")
|
30a50b63c3ecf654875c6c6afdd4fa9e3bf4343a85b180670388b1bef846aa42 | jixiuf/helloerlang | emysql_app.erl | Copyright ( c ) 2009
%% Bill Warnecke <bill@rupture.com>
%% Jacob Vorreuter <jacob.vorreuter@gmail.com>
%%
%% Permission is hereby granted, free of charge, to any person
%% obtaining a copy of this software and associated documentation
%% files (the "Software"), to deal in the Software without
%% restriction, including without limitation the rights to use,
%% copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the
%% Software is furnished to do so, subject to the following
%% conditions:
%%
%% The above copyright notice and this permission notice shall be
%% included in all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
%% OTHER DEALINGS IN THE SOFTWARE.
-module(emysql_app).
-behaviour(application).
-export([start/2, stop/1, modules/0, default_timeout/0, lock_timeout/0, pools/0]).
-include("emysql.hrl").
%% @doc Application start callback: boots the emysql supervision tree.
%% The commented-out case printed the embedded build timestamp during
%% development ("%MAKETIME%" is the placeholder left by rebar builds);
%% its `%' markers had been stripped in this copy, leaving invalid
%% tokens, so it is restored as a comment here.
%% NOTE(review): the macro is presumably ?BUILD_TIME -- confirm against
%% emysql.hrl / the make-based build.
start(_Type, _StartArgs) ->
    % case ?BUILD_TIME of
    %     "%MAKETIME%" -> ok; % happens with rebar build
    %     _ -> io:format("Build time: ~p~n", [?BUILD_TIME])
    % end,
    emysql_sup:start_link().
%% @doc Application stop callback.  For every configured pool, close
%% both the idle connections (queued in #pool.available) and the
%% checked-out ones (in the #pool.locked gb_tree), then return ok.
stop(_State) ->
    lists:foreach(
        fun(Pool) ->
            lists:foreach(
                fun emysql_conn:close_connection/1,
                lists:append(queue:to_list(Pool#pool.available), gb_trees:values(Pool#pool.locked))
            )
        end,
        emysql_conn_mgr:pools()
    ),
    ok.
%% @doc The module list of the emysql application, as recorded in its
%% .app resource and read back via the application controller.
modules() ->
    {ok, Mods} = application_controller:get_key(emysql, modules),
    Mods.
%% @doc Query timeout in milliseconds: the `default_timeout' app env
%% value when set, otherwise the ?TIMEOUT default from emysql.hrl.
default_timeout() ->
    case application:get_env(emysql, default_timeout) of
        {ok, Millis} -> Millis;
        undefined -> ?TIMEOUT
    end.
%% @doc Connection-lock timeout in milliseconds: the `lock_timeout' app
%% env value when set, otherwise ?LOCK_TIMEOUT from emysql.hrl.
lock_timeout() ->
    case application:get_env(emysql, lock_timeout) of
        {ok, Millis} -> Millis;
        undefined -> ?LOCK_TIMEOUT
    end.
%% @doc Pool definitions from the `pools' app env entry; returns []
%% when the entry is unset or not a list.
pools() ->
    case application:get_env(emysql, pools) of
        {ok, PoolList} when is_list(PoolList) ->
            PoolList;
        _NoPools ->
            []
    end.
| null | https://raw.githubusercontent.com/jixiuf/helloerlang/3960eb4237b026f98edf35d6064539259a816d58/gls/sgLogServer/deps/emysql/src/emysql_app.erl | erlang |
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"%MAKETIME%" -> ok; % happens with rebar build
end, | Copyright ( c ) 2009
< >
< >
files ( the " Software " ) , to deal in the Software without
copies of the Software , and to permit persons to whom the
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
-module(emysql_app).
-behaviour(application).
-export([start/2, stop/1, modules/0, default_timeout/0, lock_timeout/0, pools/0]).
-include("emysql.hrl").
start(_Type, _StartArgs) ->
case of
_ - > io : format("Build time : ~p ~ n " , )
emysql_sup:start_link().
stop(_State) ->
lists:foreach(
fun(Pool) ->
lists:foreach(
fun emysql_conn:close_connection/1,
lists:append(queue:to_list(Pool#pool.available), gb_trees:values(Pool#pool.locked))
)
end,
emysql_conn_mgr:pools()
),
ok.
modules() ->
{ok, Modules} = application_controller:get_key(emysql, modules), Modules.
default_timeout() ->
case application:get_env(emysql, default_timeout) of
undefined -> ?TIMEOUT;
{ok, Timeout} -> Timeout
end.
lock_timeout() ->
case application:get_env(emysql, lock_timeout) of
undefined -> ?LOCK_TIMEOUT;
{ok, Timeout} -> Timeout
end.
pools() ->
case application:get_env(emysql, pools) of
{ok, Pools} when is_list(Pools) ->
Pools;
_ ->
[]
end.
|
880432877c4576b2b53a6ab8d82cfcf659217ae9f40897338270e33faf2d362d | kenbot/church | ChurchList.hs | {-# LANGUAGE RankNTypes #-}
module ChurchList where
-- | A Church-encoded list: the list represented by its own right fold.
-- Supplying a \"cons\" function and a \"nil\" seed replays the list.
type CList a = forall r. (a -> r -> r) -> r -> r
-- | The empty Church list: folding it just returns the seed.
cNil :: CList a
cNil = \_cons nil -> nil
-- | Prepend an element: feed the head to the fold's cons, recurse on the tail.
cCons :: a -> CList a -> CList a
cCons x xs = \cons nil -> cons x (xs cons nil)
-- | Materialise a Church list as an ordinary Haskell list.
cListToList :: CList a -> [a]
cListToList xs = xs (\y ys -> y : ys) []
-- | Embed an ordinary list into its Church encoding, one cons at a time.
listToCList :: [a] -> CList a
listToCList xs = case xs of
  []     -> cNil
  y : ys -> y `cCons` listToCList ys
-- | Render a Church list via its ordinary-list form.
cListToString :: Show a => CList a -> String
cListToString xs = show (cListToList xs)
| null | https://raw.githubusercontent.com/kenbot/church/a3da46b584dde00b66da14943154f225f062eb86/ChurchList.hs | haskell | # LANGUAGE RankNTypes # |
module ChurchList where
type CList a = forall r. (a -> r -> r) -> r -> r
cNil :: CList a
cNil f nil = nil
cCons :: a -> CList a -> CList a
cCons a clist = \f b -> f a (clist f b)
cListToList :: CList a -> [a]
cListToList clist = clist (:) []
listToCList :: [a] -> CList a
listToCList [] = cNil
listToCList (a : as) = a `cCons` (listToCList as)
cListToString :: Show a => CList a -> String
cListToString = show . cListToList
|
4fe3c1118b59f0dea430902df4d1007ca2565110ef0da6b5820a22aaf91e766e | vvvvalvalval/mapdag | default.cljc | (ns mapdag.test.runtime.default
(:require [clojure.test :refer :all]
[mapdag.test.core]
[mapdag.runtime.default]))
(deftest compute--examples
  ;; Run the shared example suite against the default interpreter's
  ;; `compute` entry point.
  (mapdag.test.core/test-implementation--examples
    mapdag.runtime.default/compute))
| null | https://raw.githubusercontent.com/vvvvalvalval/mapdag/c0758a7dcd986e7062d80c3dd368ea769d0d5b41/test/mapdag/test/runtime/default.cljc | clojure | (ns mapdag.test.runtime.default
(:require [clojure.test :refer :all]
[mapdag.test.core]
[mapdag.runtime.default]))
(deftest compute--examples
(mapdag.test.core/test-implementation--examples mapdag.runtime.default/compute))
|
|
eb9144d628de1e3551cfcd9ac43fcb375008cf9a1d19819e0bdcb413fc287fd3 | alevy/postgresql-orm | Model_old.hs | # LANGUAGE Trustworthy #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE FunctionalDependencies #
# LANGUAGE TypeOperators #
# LANGUAGE FlexibleInstances #
# LANGUAGE DefaultSignatures #
module Database.PostgreSQL.ORM.LIO.Model where
import qualified Data.ByteString as S
import qualified Database.PostgreSQL.Simple as M
import Database.PostgreSQL.Simple.Types
import Database.PostgreSQL.ORM.Model (Model, GDBRef)
import qualified Database.PostgreSQL.ORM.Model as M
import qualified Database.PostgreSQL.ORM.DBSelect as M
import qualified Database.PostgreSQL.ORM as M
import Data.Typeable
import LIO
import LIO.DCLabel
import LIO.TCB
import GHC.Generics
import Database.PostgreSQL.ORM.CreateTable
import Data.Vector (Vector, toList)
findAllP :: (Model r, ModelPolicy c r m) => Connection c -> DC [DCLabeled m]
findAllP (ConnectionTCB c dcc) = do
rows <- ioTCB $ M.dbSelect c selectModel
mapM (labelModel dcc) rows
findRow :: (Model r, ModelPolicy c r m)
=> Connection c -> GDBRef rt r -> DC (Maybe (DCLabeled m))
findRow (ConnectionTCB c dcc) k = do
mrow <- ioTCB $ M.findRow c k
case mrow of
Nothing -> return Nothing
Just row -> labelModel dcc row >>= \lr -> return $ Just lr
data Connection c = ConnectionTCB M.Connection c
class DCConnection c => ModelPolicy c a b | a -> b, b -> c, b -> a where
labelModel :: c -> a -> DC (DCLabeled b)
selectModel :: M.DBSelect a
default selectModel :: (Model a) => M.DBSelect a
selectModel = M.modelDBSelect
lookupModel :: M.DBSelect a
default lookupModel :: Model a => M.DBSelect a
lookupModel =
let primKey = M.modelQPrimaryColumn (M.modelIdentifiers :: M.ModelIdentifiers a)
in M.addWhere_ (Query $ S.concat [primKey, " = ?"]) $ M.modelDBSelect
class Typeable c => DCConnection c where
newConnection :: DCPriv -> c
connect :: forall c. DCConnection c => DC (Connection c)
connect = do
let tc = typeRepTyCon $ typeOf (undefined :: c)
pd = concat
[ tyConPackage tc
, ":"
, tyConModule tc
, "."
, tyConName tc ]
cpriv = PrivTCB $ toCNF $ principal pd
M.defaultConnectInfo -- { M.connectDatabase = pd }
return $ ConnectionTCB conn $ newConnection cpriv
--- EXAMPLE
data MyConn = MyConnTCB DCPriv deriving (Typeable)
instance DCConnection MyConn where
newConnection = MyConnTCB
data Owner = Owner { ownerId :: M.DBKey
, ownerPrincipal :: String } deriving (Generic, Show)
data Region = Region { regionId :: M.DBKey
, regionName :: String
, regionOwner :: M.DBRef Owner } deriving (Generic, Show)
instance Model Region where
modelInfo = M.underscoreModelInfo "region"
instance Model Owner where
modelInfo = M.underscoreModelInfo "region"
instance ModelPolicy MyConn (Region M.:. Owner) Region where
selectModel = M.addExpression "" $ M.modelDBSelect
labelModel (MyConnTCB mypriv) (region M.:. owner) = do
labelP mypriv (ownerPrincipal owner \/ mypriv %% ownerPrincipal owner \/ mypriv) region
instance ModelPolicy MyConn Owner Owner where
labelModel (MyConnTCB mypriv) owner =
labelP mypriv (True %% mypriv) owner
data Owners = Owners { ownersId :: M.DBKey, owners :: Vector String } deriving (Generic, Typeable)
instance Model Owners where
| null | https://raw.githubusercontent.com/alevy/postgresql-orm/9316db2f226c512036c2b72983020f6bdefd41bd/src/Database/PostgreSQL/ORM/LIO/Model_old.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE OverloadedStrings #
# LANGUAGE DeriveDataTypeable #
{ M.connectDatabase = pd }
- EXAMPLE | # LANGUAGE Trustworthy #
# LANGUAGE DeriveGeneric #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE FunctionalDependencies #
# LANGUAGE TypeOperators #
# LANGUAGE FlexibleInstances #
# LANGUAGE DefaultSignatures #
module Database.PostgreSQL.ORM.LIO.Model where
import qualified Data.ByteString as S
import qualified Database.PostgreSQL.Simple as M
import Database.PostgreSQL.Simple.Types
import Database.PostgreSQL.ORM.Model (Model, GDBRef)
import qualified Database.PostgreSQL.ORM.Model as M
import qualified Database.PostgreSQL.ORM.DBSelect as M
import qualified Database.PostgreSQL.ORM as M
import Data.Typeable
import LIO
import LIO.DCLabel
import LIO.TCB
import GHC.Generics
import Database.PostgreSQL.ORM.CreateTable
import Data.Vector (Vector, toList)
findAllP :: (Model r, ModelPolicy c r m) => Connection c -> DC [DCLabeled m]
findAllP (ConnectionTCB c dcc) = do
rows <- ioTCB $ M.dbSelect c selectModel
mapM (labelModel dcc) rows
findRow :: (Model r, ModelPolicy c r m)
=> Connection c -> GDBRef rt r -> DC (Maybe (DCLabeled m))
findRow (ConnectionTCB c dcc) k = do
mrow <- ioTCB $ M.findRow c k
case mrow of
Nothing -> return Nothing
Just row -> labelModel dcc row >>= \lr -> return $ Just lr
data Connection c = ConnectionTCB M.Connection c
class DCConnection c => ModelPolicy c a b | a -> b, b -> c, b -> a where
labelModel :: c -> a -> DC (DCLabeled b)
selectModel :: M.DBSelect a
default selectModel :: (Model a) => M.DBSelect a
selectModel = M.modelDBSelect
lookupModel :: M.DBSelect a
default lookupModel :: Model a => M.DBSelect a
lookupModel =
let primKey = M.modelQPrimaryColumn (M.modelIdentifiers :: M.ModelIdentifiers a)
in M.addWhere_ (Query $ S.concat [primKey, " = ?"]) $ M.modelDBSelect
class Typeable c => DCConnection c where
newConnection :: DCPriv -> c
connect :: forall c. DCConnection c => DC (Connection c)
connect = do
let tc = typeRepTyCon $ typeOf (undefined :: c)
pd = concat
[ tyConPackage tc
, ":"
, tyConModule tc
, "."
, tyConName tc ]
cpriv = PrivTCB $ toCNF $ principal pd
return $ ConnectionTCB conn $ newConnection cpriv
data MyConn = MyConnTCB DCPriv deriving (Typeable)
instance DCConnection MyConn where
newConnection = MyConnTCB
data Owner = Owner { ownerId :: M.DBKey
, ownerPrincipal :: String } deriving (Generic, Show)
data Region = Region { regionId :: M.DBKey
, regionName :: String
, regionOwner :: M.DBRef Owner } deriving (Generic, Show)
instance Model Region where
modelInfo = M.underscoreModelInfo "region"
instance Model Owner where
modelInfo = M.underscoreModelInfo "region"
instance ModelPolicy MyConn (Region M.:. Owner) Region where
selectModel = M.addExpression "" $ M.modelDBSelect
labelModel (MyConnTCB mypriv) (region M.:. owner) = do
labelP mypriv (ownerPrincipal owner \/ mypriv %% ownerPrincipal owner \/ mypriv) region
instance ModelPolicy MyConn Owner Owner where
labelModel (MyConnTCB mypriv) owner =
labelP mypriv (True %% mypriv) owner
data Owners = Owners { ownersId :: M.DBKey, owners :: Vector String } deriving (Generic, Typeable)
instance Model Owners where
|
079a9581678803d1db11a5532e99753bb7f9380076b7da3e20f4f3725a4665f2 | fission-codes/fission | Init.hs | -- | Initialize a new Fission app in an existing directory
module Fission.CLI.Handler.App.Init (appInit) where
import qualified Crypto.PubKey.Ed25519 as Ed25519
import qualified Data.Yaml as YAML
import qualified System.Console.ANSI as ANSI
import Fission.Prelude
import qualified Fission.App.Name as App
import Fission.Authorization.ServerDID
import Fission.Error.Types
import qualified Fission.Internal.UTF8 as UTF8
import Fission.Web.Auth.Token.Types
import Fission.Web.Client
import Fission.CLI.Display.Text
import qualified Fission.CLI.Display.Error as CLI.Error
import qualified Fission.CLI.Display.Success as CLI.Success
import qualified Fission.CLI.App.Environment as App.Env
import qualified Fission.CLI.Prompt.BuildDir as BuildDir
import Fission.CLI.Environment
import Fission.CLI.WebNative.Mutation.Auth.Store as UCAN
| Sync the current working directory to the server over IPFS
appInit ::
( MonadIO m
, MonadTime m
, MonadLogger m
, MonadEnvironment m
, UCAN.MonadStore m
, MonadWebClient m
, ServerDID m
, MonadCleanup m
, m `Raises` ClientError
, m `Raises` YAML.ParseException
, m `Raises` NotFound FilePath
, Contains (Errors m) (Errors m)
, Display (OpenUnion (Errors m))
, Show (OpenUnion (Errors m))
, MonadWebAuth m Token
, MonadWebAuth m Ed25519.SecretKey
)
=> FilePath
-> Maybe FilePath
-> Maybe App.Name
-> m ()
appInit appDir mayBuildDir' mayAppName = do
logDebug @Text "appInit"
proof <- getRootUserProof
attempt (sendAuthedRequest proof $ createApp mayAppName) >>= \case
Left err -> do
logDebug $ textDisplay err
CLI.Error.put err $ textDisplay err
raise err
Right appURL -> do
logDebug $ "Created app " <> textDisplay appURL
case mayBuildDir' of
Nothing -> do
guess <- BuildDir.prompt appDir
App.Env.create appURL $ fromMaybe guess mayBuildDir'
Just dir -> do
logDebug $ "BuildDir passed from flag: " <> dir
App.Env.create appURL dir
CLI.Success.putOk $ "App initialized as " <> textDisplay appURL
UTF8.putText "⏯️ Next, run "
colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do
UTF8.putText "fission app publish [--open|--watch]"
UTF8.putText " to sync data\n"
UTF8.putText "💁 It may take DNS time to propagate this initial setup globally. In this case, you can always view your app at "
colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do
UTF8.putText $ "/" <> textDisplay appURL <> "\n"
return ()
| null | https://raw.githubusercontent.com/fission-codes/fission/e5a5d6f30fb4451918efba5b72787cbc7632aecf/fission-cli/library/Fission/CLI/Handler/App/Init.hs | haskell | | Initialize a new Fission app in an existing directory | module Fission.CLI.Handler.App.Init (appInit) where
import qualified Crypto.PubKey.Ed25519 as Ed25519
import qualified Data.Yaml as YAML
import qualified System.Console.ANSI as ANSI
import Fission.Prelude
import qualified Fission.App.Name as App
import Fission.Authorization.ServerDID
import Fission.Error.Types
import qualified Fission.Internal.UTF8 as UTF8
import Fission.Web.Auth.Token.Types
import Fission.Web.Client
import Fission.CLI.Display.Text
import qualified Fission.CLI.Display.Error as CLI.Error
import qualified Fission.CLI.Display.Success as CLI.Success
import qualified Fission.CLI.App.Environment as App.Env
import qualified Fission.CLI.Prompt.BuildDir as BuildDir
import Fission.CLI.Environment
import Fission.CLI.WebNative.Mutation.Auth.Store as UCAN
| Sync the current working directory to the server over IPFS
appInit ::
( MonadIO m
, MonadTime m
, MonadLogger m
, MonadEnvironment m
, UCAN.MonadStore m
, MonadWebClient m
, ServerDID m
, MonadCleanup m
, m `Raises` ClientError
, m `Raises` YAML.ParseException
, m `Raises` NotFound FilePath
, Contains (Errors m) (Errors m)
, Display (OpenUnion (Errors m))
, Show (OpenUnion (Errors m))
, MonadWebAuth m Token
, MonadWebAuth m Ed25519.SecretKey
)
=> FilePath
-> Maybe FilePath
-> Maybe App.Name
-> m ()
appInit appDir mayBuildDir' mayAppName = do
logDebug @Text "appInit"
proof <- getRootUserProof
attempt (sendAuthedRequest proof $ createApp mayAppName) >>= \case
Left err -> do
logDebug $ textDisplay err
CLI.Error.put err $ textDisplay err
raise err
Right appURL -> do
logDebug $ "Created app " <> textDisplay appURL
case mayBuildDir' of
Nothing -> do
guess <- BuildDir.prompt appDir
App.Env.create appURL $ fromMaybe guess mayBuildDir'
Just dir -> do
logDebug $ "BuildDir passed from flag: " <> dir
App.Env.create appURL dir
CLI.Success.putOk $ "App initialized as " <> textDisplay appURL
UTF8.putText "⏯️ Next, run "
colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do
UTF8.putText "fission app publish [--open|--watch]"
UTF8.putText " to sync data\n"
UTF8.putText "💁 It may take DNS time to propagate this initial setup globally. In this case, you can always view your app at "
colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do
UTF8.putText $ "/" <> textDisplay appURL <> "\n"
return ()
|
755f3076fcb051d464f7dacf35ce09e4f8a5bb8d808cd80d344aad7c19313bc2 | dbuenzli/remat | descr.ml | ---------------------------------------------------------------------------
Copyright 2012 . All rights reserved .
Distributed under the BSD3 license , see license at the end of the file .
% % NAME%% release % % ---------------------------------------------------------------------------
Copyright 2012 Daniel C. Bünzli. All rights reserved.
Distributed under the BSD3 license, see license at the end of the file.
%%NAME%% release %%VERSION%%
---------------------------------------------------------------------------*)
open Rresult
open Bos
(* Repository description *)
type t =
{ dir : Path.t;
mutable repo : Ddescr.Repo.t option;
mutable index_ids : D.index_id list option;
indexes : (D.index_id, Ddescr.Index.t) Hashtbl.t;
mutable doc_ids : D.doc_id list option;
docs : (D.doc_id, Ddescr.Doc.t * Ddescr.Doc.meta) Hashtbl.t; }
(* Description filename lookup *)
let warn_junk_file = format_of_string "suspicious file `%a` in %s directory"
let err_miss_repo p _ = R.msgf "no repository description file `%a'" Path.pp p
let err_miss_dir dir p _ = R.msgf "missing %s directory `%a'" dir Path.pp p
let err_miss_file k id p _ =
R.msgf "%s `%s': missing description file `%a'" k id Path.pp p
let lookup_file err_msg f =
(OS.File.exists ~err:true f >>= fun _ -> R.ok f)
|> R.reword_error_msg ~replace:true (err_msg f)
let lookup_dir err_msg d =
(OS.Dir.exists ~err:true d >>= fun _ -> R.ok d)
|> R.reword_error_msg ~replace:true (err_msg d)
let repo_file d = lookup_file err_miss_repo Path.(d.dir / "repo.json")
let index_path d = Path.(d.dir / "i")
let index_dir d = lookup_dir (err_miss_dir "index") (index_path d)
let index_file d id =
let err = err_miss_file "index" id in
lookup_file err Path.(index_path d / strf "%s.json" id)
let doc_path d = Path.(d.dir / "d")
let doc_dir d = lookup_dir (err_miss_dir "document") (doc_path d)
let doc_file d id =
let err = err_miss_file "document" id in
lookup_file err Path.(doc_path d / strf "%s.json" id)
(* Description decoder *)
let decode_file file codec =
let decode ic () =
let d = Jsonm.decoder (`Channel ic) in
let d = Jsont.decoder ~dups:`Error ~unknown:`Error d codec in
let rec loop () = match Jsont.decode d with
| `Ok v -> R.ok v
| `Await -> loop ()
| `Error (loc, e) ->
let err = (Jsont.error_to_string e) in
Log.show "%a:%a: %s" Path.pp file Fmt.pp_range loc err;
loop ()
in
loop ()
in
OS.File.with_inf decode file ()
let create dir =
OS.Dir.exists ~err:true dir
>>= fun _ -> R.ok { dir;
repo = None;
index_ids = None;
indexes = Hashtbl.create 100;
doc_ids = None;
docs = Hashtbl.create 1000; }
let rec repo d = match d.repo with
| Some r -> r
| None ->
(repo_file d >>= fun file -> decode_file file Ddescr.Repo.codec)
|> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Repo.codec))
|> fun (_, r) -> d.repo <- Some r; r
let find_ids kind dir =
let add_id acc p =
if Path.has_ext `Json p then Path.(basename (rem_ext p)) :: acc else
(Log.warn warn_junk_file Path.pp p kind; acc)
in
(dir
>>= OS.Dir.contents
>>= fun paths -> R.ok (List.fold_left add_id [] paths))
|> Log.on_error_msg ~use:[]
let index_ids d = match d.index_ids with
| Some ids -> ids
| None ->
let ids = find_ids "index" (index_dir d) in
d.index_ids <- Some ids; ids
let index d id =
match try Some (Hashtbl.find d.indexes id) with Not_found -> None with
| Some i -> i
| None ->
(index_file d id >>= fun file -> decode_file file Ddescr.Index.codec)
|> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Index.codec))
|> fun (_, i) -> Hashtbl.add d.indexes id i; i
let doc_ids d = match d.doc_ids with
| Some ids -> ids
| None ->
let ids = find_ids "document" (doc_dir d) in
d.doc_ids <- Some ids; ids
FIXME see if modification of jsont can avoid double parse
match try Some (Hashtbl.find d.docs id) with Not_found -> None with
| Some d -> d
| None ->
(doc_file d id
>>= fun file -> decode_file file Ddescr.Doc.codec
>>= fun (_, doc) -> decode_file file Jsont.json
>>= fun (_, meta) -> R.ok (doc, meta))
|> Log.on_error_msg ~use:(Jsont.(default Ddescr.Doc.codec), `O [])
|> fun doc -> Hashtbl.add d.docs id doc; doc
(* Member lookup *)
let path_to_str ps = String.concat "." ps
let value_type = function
| `Null -> "null" | `Bool _ -> "boolean" | `Float _ -> "number"
| `String _ -> "string" | `A _ -> "array" | `O _ -> "object"
let err_find_type path seen j =
R.error_msgf "path %s stops at %s: value of type %s"
(path_to_str path) (path_to_str seen) (value_type j)
let err_find_name path seen =
R.error_msgf "path %s stops at %s: no such member."
(path_to_str path) (path_to_str seen)
let json_find path j =
let rec loop j seen = function
| [] -> R.ok j
| p :: ps ->
match j with
| `O mems ->
begin match try Some (List.assoc p mems) with Not_found -> None with
| None -> err_find_name path (List.rev (p :: seen))
| Some j -> loop j (p :: seen) ps
end
| j -> err_find_type path (List.rev (p :: seen)) j
in
loop j [] path
let lookup_to_str = function
| `Bool b -> R.ok (strf "%b" b)
| `Float f -> R.ok (strf "%g" f)
| `String s -> R.ok s
| `A _ | `O _ | `Null as v ->
R.error_msgf "unexpected %s in member data" (value_type v)
let lookup path obj =
json_find path obj >>= function
| `A vs ->
let rec loop acc = function
| v :: vs -> lookup_to_str v >>= fun s -> loop (s :: acc) vs
| [] -> R.ok (List.rev acc)
in
loop [] vs
| v -> lookup_to_str v >>= fun s -> R.ok [s]
(* Formatting
TODO better error reports, correct string extractors.
*)
let parse_fuzzy_date s =
let is_digit c = (0x0030 <= c && c <= 0x0039) || c = 0x23 (* # *) in
let check_digits n s =
let len = String.length s in
if len <> n then false else
try
for i = 0 to len - 1 do
if not (is_digit (Char.code s.[i])) then raise Exit
done;
true
with Exit -> false
in
match String.split ~sep:"-" s with
| [y; m; d] when check_digits 4 y && check_digits 2 m && check_digits 2 d ->
R.ok (y, Some m, Some d)
| [y; m] when check_digits 4 y && check_digits 2 m ->
R.ok (y, Some m, None)
| [y] when check_digits 4 y ->
R.ok (y, None, None)
| _ ->
R.error_msgf "could not parse fuzzy date (%s)" s
let map_todo m =
let err = R.msgf "map %s is unimplemented" m in
Ok (fun s -> R.error (err, s))
(* let err fmt = Printf.ksprintf (fun e -> R.error e) fmt *)
let err_map ~use fmt = Printf.ksprintf (fun e -> R.error (`Msg e, use)) fmt
let map_case var kind = match kind with
| "less" | "lower" | "upper" -> map_todo ("case_" ^ kind)
| _ -> R.error_msgf "variable $(%s): unknown case map kind `%s`" var kind
TODO implement dates correctly
let map_date_y s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (y, _, _) -> Ok y
let map_date_yy s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (y, _, _) -> Ok (String.sub y 2 2)
let map_date_yyyy s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (y, _, _) -> Ok y
let map_date_m s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (_, m, _) -> Ok (match m with None -> "#" | Some m -> m)
let map_date_mm s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (_, m, _) -> Ok (match m with None -> "##" | Some m -> m)
let map_date_d s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (_, _, d) -> Ok (match d with None -> "#" | Some m -> m)
let map_date_dd s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (_, _, d) -> Ok (match d with None -> "##" | Some m -> m)
let map_date var kind = match kind with
| "Y" -> Ok map_date_y | "YY" -> Ok map_date_yy | "YYYY" -> Ok map_date_yyyy
| "M" -> Ok map_date_m | "MM" -> Ok map_date_mm
| "d" -> Ok map_date_d | "dd" -> Ok map_date_dd
| "e" -> map_todo "date_e"
| _ -> R.error_msgf "variable $(%s): unknown date map kind `%s`" var kind
let map_letter var n = match R.int_of_string n with
| None -> R.error_msgf "variable $(%s): unknown letter map kind `%s`" var n
| Some n ->
let map s = Ok (if n > String.length s then s else (String.sub s 0 n)) in
Ok map
let map_int var count = match R.int_of_string count with
| None -> R.error_msgf "variable $(%s): unknown int map kind `%s`" var count
| Some count ->
let map s =
let fmt count i = Printf.sprintf "%0*d" count i in
try Ok (fmt count (int_of_string s)) with
| Failure _ ->
err_map ~use:(fmt count 0)
"variable $(%s): value `%s` not an int" var s
in
Ok map
let map_id_find var smaps id = match (String.Map.find id smaps) with
| None -> R.error_msgf "variable $(%s): unknown map id `%s`" var id
| Some m -> Ok m
let map_id var smaps id =
map_id_find var smaps id >>= fun m ->
let map s = match String.Map.find s m with
| Some v -> Ok v
| None ->
err_map ~use:s
"variable $(%s): map id `%s` could not map `%s`" var id s
in
Ok map
let pmap_id var smaps id = match map_id_find var smaps id with
| Error _ as e -> e
| Ok m ->
let map s = match String.Map.find s m with
| None -> Ok s
| Some s -> Ok s
in
Ok map
let get_map var smaps m = match String.cut ~sep:"_" (String.trim m) with
| Some ("case", kind) -> map_case var kind
| Some ("letter", n) -> map_letter var n
| Some ("date", kind) -> map_date var kind
| Some ("int", count) -> map_int var count
| Some ("map", id) -> map_id var smaps m
| Some ("pmap", id) -> pmap_id var smaps m
| None | _ -> R.error_msgf "variable $(%s): unknown map `%s`" var m
TODO splicing , de - uglify
let r = match String.split ~sep:"," var_spec with
| var :: maps ->
let add_map acc m = match acc with
| Error _ as e -> e
| Ok maps ->
match get_map var smaps m with
| Error _ as e -> e
| Ok m -> Ok (m :: maps)
in
begin match List.fold_left add_map (Ok []) maps with
| Error err -> Error (err, "MAPERROR")
| Ok maps -> Ok (String.trim var, List.rev maps)
end
| _ ->
Error (R.msgf "var `$(%s)`: illegal format variable." var_spec, "ILLEGAL")
in
match r with
| Error _ as e -> e
| Ok (var, maps) ->
match String.Map.find var env with
| None | Some [] ->
Error (R.msgf "var `%s`: undefined variable: `$(%s)'" var_spec var,
"UNDEFINED")
| Some [v] ->
let apply acc m = match acc with
| Error _ as e -> e
| Ok s -> m s
in
List.fold_left apply (Ok v) maps
| Some l ->
Error (R.msgf "var `%s`: unspliced multiple value" var_spec,
"UNSPLICED")
let format ?buf fmt ~env ~smaps = failwith "TODO"
let buf = match buf with Some b - > b | None - > Buffer.create 255 in
let err = ref ( ` Msg " " ) in
let lookup_var = match lookup_var env with
| Error ( e , v ) - > err : = e ; v
| Ok v - > v
in
Buffer.clear buf ; Buffer.add_substitute buf lookup_var fmt ;
let data = Buffer.contents buf in
if ! err < > ( ` Msg " " ) then Error ( ! err , data ) else Ok data
let buf = match buf with Some b -> b | None -> Buffer.create 255 in
let err = ref (`Msg "") in
let lookup_var var_spec = match lookup_var env smaps var_spec with
| Error (e, v) -> err := e; v
| Ok v -> v
in
Buffer.clear buf; Buffer.add_substitute buf lookup_var fmt;
let data = Buffer.contents buf in
if !err <> (`Msg "") then Error (!err, data) else Ok data
*)
let formats ?buf fmt ~env ~smaps = failwith "TODO"
(*
let rec product vss = (* ordered cartesian product of lists. *)
let rec push_v acc v = function
| l :: lists -> push_v ((v :: l) :: acc) v lists
| [] -> acc
in
let rec push_vs acc lists = function
| v :: vs -> push_vs (push_v acc v lists) lists vs
| [] -> acc
in
let rec loop acc = function
| vs :: vss -> loop (push_vs [] (List.rev acc) (List.rev vs)) vss
| [] -> acc
in
if vss = [] then [] else loop [[]] (List.rev vss)
FIXME better error report
let lookup_var env var =
match try Some (List.assoc var env) with Not_found -> None with
| None ->
FIXME this should n't occur here
Log.err "variable %s undefined" var; "UNDEFINED"
| Some l -> l
in
let rec assigns acc = function
| [] -> acc
| (name, Error e) :: vars ->
Log.err "var %s lookup error: %s" name e;
assigns ([(name, "ERROR")] :: acc) vars
| (name, Ok vs) :: vars ->
assigns ((List.map (fun v -> (name, v)) vs) :: acc) vars
in
let vars = Ddescr.Formatter.vars fmt in
let assigns = assigns [] (List.map (fun (k, l) -> k, lookup l j) vars) in
let envs = product assigns in
let format = Ddescr.Formatter.format fmt in
let add_run b acc run =
Buffer.clear b;
Buffer.add_substitute b (lookup_var run) format;
Buffer.contents b :: acc
in
let b = Buffer.create 255 in
List.fold_left (add_run b) [] envs
let format_str fmt j =
FIXME report error in case of list ?
String.concat "" (format fmt j)
*)
(* Variable environements *)
let cache = Hashtbl.create 255
type fmt = [`Lit of string | `Var of string ] list
let parse_fmt ?buf s =
try
let b = match buf with
| None -> Buffer.create 255 | Some buf -> Buffer.clear buf; buf
in
let acc = ref [] in
let flush b = let s = Buffer.contents b in (Buffer.clear b; s) in
let flush_lit b =
if Buffer.length b <> 0 then acc := `Lit (flush b) :: !acc
in
let state = ref `Lit in
for i = 0 to String.length s - 1 do match !state with
| `Lit ->
begin match s.[i] with
| '$' -> state := `Dollar
| c -> Buffer.add_char b c
end
| `Dollar ->
begin match s.[i] with
| '$' -> state := `Lit; Buffer.add_char b '$'
| '(' -> state := `Var; flush_lit b;
| _ -> raise Exit
end
| `Var ->
begin match s.[i] with
| ')' -> state := `Lit; acc := (`Var (flush b)) :: !acc;
| c -> Buffer.add_char b c
end
done;
if !state <> `Lit then raise Exit else
(flush_lit b; Ok (List.rev !acc))
with Exit -> Error (strf "malformed format: `%s`" s)
let cache = Hashtbl.create 255
let file_scan pat = try Hashtbl.find cache pat with
| Not_found ->
(OS.Path.unify (Path.of_string pat)
>>= fun envs -> R.ok (List.rev_map snd envs))
|> Log.on_error_msg ~use:[]
|> fun envs -> Hashtbl.add cache pat envs; envs
---------------------------------------------------------------------------
Copyright 2012
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions
are met :
1 . Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above
copyright notice , this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution .
3 . Neither the name of nor the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT
OWNER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
---------------------------------------------------------------------------
Copyright 2012 Daniel C. Bünzli
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Daniel C. Bünzli nor the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/remat/28d572e77bbd1ad46bbfde87c0ba8bd0ab99ed28/src-remat/descr.ml | ocaml | Repository description
Description filename lookup
Description decoder
Member lookup
Formatting
TODO better error reports, correct string extractors.
#
let err fmt = Printf.ksprintf (fun e -> R.error e) fmt
let rec product vss = (* ordered cartesian product of lists.
Variable environements | ---------------------------------------------------------------------------
Copyright 2012 . All rights reserved .
Distributed under the BSD3 license , see license at the end of the file .
% % NAME%% release % % ---------------------------------------------------------------------------
Copyright 2012 Daniel C. Bünzli. All rights reserved.
Distributed under the BSD3 license, see license at the end of the file.
%%NAME%% release %%VERSION%%
---------------------------------------------------------------------------*)
open Rresult
open Bos
type t =
{ dir : Path.t;
mutable repo : Ddescr.Repo.t option;
mutable index_ids : D.index_id list option;
indexes : (D.index_id, Ddescr.Index.t) Hashtbl.t;
mutable doc_ids : D.doc_id list option;
docs : (D.doc_id, Ddescr.Doc.t * Ddescr.Doc.meta) Hashtbl.t; }
let warn_junk_file = format_of_string "suspicious file `%a` in %s directory"
let err_miss_repo p _ = R.msgf "no repository description file `%a'" Path.pp p
let err_miss_dir dir p _ = R.msgf "missing %s directory `%a'" dir Path.pp p
let err_miss_file k id p _ =
R.msgf "%s `%s': missing description file `%a'" k id Path.pp p
let lookup_file err_msg f =
(OS.File.exists ~err:true f >>= fun _ -> R.ok f)
|> R.reword_error_msg ~replace:true (err_msg f)
let lookup_dir err_msg d =
(OS.Dir.exists ~err:true d >>= fun _ -> R.ok d)
|> R.reword_error_msg ~replace:true (err_msg d)
(* Path of the repository description file. *)
let repo_file d = lookup_file err_miss_repo Path.(d.dir / "repo.json")
(* Index descriptions live under <dir>/i, one <id>.json per index. *)
let index_path d = Path.(d.dir / "i")
let index_dir d = lookup_dir (err_miss_dir "index") (index_path d)
let index_file d id =
let err = err_miss_file "index" id in
lookup_file err Path.(index_path d / strf "%s.json" id)
(* Document descriptions live under <dir>/d, one <id>.json per doc. *)
let doc_path d = Path.(d.dir / "d")
let doc_dir d = lookup_dir (err_miss_dir "document") (doc_path d)
let doc_file d id =
let err = err_miss_file "document" id in
lookup_file err Path.(doc_path d / strf "%s.json" id)
(* [decode_file file codec] decodes the JSON contents of [file] with the
   Jsont [codec]. Duplicate and unknown members are decoding errors;
   errors are shown with their location in the file and decoding
   resumes, so a best-effort value is returned even for invalid input. *)
let decode_file file codec =
let decode ic () =
let d = Jsonm.decoder (`Channel ic) in
let d = Jsont.decoder ~dups:`Error ~unknown:`Error d codec in
let rec loop () = match Jsont.decode d with
| `Ok v -> R.ok v
| `Await -> loop () (* NOTE(review): presumably never occurs on a
                       blocking `Channel source — loops defensively. *)
| `Error (loc, e) ->
let err = (Jsont.error_to_string e) in
Log.show "%a:%a: %s" Path.pp file Fmt.pp_range loc err;
loop () (* report and keep decoding. *)
in
loop ()
in
OS.File.with_inf decode file ()
(* [create dir] returns a description handle on [dir], checking that
   the directory exists. All caches start empty and are filled lazily
   by the accessors below. *)
let create dir =
OS.Dir.exists ~err:true dir
>>= fun _ -> R.ok { dir;
repo = None;
index_ids = None;
indexes = Hashtbl.create 100;
doc_ids = None;
docs = Hashtbl.create 1000; }
(* [repo d] returns the decoded repository description, reading and
   caching it on first use. On error the problem is logged and the
   codec's invalid default value is cached and returned instead. *)
let rec repo d = match d.repo with
| Some r -> r
| None ->
(repo_file d >>= fun file -> decode_file file Ddescr.Repo.codec)
|> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Repo.codec))
|> fun (_, r) -> d.repo <- Some r; r
(* [find_ids kind dir] lists the ids of the *.json files in [dir]
   (basename without extension). Non-json entries are warned about and
   skipped; [kind] only appears in messages. Errors are logged and
   yield the empty list. *)
let find_ids kind dir =
let add_id acc p =
if Path.has_ext `Json p then Path.(basename (rem_ext p)) :: acc else
(Log.warn warn_junk_file Path.pp p kind; acc)
in
(dir
>>= OS.Dir.contents
>>= fun paths -> R.ok (List.fold_left add_id [] paths))
|> Log.on_error_msg ~use:[]
(* [index_ids d] lists the ids of the repository's indexes, scanning
   the index directory on first use and caching the result. *)
let index_ids d = match d.index_ids with
| Some ids -> ids
| None ->
let ids = find_ids "index" (index_dir d) in
d.index_ids <- Some ids; ids
(* [index d id] returns the decoded index description [id], reading and
   caching it on first use. On error the problem is logged and the
   codec's invalid default value is cached and returned instead. *)
let index d id =
match try Some (Hashtbl.find d.indexes id) with Not_found -> None with
| Some i -> i
| None ->
(index_file d id >>= fun file -> decode_file file Ddescr.Index.codec)
|> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Index.codec))
|> fun (_, i) -> Hashtbl.add d.indexes id i; i
(* [doc_ids d] lists the ids of the repository's documents, scanning
   the document directory on first use and caching the result. *)
let doc_ids d = match d.doc_ids with
| Some ids -> ids
| None ->
let ids = find_ids "document" (doc_dir d) in
d.doc_ids <- Some ids; ids
(* FIXME see if modification of jsont can avoid double parse *)
(* NOTE(review): the binding line of this function (presumably
   [let doc d id =]) appears to have been lost in extraction; the match
   below is its body. It returns the cached (doc, meta) pair for [id],
   decoding the file twice on first use: once with the doc codec and
   once as generic JSON for the raw meta object. *)
match try Some (Hashtbl.find d.docs id) with Not_found -> None with
| Some d -> d
| None ->
(doc_file d id
>>= fun file -> decode_file file Ddescr.Doc.codec
>>= fun (_, doc) -> decode_file file Jsont.json
>>= fun (_, meta) -> R.ok (doc, meta))
|> Log.on_error_msg ~use:(Jsont.(default Ddescr.Doc.codec), `O [])
|> fun doc -> Hashtbl.add d.docs id doc; doc
(* Render a member path in dotted notation, e.g. ["a"; "b"] -> "a.b". *)
let path_to_str = String.concat "."
(* Human readable JSON type name of a value, for error messages. *)
let value_type v = match v with
| `Null -> "null"
| `Bool _ -> "boolean"
| `Float _ -> "number"
| `String _ -> "string"
| `A _ -> "array"
| `O _ -> "object"
(* Errors for [json_find]: [path] is the full looked-up path, [seen]
   the prefix that was successfully traversed before the failure. *)
let err_find_type path seen j =
R.error_msgf "path %s stops at %s: value of type %s"
(path_to_str path) (path_to_str seen) (value_type j)
let err_find_name path seen =
R.error_msgf "path %s stops at %s: no such member."
(path_to_str path) (path_to_str seen)
(* [json_find path j] looks up member path [path] (a list of member
   names) in the JSON value [j]. Errors if a non-object value is hit
   before the path is exhausted or if a member name is absent. *)
let json_find path j =
let rec loop j seen = function
| [] -> R.ok j
| p :: ps ->
match j with
| `O mems ->
begin match try Some (List.assoc p mems) with Not_found -> None with
| None -> err_find_name path (List.rev (p :: seen))
| Some j -> loop j (p :: seen) ps
end
| j -> err_find_type path (List.rev (p :: seen)) j
in
loop j [] path
(* [lookup_to_str v] converts an atomic JSON value to a string;
   arrays, objects and null are errors. *)
let lookup_to_str = function
| `Bool b -> R.ok (strf "%b" b)
| `Float f -> R.ok (strf "%g" f)
| `String s -> R.ok s
| `A _ | `O _ | `Null as v ->
R.error_msgf "unexpected %s in member data" (value_type v)
(* [lookup path obj] looks up [path] in [obj] and returns its value(s)
   as strings: an array yields one string per element (in order), an
   atomic value a singleton list. *)
let lookup path obj =
json_find path obj >>= function
| `A vs ->
let rec loop acc = function
| v :: vs -> lookup_to_str v >>= fun s -> loop (s :: acc) vs
| [] -> R.ok (List.rev acc)
in
loop [] vs
| v -> lookup_to_str v >>= fun s -> R.ok [s]
(* [parse_fuzzy_date s] parses "YYYY", "YYYY-MM" or "YYYY-MM-DD" into
   (year, month option, day option) strings. Only digit-ness and field
   width are checked, not ranges (e.g. month "13" is accepted). *)
let parse_fuzzy_date s =
let check_digits n s =
(* true iff [s] is exactly [n] ASCII digits. *)
let len = String.length s in
if len <> n then false else
try
for i = 0 to len - 1 do
if not (is_digit (Char.code s.[i])) then raise Exit
done;
true
with Exit -> false
in
match String.split ~sep:"-" s with
| [y; m; d] when check_digits 4 y && check_digits 2 m && check_digits 2 d ->
R.ok (y, Some m, Some d)
| [y; m] when check_digits 4 y && check_digits 2 m ->
R.ok (y, Some m, None)
| [y] when check_digits 4 y ->
R.ok (y, None, None)
| _ ->
R.error_msgf "could not parse fuzzy date (%s)" s
(* [map_todo m] is a map whose application always errors, used as a
   placeholder for unimplemented map kinds. *)
let map_todo m =
let err = R.msgf "map %s is unimplemented" m in
Ok (fun s -> R.error (err, s))
(* [err_map ~use fmt ...] errors with a message and the fallback value
   [use] to substitute for the failed mapping. *)
let err_map ~use fmt = Printf.ksprintf (fun e -> R.error (`Msg e, use)) fmt
(* Case maps are recognized but not implemented yet. *)
let map_case var kind = match kind with
| "less" | "lower" | "upper" -> map_todo ("case_" ^ kind)
| _ -> R.error_msgf "variable $(%s): unknown case map kind `%s`" var kind
(* TODO implement dates correctly *)
(* Date maps extract one field of a fuzzy date (see [parse_fuzzy_date]);
   a missing month or day maps to '#' padding of the requested width.
   NOTE(review): "Y" and "YYYY" currently behave identically. *)
let map_date_y s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (y, _, _) -> Ok y
let map_date_yy s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (y, _, _) -> Ok (String.sub y 2 2)
let map_date_yyyy s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (y, _, _) -> Ok y
let map_date_m s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (_, m, _) -> Ok (match m with None -> "#" | Some m -> m)
let map_date_mm s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (_, m, _) -> Ok (match m with None -> "##" | Some m -> m)
let map_date_d s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (_, _, d) -> Ok (match d with None -> "#" | Some m -> m)
let map_date_dd s = match parse_fuzzy_date s with
| Error err -> Error (err, s)
| Ok (_, _, d) -> Ok (match d with None -> "##" | Some m -> m)
(* [map_date var kind] selects a date map by [kind] ("Y", "YY", ...);
   "e" (era?) is recognized but unimplemented. *)
let map_date var kind = match kind with
| "Y" -> Ok map_date_y | "YY" -> Ok map_date_yy | "YYYY" -> Ok map_date_yyyy
| "M" -> Ok map_date_m | "MM" -> Ok map_date_mm
| "d" -> Ok map_date_d | "dd" -> Ok map_date_dd
| "e" -> map_todo "date_e"
| _ -> R.error_msgf "variable $(%s): unknown date map kind `%s`" var kind
(* [map_letter var n] keeps the first [n] characters of the value (or
   the whole value if it is shorter). Errors if [n] is not an int. *)
let map_letter var n = match R.int_of_string n with
| None -> R.error_msgf "variable $(%s): unknown letter map kind `%s`" var n
| Some n ->
let map s = Ok (if n > String.length s then s else (String.sub s 0 n)) in
Ok map
(* [map_int var count] zero-pads the integer value to [count] digits.
   A non-integer value errors at map time with a zero padding of the
   right width as fallback. *)
let map_int var count = match R.int_of_string count with
| None -> R.error_msgf "variable $(%s): unknown int map kind `%s`" var count
| Some count ->
let map s =
let fmt count i = Printf.sprintf "%0*d" count i in
try Ok (fmt count (int_of_string s)) with
| Failure _ ->
err_map ~use:(fmt count 0)
"variable $(%s): value `%s` not an int" var s
in
Ok map
(* [map_id_find var smaps id] looks up the string map [id] in [smaps],
   erroring if it does not exist. *)
let map_id_find var smaps id = match (String.Map.find id smaps) with
| None -> R.error_msgf "variable $(%s): unknown map id `%s`" var id
| Some m -> Ok m
(* [map_id var smaps id] maps values through map [id]; a value absent
   from the map errors at map time (with the value itself as fallback). *)
let map_id var smaps id =
map_id_find var smaps id >>= fun m ->
let map s = match String.Map.find s m with
| Some v -> Ok v
| None ->
err_map ~use:s
"variable $(%s): map id `%s` could not map `%s`" var id s
in
Ok map
(* [pmap_id var smaps id] is the partial variant of [map_id]: values
   absent from the map pass through unchanged. *)
let pmap_id var smaps id = match map_id_find var smaps id with
| Error _ as e -> e
| Ok m ->
let map s = match String.Map.find s m with
| None -> Ok s
| Some s -> Ok s
in
Ok map
(* [get_map var smaps m] resolves map specification [m] of variable
   [var] (e.g. "date_YYYY", "int_3", "map_foo") to a string mapping
   function, erroring on unknown specifications. *)
let get_map var smaps m = match String.cut ~sep:"_" (String.trim m) with
| Some ("case", kind) -> map_case var kind
| Some ("letter", n) -> map_letter var n
| Some ("date", kind) -> map_date var kind
| Some ("int", count) -> map_int var count
| Some ("map", id) -> map_id var smaps id   (* was [m]: passing the full
                                               "map_<id>" spec left [id]
                                               unused and forced smaps keys
                                               to carry their prefix. *)
| Some ("pmap", id) -> pmap_id var smaps id (* same fix as above. *)
| None | _ -> R.error_msgf "variable $(%s): unknown map `%s`" var m
TODO splicing , de - uglify
let r = match String.split ~sep:"," var_spec with
| var :: maps ->
let add_map acc m = match acc with
| Error _ as e -> e
| Ok maps ->
match get_map var smaps m with
| Error _ as e -> e
| Ok m -> Ok (m :: maps)
in
begin match List.fold_left add_map (Ok []) maps with
| Error err -> Error (err, "MAPERROR")
| Ok maps -> Ok (String.trim var, List.rev maps)
end
| _ ->
Error (R.msgf "var `$(%s)`: illegal format variable." var_spec, "ILLEGAL")
in
match r with
| Error _ as e -> e
| Ok (var, maps) ->
match String.Map.find var env with
| None | Some [] ->
Error (R.msgf "var `%s`: undefined variable: `$(%s)'" var_spec var,
"UNDEFINED")
| Some [v] ->
let apply acc m = match acc with
| Error _ as e -> e
| Ok s -> m s
in
List.fold_left apply (Ok v) maps
| Some l ->
Error (R.msgf "var `%s`: unspliced multiple value" var_spec,
"UNSPLICED")
(* Unimplemented: format a single value of [env] with [fmt]; a sketch
   lives in the commented-out code that follows. *)
let format ?buf fmt ~env ~smaps = failwith "TODO"
let buf = match buf with Some b - > b | None - > Buffer.create 255 in
let err = ref ( ` Msg " " ) in
let lookup_var = match lookup_var env with
| Error ( e , v ) - > err : = e ; v
| Ok v - > v
in
Buffer.clear buf ; Buffer.add_substitute buf lookup_var fmt ;
let data = Buffer.contents buf in
if ! err < > ( ` Msg " " ) then Error ( ! err , data ) else Ok data
let buf = match buf with Some b -> b | None -> Buffer.create 255 in
let err = ref (`Msg "") in
let lookup_var var_spec = match lookup_var env smaps var_spec with
| Error (e, v) -> err := e; v
| Ok v -> v
in
Buffer.clear buf; Buffer.add_substitute buf lookup_var fmt;
let data = Buffer.contents buf in
if !err <> (`Msg "") then Error (!err, data) else Ok data
*)
(* Unimplemented: like [format] but producing every formatting over
   multiply-valued variables (cartesian product of assignments); a
   sketch lives in the commented-out code that follows. *)
let formats ?buf fmt ~env ~smaps = failwith "TODO"
let rec push_v acc v = function
| l :: lists -> push_v ((v :: l) :: acc) v lists
| [] -> acc
in
let rec push_vs acc lists = function
| v :: vs -> push_vs (push_v acc v lists) lists vs
| [] -> acc
in
let rec loop acc = function
| vs :: vss -> loop (push_vs [] (List.rev acc) (List.rev vs)) vss
| [] -> acc
in
if vss = [] then [] else loop [[]] (List.rev vss)
FIXME better error report
let lookup_var env var =
match try Some (List.assoc var env) with Not_found -> None with
| None ->
FIXME this should n't occur here
Log.err "variable %s undefined" var; "UNDEFINED"
| Some l -> l
in
let rec assigns acc = function
| [] -> acc
| (name, Error e) :: vars ->
Log.err "var %s lookup error: %s" name e;
assigns ([(name, "ERROR")] :: acc) vars
| (name, Ok vs) :: vars ->
assigns ((List.map (fun v -> (name, v)) vs) :: acc) vars
in
let vars = Ddescr.Formatter.vars fmt in
let assigns = assigns [] (List.map (fun (k, l) -> k, lookup l j) vars) in
let envs = product assigns in
let format = Ddescr.Formatter.format fmt in
let add_run b acc run =
Buffer.clear b;
Buffer.add_substitute b (lookup_var run) format;
Buffer.contents b :: acc
in
let b = Buffer.create 255 in
List.fold_left (add_run b) [] envs
let format_str fmt j =
FIXME report error in case of list ?
String.concat "" (format fmt j)
*)
(* NOTE(review): this [cache] is shadowed by the second [cache] defined
   before [file_scan] below and appears unused here. *)
let cache = Hashtbl.create 255
(* A parsed format string: literal runs and $(var) references, in order. *)
type fmt = [`Lit of string | `Var of string ] list
(* [parse_fmt ?buf s] parses the substitution format [s] into its
   [`Lit] and [`Var] segments. "$(name)" is a variable reference and
   "$$" an escaped literal dollar; any other use of '$', or an
   unterminated "$(", is an error. [buf] is scratch space (cleared). *)
let parse_fmt ?buf s =
  try
    let b = match buf with
    | None -> Buffer.create 255
    | Some buf -> Buffer.clear buf; buf
    in
    let segs = ref [] in
    let take_buf () = let data = Buffer.contents b in Buffer.clear b; data in
    let emit_lit () =
      if Buffer.length b <> 0 then segs := `Lit (take_buf ()) :: !segs
    in
    let len = String.length s in
    let rec scan i mode =
      if i >= len then mode else
      match mode, s.[i] with
      | `Lit, '$' -> scan (i + 1) `Dollar
      | `Lit, c -> Buffer.add_char b c; scan (i + 1) `Lit
      | `Dollar, '$' -> Buffer.add_char b '$'; scan (i + 1) `Lit
      | `Dollar, '(' -> emit_lit (); scan (i + 1) `Var
      | `Dollar, _ -> raise Exit
      | `Var, ')' -> segs := `Var (take_buf ()) :: !segs; scan (i + 1) `Lit
      | `Var, c -> Buffer.add_char b c; scan (i + 1) `Var
    in
    match scan 0 `Lit with
    | `Lit -> emit_lit (); Ok (List.rev !segs)
    | `Dollar | `Var -> raise Exit (* dangling '$' or unclosed "$(". *)
  with Exit -> Error (strf "malformed format: `%s`" s)
(* Cache of file scans, keyed by the literal pattern string. *)
let cache = Hashtbl.create 255
(* [file_scan pat] unifies file pattern [pat] against the file system
   and returns the variable environments of the matches. Errors are
   logged and yield the empty list. Results are cached per pattern, so
   later file system changes are not picked up. *)
let file_scan pat = try Hashtbl.find cache pat with
| Not_found ->
(OS.Path.unify (Path.of_string pat)
>>= fun envs -> R.ok (List.rev_map snd envs))
|> Log.on_error_msg ~use:[]
|> fun envs -> Hashtbl.add cache pat envs; envs
---------------------------------------------------------------------------
Copyright 2012
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions
are met :
1 . Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above
copyright notice , this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution .
3 . Neither the name of nor the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT
OWNER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
---------------------------------------------------------------------------
Copyright 2012 Daniel C. Bünzli
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Daniel C. Bünzli nor the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------*)
|
657a6cd99033f97e945cf6b4f87a957ce5827c72d837271d7e728fce049d8792 | acieroid/scala-am | church-2-num-1.scm | (letrec ((zero (lambda (f x) x))
(inc (lambda (n)
(lambda (f x)
(f (n f x)))))
(plus (lambda (m n)
(lambda (f x)
(m f (n f x))))))
((inc (inc zero)) (lambda (x) (+ x 1)) 0))
| null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/changesBenevolPaper/church-2-num-1.scm | scheme | (letrec ((zero (lambda (f x) x))
;; inc: Church successor — wraps n with one extra application of f.
(inc (lambda (n)
(lambda (f x)
(f (n f x)))))
;; plus: Church addition (defined but unused in this program).
(plus (lambda (m n)
(lambda (f x)
(m f (n f x))))))
;; Build the Church numeral for 2 by applying inc twice to zero, then
;; convert it to a machine integer by instantiating it with add1 and 0;
;; the whole letrec evaluates to 2.
((inc (inc zero)) (lambda (x) (+ x 1)) 0))
|
|
6febfa553216a882c43330bcab22bc2bad66244caf548a9e136e371549e11481 | funcool/httpurr | generators.cljc | (ns httpurr.test.generators
(:require
[clojure.test.check.generators :as gen]
[httpurr.status :as http]))
(defn gen-statuses
[coll]
(gen/such-that
#(not (empty? %)) (gen/map (gen/return :status)
(gen/elements coll))))
(def informational-response
(gen-statuses http/informational-codes))
(def success-response
(gen-statuses http/success-codes))
(def redirection-response
(gen-statuses http/redirection-codes))
(def client-error-response
(gen-statuses http/client-error-codes))
(def server-error-response
(gen-statuses http/server-error-codes))
(def error-response
(gen-statuses (concat http/client-error-codes
http/server-error-codes)))
| null | https://raw.githubusercontent.com/funcool/httpurr/22fb1b921864155a6b4eff113e2456ee924dd681/test/httpurr/test/generators.cljc | clojure | (ns httpurr.test.generators
(:require
[clojure.test.check.generators :as gen]
[httpurr.status :as http]))
(defn gen-statuses
[coll]
(gen/such-that
#(not (empty? %)) (gen/map (gen/return :status)
(gen/elements coll))))
(def informational-response
(gen-statuses http/informational-codes))
(def success-response
(gen-statuses http/success-codes))
(def redirection-response
(gen-statuses http/redirection-codes))
(def client-error-response
(gen-statuses http/client-error-codes))
(def server-error-response
(gen-statuses http/server-error-codes))
(def error-response
(gen-statuses (concat http/client-error-codes
http/server-error-codes)))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.