_id (string, 64) | repository (string, 6-84) | name (string, 4-110) | content (string, 0-248k) | license (null) | download_url (string, 89-454) | language (7 classes) | comments (string, 0-74.6k) | code (string, 0-248k)
---|---|---|---|---|---|---|---|---|
71b9d9d66853635368984537741c2d3a9dfe580c5c9ad3c6481fdbc964d1d349 | macourtney/Conjure | database_session_store.clj | (ns conjure.model.database-session-store
(:import [java.util Calendar Date])
(:require [clojure.tools.logging :as logging]
[drift-db.core :as database]
[conjure.util.session-utils :as session-utils]
[clojure.tools.string-utils :as conjure-str-utils]))
(def session-table :sessions)
(def created-at-column :created_at)
(def session-id-column :session_id)
(def data-column :data)
(defn
#^{ :doc "Makes sure the session table exists in the database. If it doesn't exist, then this method creates it." }
init []
(when (not (database/table-exists? session-table))
(database/create-table session-table
(database/id)
(database/date-time created-at-column)
(database/string session-id-column)
(database/text data-column))))
(defn
#^{ :doc "Creates a row in the database for a new session and returns the session id. If a value is given, it is saved
in the database." }
create-session
([key-name value]
(database/insert-into session-table
{ created-at-column (database/format-date-time (new Date)),
session-id-column (session-utils/session-id),
data-column (conjure-str-utils/form-str { key-name value }) })))
(defn
#^{ :doc "Deletes the row in the database for the given session-id, or the session id from the request-map." }
drop-session
([] (drop-session (session-utils/session-id)))
([session-id]
(database/delete session-table [ (str (conjure-str-utils/str-keyword session-id-column) " = ?") session-id ])))
(defn-
#^{ :doc "Replaces the map stored in the session table with the given store-map." }
save-map [store-map]
(database/update
session-table
[(str (conjure-str-utils/str-keyword session-id-column) " = ?") (session-utils/session-id)]
{ data-column (conjure-str-utils/form-str store-map) }))
(defn
#^{ :doc "Retrieves the value stored in the database for the given session id or the id in the request-map." }
retrieve
([] (retrieve (session-utils/session-id)))
([session-id]
(when-let [row-values (database/sql-find
{ :table (conjure-str-utils/str-keyword session-table),
:select (conjure-str-utils/str-keyword data-column),
:where [(str (conjure-str-utils/str-keyword session-id-column) " = ?") session-id] })]
(when-let [data (get (first row-values) data-column)]
(read-string data)))))
(defn
#^{ :doc "Deletes the given key-name from the session store." }
delete [key-name]
(let [ stored-map (retrieve)]
(if stored-map
(save-map (dissoc stored-map key-name)))))
(defn
#^{ :doc "Stores the given value in the session from the request-map." }
save [key-name value]
(let [stored-map (retrieve)]
(if stored-map
(save-map (assoc stored-map key-name value))
(create-session key-name value))))
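;; Illustrative usage sketch (added; not part of the original file), assuming a
;; configured drift-db connection and an active request with a session id:
(comment
  (init)                     ;; ensure the :sessions table exists
  (save :current-user "bob") ;; first save creates the session row
  (retrieve)                 ;; => {:current-user "bob"}
  (delete :current-user)     ;; remove one key from the stored map
  (drop-session))            ;; delete the whole session row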
(def session-store
{ :init init,
:drop drop-session,
:delete delete,
:save save,
:retrieve retrieve }) | null | https://raw.githubusercontent.com/macourtney/Conjure/1d6cb22d321ea75af3a6abe2a5bc140ad36e20d1/conjure_model/src/conjure/model/database_session_store.clj | clojure | (ns conjure.model.database-session-store
(:import [java.util Calendar Date])
(:require [clojure.tools.logging :as logging]
[drift-db.core :as database]
[conjure.util.session-utils :as session-utils]
[clojure.tools.string-utils :as conjure-str-utils]))
(def session-table :sessions)
(def created-at-column :created_at)
(def session-id-column :session_id)
(def data-column :data)
(defn
#^{ :doc "Makes sure the session table exists in the database. If it doesn't exist, then this method creates it." }
init []
(when (not (database/table-exists? session-table))
(database/create-table session-table
(database/id)
(database/date-time created-at-column)
(database/string session-id-column)
(database/text data-column))))
(defn
#^{ :doc "Creates a row in the database for a new session and returns the session id. If a value is given, it is saved
in the database." }
create-session
([key-name value]
(database/insert-into session-table
{ created-at-column (database/format-date-time (new Date)),
session-id-column (session-utils/session-id),
data-column (conjure-str-utils/form-str { key-name value }) })))
(defn
#^{ :doc "Deletes the row in the database for the given session-id, or the session id from the request-map." }
drop-session
([] (drop-session (session-utils/session-id)))
([session-id]
(database/delete session-table [ (str (conjure-str-utils/str-keyword session-id-column) " = ?") session-id ])))
(defn-
#^{ :doc "Replaces the map stored in the session table with the given store-map." }
save-map [store-map]
(database/update
session-table
[(str (conjure-str-utils/str-keyword session-id-column) " = ?") (session-utils/session-id)]
{ data-column (conjure-str-utils/form-str store-map) }))
(defn
#^{ :doc "Retrieves the value stored in the database for the given session id or the id in the request-map." }
retrieve
([] (retrieve (session-utils/session-id)))
([session-id]
(when-let [row-values (database/sql-find
{ :table (conjure-str-utils/str-keyword session-table),
:select (conjure-str-utils/str-keyword data-column),
:where [(str (conjure-str-utils/str-keyword session-id-column) " = ?") session-id] })]
(when-let [data (get (first row-values) data-column)]
(read-string data)))))
(defn
#^{ :doc "Deletes the given key-name from the session store." }
delete [key-name]
(let [ stored-map (retrieve)]
(if stored-map
(save-map (dissoc stored-map key-name)))))
(defn
#^{ :doc "Stores the given value in the session from the request-map." }
save [key-name value]
(let [stored-map (retrieve)]
(if stored-map
(save-map (assoc stored-map key-name value))
(create-session key-name value))))
(def session-store
{ :init init,
:drop drop-session,
:delete delete,
:save save,
:retrieve retrieve }) |
|
dee1d4d2b46b97f8e1eb7e51eab7694f733d084d970a60ea3f3273f536b7c431 | lspitzner/exference | Ord.hs | module Data.Ord where
data Ordering
class Data.Eq.Eq a => Ord a where
compare :: a -> a -> Ordering
(<) :: a -> a -> Data.Bool.Bool
(>=) :: a -> a -> Data.Bool.Bool
(>) :: a -> a -> Data.Bool.Bool
(<=) :: a -> a -> Data.Bool.Bool
comparing :: Ord a => (b -> a) -> b -> b -> Ordering
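-- Illustrative note (added; not part of the original listing): comparing
-- projects a key before comparing, e.g.
--   comparing fst (1, "a") (2, "b") == LT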
instance Ord a => Ord [a]
instance Prelude.Integral a => Ord (Ratio a)
instance Ord p => Ord (Par1 p)
instance Ord a => Ord (Data.Maybe.Maybe a)
instance Ord a => Ord (Down a)
instance Ord a => Ord (Last a)
instance Ord a => Ord (First a)
instance Ord a => Ord (Product a)
instance Ord a => Ord (Sum a)
instance Ord a => Ord (Dual a)
instance Ord a => Ord (ZipList a)
instance (Ord a, Ord b) => Ord (Data.Either.Either a b)
instance Ord (f p) => Ord (Rec1 f p)
instance (Ord a, Ord b) => Ord (a, b)
instance Ord c => Ord (K1 i c p)
instance (Ord (f p), Ord (g p)) => Ord ((:+:) f g p)
instance (Ord (f p), Ord (g p)) => Ord ((:*:) f g p)
instance Ord (f (g p)) => Ord ((:.:) f g p)
instance (Ord a, Ord b, Ord c) => Ord (a, b, c)
instance Ord (f p) => Ord (M1 i c f p)
instance (Ord a, Ord b, Ord c, Ord d) => Ord (a, b, c, d)
instance (Ord a, Ord b, Ord c, Ord d, Ord e) => Ord (a, b, c, d, e)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f) => Ord (a, b, c, d, e, f)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g) => Ord (a, b, c, d, e, f, g)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h) => Ord (a, b, c, d, e, f, g, h)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i) => Ord (a, b, c, d, e, f, g, h, i)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j) => Ord (a, b, c, d, e, f, g, h, i, j)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k) => Ord (a, b, c, d, e, f, g, h, i, j, k)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k, Ord l) => Ord (a, b, c, d, e, f, g, h, i, j, k, l)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k, Ord l, Ord m) => Ord (a, b, c, d, e, f, g, h, i, j, k, l, m)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k, Ord l, Ord m, Ord n) => Ord (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k, Ord l, Ord m, Ord n, Ord o) => Ord (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
data Down a
instance Data.Eq.Eq a => Data.Eq.Eq (Down a)
instance Data.Ord.Ord a => Data.Ord.Ord (Down a)
instance Text.Read.Read a => Text.Read.Read (Down a)
instance Text.Show.Show a => Text.Show.Show (Down a)
| null | https://raw.githubusercontent.com/lspitzner/exference/d8a336f8b9df905e54173339f78ba892daa3f688/environment/Ord.hs | haskell | module Data.Ord where
data Ordering
class Data.Eq.Eq a => Ord a where
compare :: a -> a -> Ordering
(<) :: a -> a -> Data.Bool.Bool
(>=) :: a -> a -> Data.Bool.Bool
(>) :: a -> a -> Data.Bool.Bool
(<=) :: a -> a -> Data.Bool.Bool
comparing :: Ord a => (b -> a) -> b -> b -> Ordering
instance Ord a => Ord [a]
instance Prelude.Integral a => Ord (Ratio a)
instance Ord p => Ord (Par1 p)
instance Ord a => Ord (Data.Maybe.Maybe a)
instance Ord a => Ord (Down a)
instance Ord a => Ord (Last a)
instance Ord a => Ord (First a)
instance Ord a => Ord (Product a)
instance Ord a => Ord (Sum a)
instance Ord a => Ord (Dual a)
instance Ord a => Ord (ZipList a)
instance (Ord a, Ord b) => Ord (Data.Either.Either a b)
instance Ord (f p) => Ord (Rec1 f p)
instance (Ord a, Ord b) => Ord (a, b)
instance Ord c => Ord (K1 i c p)
instance (Ord (f p), Ord (g p)) => Ord ((:+:) f g p)
instance (Ord (f p), Ord (g p)) => Ord ((:*:) f g p)
instance Ord (f (g p)) => Ord ((:.:) f g p)
instance (Ord a, Ord b, Ord c) => Ord (a, b, c)
instance Ord (f p) => Ord (M1 i c f p)
instance (Ord a, Ord b, Ord c, Ord d) => Ord (a, b, c, d)
instance (Ord a, Ord b, Ord c, Ord d, Ord e) => Ord (a, b, c, d, e)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f) => Ord (a, b, c, d, e, f)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g) => Ord (a, b, c, d, e, f, g)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h) => Ord (a, b, c, d, e, f, g, h)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i) => Ord (a, b, c, d, e, f, g, h, i)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j) => Ord (a, b, c, d, e, f, g, h, i, j)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k) => Ord (a, b, c, d, e, f, g, h, i, j, k)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k, Ord l) => Ord (a, b, c, d, e, f, g, h, i, j, k, l)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k, Ord l, Ord m) => Ord (a, b, c, d, e, f, g, h, i, j, k, l, m)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k, Ord l, Ord m, Ord n) => Ord (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
instance (Ord a, Ord b, Ord c, Ord d, Ord e, Ord f, Ord g, Ord h, Ord i, Ord j, Ord k, Ord l, Ord m, Ord n, Ord o) => Ord (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
data Down a
instance Data.Eq.Eq a => Data.Eq.Eq (Down a)
instance Data.Ord.Ord a => Data.Ord.Ord (Down a)
instance Text.Read.Read a => Text.Read.Read (Down a)
instance Text.Show.Show a => Text.Show.Show (Down a)
|
|
9e0f93d33591da8e3cf800a3f0ba1508e1aec4cf95a9698259fc0fd98675dbfc | uscensusbureau/citysdk | core.cljc | (ns census.wmsAPI.core
(:require
#?(:cljs [cljs.core.async :refer [>! <! chan promise-chan close! take! put! to-chan!
timeout]
:refer-macros [go alt!]]
:clj [clojure.core.async :refer [>! <! chan promise-chan close! take! put! to-chan!
timeout go alt!]])
[clojure.set :refer [map-invert]]
[cuerdas.core :refer [join]]
[linked.core :as -=-]
[census.utils.core :refer [=O?>-cb $GET$
filter-nil-tails
amap-type vec-type throw-err ->args
URL-WMS URL-GEOKEYMAP]]))
(defn $g$->wms-cfg
"
Creates a configuration map for the WMS url-builder from the geoHierarchy map.
if : lookup key is a vec -> direct looked up
else: lookup at :id<-json key
($g$->wms-cfg
$g$
{:vintage 2014,
:geoHierarchy {:state {:lat 28.2639, :lng -80.7214}, :county '*'}})
;=>
{:vintage 2014,
:layers ['84'],
:cur-layer-idx 0,
:lat 28.2639,
:lng -80.7214,
:sub-level [:county '*'],
:geo [:STATE],
:looked-up-in :2010}
"
([$g$ args] ($g$->wms-cfg $g$ args 0))
([$g$ {:keys [geoHierarchy vintage]} server-index]
(let [[[scope {:keys [lat lng]}] & sub-levels] (vec geoHierarchy)
{:keys [lookup layers]} (get-in $g$ [scope (keyword (str vintage)) :wms])
config {:vintage vintage
:layers layers
:cur-layer-idx server-index
:lat lat
:lng lng
:sub-levels sub-levels}]
(if (instance? vec-type lookup)
(merge-with assoc config
{:geo lookup
:looked-up-in (keyword vintage)})
(merge-with assoc config
{:geo (get-in $g$ [scope lookup :id<-json])
:looked-up-in lookup})))))
;; (filter-nil-tails {:state {:lat 1 :lng 2} :county nil :tract "*"})
(defn lookup-id->match?
"
:id<-json
Looks in a single entry from the inverted geoKeyMap for a matching geoKey via
`some`ing through each of its vintages for a match with a provided WMS
geographic identifier.
(lookup-id->match? :CONCITY ;; ↓ seq'd inverted geoKeyMap | looks up ↓
[{:2017 {:wms {:layers ['24'], :lookup [:STATE :CONCITY]}}
:2016 {:wms {:layers ['24'], :lookup [:STATE :CONCITY]}}}
:consolidated-cities
{:2014 {:wms {:layers ['24'], :lookup [:BLOOP]}}
:2016 {:wms {:layers ['24'], :lookup :2010}}}
:something-else])
; => :consolidated-cities
"
[GEO [geo-val geo-key]]
(let [vins (map (fn [[_ {:keys [id<-json] {:keys [lookup]} :wms}]]
(if (instance? vec-type lookup)
(last lookup)
(last id<-json)))
(vec geo-val))]
(if (some #(= GEO %) vins)
geo-key
nil)))
(defn search-id->match?
"
Searches the entire geoKeyMap (inverted) for a geo key match provided a given
WMS geographic identifier. Returned values are used in combination with a
response from the TigerWeb WMS geocoding response to determine the geographic
hierarchy of a geography for filling-in the data API request geography for
geocoding requests
(search-id->match? $g$ :CONCITY)
; => :consolidated-cities
"
[$g$ GEO]
(let [inverted-geoKeyMap (seq (map-invert $g$))]
(remove nil?
(map #(lookup-id->match? GEO %)
inverted-geoKeyMap))))
;
;
(defn C->GIS-url
"
Constructs a URL for the TigerWeb Web Mapping Service (WMS) using a lookup
from the geoKeyMap configuration file cross-referenced against the users args.
"
([$g$ args] (C->GIS-url $g$ args 0))
([$g$ args server-index]
(let [{:keys [vintage layers cur-layer-idx lat lng geo]}
($g$->wms-cfg $g$ args server-index)]
(str URL-WMS
(cond
(= 0 (mod vintage 10)) (str "TIGERweb/tigerWMS_Census" vintage)
:else (str "TIGERweb/tigerWMS_ACS" vintage))
"/MapServer/"
(get layers cur-layer-idx)
"/query?"
(join "&"
(map #(join "=" %)
[["geometry" (str lng "," lat)]
["geometryType" "esriGeometryPoint"]
["inSR" "4269"]
["spatialRel" "esriSpatialRelIntersects"]
["returnGeometry" "false"]
["f" "json"]
["outFields" (join "," (map name geo))]]))))))
(defn configed-map
"IMPORTANT!
The :id<-json key in index.edn is double loaded, to both pull ids from GeoJSON
as well as configure the API call
(configed-map $g$ {:STATE '51', :COUNTY '013'})
;=> {:STATE {:state '51'}, :COUNTY {:county '013'}}
Takes the geoKeyMap configuration and the attributes from the WMS service
API (js->cljs response) and returns a config map (:key = attribute ; value =
corresponding configured map with (:geography 'value') needed to call Census'
data API).
this will also handle filling in any parent geographies left out of
the original geoHierarchy object when WMS is triggered
"
[$g$ attrs]
(let [wms-keys (into [] (keys attrs))
wms-vals (into [] (vals attrs))
geo-keys (map #(search-id->match? $g$ %) wms-keys)]
(loop [idx 0
result {}]
(if (= nil (get wms-keys idx))
result
(recur (inc idx)
(assoc result
(get wms-keys idx)
;; returns an empty map ({}) if invalid
{(get (mapv #(first %) geo-keys) idx)
(get wms-vals idx)}))))))
(def $url$ (atom ""))
(def $res$ (atom []))
(def $err$ (atom {}))
(def $GET$-wms ($GET$ :json "Census FIPS Geocoding" $url$ $res$ $err$))
(defn try-census-wms
"
Takes the geoKeyMap with the users' arguments, a current WMS server index (used
for retrying if more than one exists for a given geography in WMS) and a
channel that will convey the result. Tries to cal the WMS and puts the
`configed-map` into the channel if successful.
"
[$g$ args server-idx =res=]
(let [=args=> (chan 1 (map #(configed-map $g$ (get-in % [:features 0 :attributes]))))
url (C->GIS-url $g$ args server-idx)]
($GET$-wms (to-chan! [url]) =args=> =args=>)
(take! =args=> (fn [args->] (put! =res= args->)
(close! =args=>)))))
(defn wms-engage?
"
Engages the wms-service workflow if the first element in the geoHierarchy
contains a map argument, which implies that the user doesn't have a GEOID handy.
"
[{:keys [geoHierarchy]}]
(let [[_ geo-val] (first geoHierarchy)]
(if (instance? amap-type geo-val)
true
false)))
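;; Illustrative sketch (added; not in the original source): only a lat/lng map in
;; the first geoHierarchy position triggers the WMS geocoding workflow, e.g.
(comment
  (wms-engage? {:geoHierarchy {:state {:lat 28.2639 :lng -80.7214} :county "*"}}) ;; => true
  (wms-engage? {:geoHierarchy {:state "51" :county "*"}}))                        ;; => false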
; TODO: can this be cleaned up?
(defn =>args=GIS=args=>
"
Tries to find the appropriate geographic identifiers for a provided
geoHierarchy argument, which contains a {:lat <float> :lng <float>} coordinate
instead of an actual FIPS code set. If FIPS are already provided, this step is
skipped. If not, the users' arguments are augmented with FIPS codes from the
Census Tiger WMS.
"
[$g$]
(fn [=>args= =args=>]
(go (let [->args (<! =>args=)
=res= (chan 1)]
(if (not (wms-engage? ->args))
(do (>! =args=> ->args)
(close! =res=))
(loop [args ->args
idx 0]
(try-census-wms $g$ args idx =res=)
(let [{:keys [layers sub-levels]} ($g$->wms-cfg $g$ args)
res (<! =res=)]
(cond
(not (empty? res))
(do (>! =args=>
(merge args
(assoc {} :geoHierarchy
(conj (-=-/map)
(into (-=-/map) (vals res))
(into (-=-/map) sub-levels))))) ;; e.g.: ([:county "*"])
(close! =res=))
; if another layer is available: recur
(and (empty? res) (not (nil? (get layers (inc idx)))))
(recur ->args (inc idx))
:else
(do (>! =args=> "No FIPS (Census geocodes) found for given arguments")
(close! =res=))))))))))
(defn I-<wms=I=
"Provides a synchronous input to a function that accepts a channel for args
and calls the Census WMS for geocoding; providing the results to the channel"
[$g$]
(fn [I =args=>]
((=>args=GIS=args=> $g$) (to-chan! [(->args I)]) =args=>)))
| null | https://raw.githubusercontent.com/uscensusbureau/citysdk/ae5c1c2d826dec76dfca903d6784750811ec24bc/v2/src/census/wmsAPI/core.cljc | clojure | =>
↓ seq'd inverted geoKeyMap | looks up ↓
=> :consolidated-cities
=> :consolidated-cities
=> {:STATE {:state '51'}, :COUNTY {:county '013'}}
value =
returns an empty map ({}) if invalid
TODO: can this be cleaned up?
e.g.: ([:county "*"])
if another layer is available: recur
providing the results to the channel"
| (ns census.wmsAPI.core
(:require
#?(:cljs [cljs.core.async :refer [>! <! chan promise-chan close! take! put! to-chan!
timeout]
:refer-macros [go alt!]]
:clj [clojure.core.async :refer [>! <! chan promise-chan close! take! put! to-chan!
timeout go alt!]])
[clojure.set :refer [map-invert]]
[cuerdas.core :refer [join]]
[linked.core :as -=-]
[census.utils.core :refer [=O?>-cb $GET$
filter-nil-tails
amap-type vec-type throw-err ->args
URL-WMS URL-GEOKEYMAP]]))
(defn $g$->wms-cfg
"
Creates a configuration map for the WMS url-builder from the geoHierarchy map.
if : lookup key is a vec -> direct looked up
else: lookup at :id<-json key
($g$->wms-cfg
$g$
{:vintage 2014,
:geoHierarchy {:state {:lat 28.2639, :lng -80.7214}, :county '*'}})
{:vintage 2014,
:layers ['84'],
:cur-layer-idx 0,
:lat 28.2639,
:lng -80.7214,
:sub-level [:county '*'],
:geo [:STATE],
:looked-up-in :2010}
"
([$g$ args] ($g$->wms-cfg $g$ args 0))
([$g$ {:keys [geoHierarchy vintage]} server-index]
(let [[[scope {:keys [lat lng]}] & sub-levels] (vec geoHierarchy)
{:keys [lookup layers]} (get-in $g$ [scope (keyword (str vintage)) :wms])
config {:vintage vintage
:layers layers
:cur-layer-idx server-index
:lat lat
:lng lng
:sub-levels sub-levels}]
(if (instance? vec-type lookup)
(merge-with assoc config
{:geo lookup
:looked-up-in (keyword vintage)})
(merge-with assoc config
{:geo (get-in $g$ [scope lookup :id<-json])
:looked-up-in lookup})))))
;; (filter-nil-tails {:state {:lat 1 :lng 2} :county nil :tract "*"})
(defn lookup-id->match?
"
:id<-json
Looks in a single entry from the inverted geoKeyMap for a matching geoKey via
`some`ing through each of its vintages for a match with a provided WMS
geographic identifier.
[{:2017 {:wms {:layers ['24'], :lookup [:STATE :CONCITY]}}
:2016 {:wms {:layers ['24'], :lookup [:STATE :CONCITY]}}}
:consolidated-cities
{:2014 {:wms {:layers ['24'], :lookup [:BLOOP]}}
:2016 {:wms {:layers ['24'], :lookup :2010}}}
:something-else])
"
[GEO [geo-val geo-key]]
(let [vins (map (fn [[_ {:keys [id<-json] {:keys [lookup]} :wms}]]
(if (instance? vec-type lookup)
(last lookup)
(last id<-json)))
(vec geo-val))]
(if (some #(= GEO %) vins)
geo-key
nil)))
(defn search-id->match?
"
Searches the entire geoKeyMap (inverted) for a geo key match provided a given
WMS geographic identifier. Returned values are used in combination with a
response from the TigerWeb WMS geocoding response to determine the geographic
hierarchy of a geography for filling-in the data API request geography for
geocoding requests
(search-id->match? $g$ :CONCITY)
"
[$g$ GEO]
(let [inverted-geoKeyMap (seq (map-invert $g$))]
(remove nil?
(map #(lookup-id->match? GEO %)
inverted-geoKeyMap))))
(defn C->GIS-url
"
Constructs a URL for the TigerWeb Web Mapping Service (WMS) using a lookup
from the geoKeyMap configuration file cross-referenced against the users args.
"
([$g$ args] (C->GIS-url $g$ args 0))
([$g$ args server-index]
(let [{:keys [vintage layers cur-layer-idx lat lng geo]}
($g$->wms-cfg $g$ args server-index)]
(str URL-WMS
(cond
(= 0 (mod vintage 10)) (str "TIGERweb/tigerWMS_Census" vintage)
:else (str "TIGERweb/tigerWMS_ACS" vintage))
"/MapServer/"
(get layers cur-layer-idx)
"/query?"
(join "&"
(map #(join "=" %)
[["geometry" (str lng "," lat)]
["geometryType" "esriGeometryPoint"]
["inSR" "4269"]
["spatialRel" "esriSpatialRelIntersects"]
["returnGeometry" "false"]
["f" "json"]
["outFields" (join "," (map name geo))]]))))))
(defn configed-map
"IMPORTANT!
The :id<-json key in index.edn is double loaded, to both pull ids from GeoJSON
as well as configure the API call
(configed-map $g$ {:STATE '51', :COUNTY '013'})
Takes the geoKeyMap configuration and the attributes from the WMS service
corresponding configured map with (:geography 'value') needed to call Census'
data API).
this will also handle filling in any parent geographies left out of
the original geoHierarchy object when WMS is triggered
"
[$g$ attrs]
(let [wms-keys (into [] (keys attrs))
wms-vals (into [] (vals attrs))
geo-keys (map #(search-id->match? $g$ %) wms-keys)]
(loop [idx 0
result {}]
(if (= nil (get wms-keys idx))
result
(recur (inc idx)
(assoc result
(get wms-keys idx)
{(get (mapv #(first %) geo-keys) idx)
(get wms-vals idx)}))))))
(def $url$ (atom ""))
(def $res$ (atom []))
(def $err$ (atom {}))
(def $GET$-wms ($GET$ :json "Census FIPS Geocoding" $url$ $res$ $err$))
(defn try-census-wms
"
Takes the geoKeyMap with the users' arguments, a current WMS server index (used
for retrying if more than one exists for a given geography in WMS) and a
channel that will convey the result. Tries to cal the WMS and puts the
`configed-map` into the channel if successful.
"
[$g$ args server-idx =res=]
(let [=args=> (chan 1 (map #(configed-map $g$ (get-in % [:features 0 :attributes]))))
url (C->GIS-url $g$ args server-idx)]
($GET$-wms (to-chan! [url]) =args=> =args=>)
(take! =args=> (fn [args->] (put! =res= args->)
(close! =args=>)))))
(defn wms-engage?
"
Engages the wms-service workflow if the first element in the geoHierarchy
contains a map argument, which implies that the user doesn't have a GEOID handy.
"
[{:keys [geoHierarchy]}]
(let [[_ geo-val] (first geoHierarchy)]
(if (instance? amap-type geo-val)
true
false)))
(defn =>args=GIS=args=>
"
Tries to find the appropriate geographic identifiers for a provided
geoHierarchy argument, which contains a {:lat <float> :lng <float>} coordinate
instead of an actual FIPS code set. If FIPS are already provided, this step is
skipped. If not, the users' arguments are augmented with FIPS codes from the
Census Tiger WMS.
"
[$g$]
(fn [=>args= =args=>]
(go (let [->args (<! =>args=)
=res= (chan 1)]
(if (not (wms-engage? ->args))
(do (>! =args=> ->args)
(close! =res=))
(loop [args ->args
idx 0]
(try-census-wms $g$ args idx =res=)
(let [{:keys [layers sub-levels]} ($g$->wms-cfg $g$ args)
res (<! =res=)]
(cond
(not (empty? res))
(do (>! =args=>
(merge args
(assoc {} :geoHierarchy
(conj (-=-/map)
(into (-=-/map) (vals res))
(close! =res=))
(and (empty? res) (not (nil? (get layers (inc idx)))))
(recur ->args (inc idx))
:else
(do (>! =args=> "No FIPS (Census geocodes) found for given arguments")
(close! =res=))))))))))
(defn I-<wms=I=
"Provides a synchronous input to a function that accepts a channel for args
[$g$]
(fn [I =args=>]
((=>args=GIS=args=> $g$) (to-chan! [(->args I)]) =args=>)))
|
805a95f64e9fa320214788db9bcb5b9e874c448ebf96a93216ef9988d802d6bc | bobbae/gosling-emacs | mh-exit.ml | ; This file implements the autoloaded "exit" function (not a command) of mhe.
; There is a bug here: When processing the commands in cmd-buffer, they
; are sent directly to send-to-shell, without prepending mh-progs to the
; commands. If the user has his path wrong, these commands won't work.
; Doing things this way also fails to write ++update files as a result of
; moves.
; Could call &mh-close-folder, but this only processes commands for the
; current folder. Should fix it so that it can optionally do all folders.
; Shouldn't be hard.
; A better possibility that might be better is to have this routine look at
; cmd-buffer to see what folders need to be closed, and do each one.
(defun
(&mh-exit ans retval
(&mh-pop-to-buffer (concat "+" mh-folder))
(temp-use-buffer "cmd-buffer")
(setq retval 0)
(setq ans (get-response "Preparing to exit. Action? [q, e, u, ?] "
"qQeEuU\"
"q: quit (don't process) e: exit (after processing) u: undo (don't exit)"))
(if (| (= ans 'q') (= ans '\'))
(progn
(temp-use-buffer "cmd-buffer") (setq ans 'y')
(if (> (buffer-size) 0)
(setq ans
(get-response "Really exit without processing? "
"yYnN\" "y for Yes or n for No")))
(if (| (= ans 'y') (= ans '\'))
(progn
(temp-use-buffer (concat "+" mh-folder))
(erase-buffer)
(setq retval 1)
)
)
)
(= ans 'e')
(progn
(temp-use-buffer "cmd-buffer")
(if (!= 0 (buffer-size))
(progn
(message "Preparing to exit. Action? [q, e, u, ?] exiting...")
(sit-for 0)
(&mh-close-all-folders)
)
)
(setq retval 1)
)
)
(sit-for 0)
retval
)
(&mh-close-all-folders-xx
(temp-use-buffer "cmd-buffer")
(beginning-of-file)
(split-long-lines)
(beginning-of-file) (set-mark) (end-of-file)
(progn s
(setq s (region-to-string))
(message "Processing deletes and moves...")
(sit-for 0)
(send-to-shell s 'f')
(setq buffer-is-modified 0)
(temp-use-buffer (concat "+" mh-folder))
(&mh-make-headers-current)
(setq buffer-is-modified 0)
)
)
(&mh-close-all-folders
(&mh-pop-to-buffer (concat "+" mh-folder))
(temp-use-buffer "cmd-buffer")
(beginning-of-file)
(error-occured fn
(while (> (buffer-size) 0)
(re-search-forward " +\\([^ \t]*\\)")
(region-around-match 1)
(setq fn (region-to-string))
; So that headers all re-use same window.
(if (error-occured (use-old-buffer (concat "+" fn)))
(progn
(message "Whoops! You haven't visited +" fn)
(sit-for 10)
(beginning-of-line) (set-mark)
(next-line) (erase-region)
)
(progn
(message "Processing deletes and moves in +" fn "...")
(sit-for 0)
(setq mh-folder fn)
(&mh-close-folder)
)
)
(temp-use-buffer "cmd-buffer")
(beginning-of-file)
)
)
(erase-buffer)
)
(split-long-lines t s ; make sure no overlong lines in cmd-buffer
(beginning-of-file)
(while (! (eobp))
(next-line)
(while
(progn (beginning-of-line)
(setq t (dot)) (end-of-line) (> (dot) (+ t 200)))
(beginning-of-line) (set-mark)
(if (looking-at "rmm")
(progn (forward-word) (forward-word) (forward-word)
(backward-word))
(looking-at mh-file-command)
(progn (forward-word) (forward-word)
(forward-word) (forward-word)
(forward-word) (backward-word))
)
(setq s (region-to-string)) (beginning-of-line)
(goto-character (+ (dot) 200)) (backward-word)
(delete-previous-character) (newline)
(insert-string s)
)
)
(setq buffer-is-modified 0)
)
)
| null | https://raw.githubusercontent.com/bobbae/gosling-emacs/8fdda532abbffb0c952251a0b5a4857e0f27495a/maclib/mh-exit.ml | ocaml | ; This file implements the autoloaded "exit" function (not a command) of mhe.
; There is a bug here: When processing the commands in cmd-buffer, they
; are sent directly to send-to-shell, without prepending mh-progs to the
; commands. If the user has his path wrong, these commands won't work.
; Doing things this way also fails to write ++update files as a result of
; moves.
; Could call &mh-close-folder, but this only processes commands for the
; current folder. Should fix it so that it can optionally do all folders.
; Shouldn't be hard.
; A better possibility that might be better is to have this routine look at
; cmd-buffer to see what folders need to be closed, and do each one.
(defun
(&mh-exit ans retval
(&mh-pop-to-buffer (concat "+" mh-folder))
(temp-use-buffer "cmd-buffer")
(setq retval 0)
(setq ans (get-response "Preparing to exit. Action? [q, e, u, ?] "
"qQeEuU\"
"q: quit (don't process) e: exit (after processing) u: undo (don't exit)"))
(if (| (= ans 'q') (= ans '\'))
(progn
(temp-use-buffer "cmd-buffer") (setq ans 'y')
(if (> (buffer-size) 0)
(setq ans
(get-response "Really exit without processing? "
"yYnN\" "y for Yes or n for No")))
(if (| (= ans 'y') (= ans '\'))
(progn
(temp-use-buffer (concat "+" mh-folder))
(erase-buffer)
(setq retval 1)
)
)
)
(= ans 'e')
(progn
(temp-use-buffer "cmd-buffer")
(if (!= 0 (buffer-size))
(progn
(message "Preparing to exit. Action? [q, e, u, ?] exiting...")
(sit-for 0)
(&mh-close-all-folders)
)
)
(setq retval 1)
)
)
(sit-for 0)
retval
)
(&mh-close-all-folders-xx
(temp-use-buffer "cmd-buffer")
(beginning-of-file)
(split-long-lines)
(beginning-of-file) (set-mark) (end-of-file)
(progn s
(setq s (region-to-string))
(message "Processing deletes and moves...")
(sit-for 0)
(send-to-shell s 'f')
(setq buffer-is-modified 0)
(temp-use-buffer (concat "+" mh-folder))
(&mh-make-headers-current)
(setq buffer-is-modified 0)
)
)
(&mh-close-all-folders
(&mh-pop-to-buffer (concat "+" mh-folder))
(temp-use-buffer "cmd-buffer")
(beginning-of-file)
(error-occured fn
(while (> (buffer-size) 0)
(re-search-forward " +\\([^ \t]*\\)")
(region-around-match 1)
(setq fn (region-to-string))
; So that headers all re-use same window.
(if (error-occured (use-old-buffer (concat "+" fn)))
(progn
(message "Whoops! You haven't visited +" fn)
(sit-for 10)
(beginning-of-line) (set-mark)
(next-line) (erase-region)
)
(progn
(message "Processing deletes and moves in +" fn "...")
(sit-for 0)
(setq mh-folder fn)
(&mh-close-folder)
)
)
(temp-use-buffer "cmd-buffer")
(beginning-of-file)
)
)
(erase-buffer)
)
(split-long-lines t s ; make sure no overlong lines in cmd-buffer
(beginning-of-file)
(while (! (eobp))
(next-line)
(while
(progn (beginning-of-line)
(setq t (dot)) (end-of-line) (> (dot) (+ t 200)))
(beginning-of-line) (set-mark)
(if (looking-at "rmm")
(progn (forward-word) (forward-word) (forward-word)
(backward-word))
(looking-at mh-file-command)
(progn (forward-word) (forward-word)
(forward-word) (forward-word)
(forward-word) (backward-word))
)
(setq s (region-to-string)) (beginning-of-line)
(goto-character (+ (dot) 200)) (backward-word)
(delete-previous-character) (newline)
(insert-string s)
)
)
(setq buffer-is-modified 0)
)
)
|
|
d7a136f21eb0cfb7c6559a07e7f3679cbef4be4f618a339ed323bbc9bc3c2bd0 | brendanlong/ocaml-ooxml | easy_xlsx.ml | open Base
open Base.Printf
open Stdint
open Spreadsheetml
module Value = struct
type t =
| Date of Ptime.date
| Datetime of Ptime.t
| Number of float
| String of string
| Time of Ptime.time
let to_string = function
| Date (y, m, d) -> sprintf "%d-%d-%d" y m d
| Datetime t -> Ptime.to_rfc3339 t
| Number f ->
Float.to_string f
|> String.rstrip ~drop:(function '.' -> true | _ -> false)
| String s -> s
| Time ((h, m, s), _tz) -> sprintf "%d:%d:%d" h m s
let sexp_of_t = function
| Number n -> Float.sexp_of_t n
| String s -> String.sexp_of_t s
| _ as t -> to_string t |> String.sexp_of_t
let built_in_formats =
(* FIXME: This is only the English format codes. There are 4 Asian format
   codes (Chinese Simplified, Chinese Traditional, Japanese, Korean) and it's
   not clear to me how we're supposed to pick between them. *)
[ 0, "general"
; 1, "0"
; 2, "0.00"
; 3, "#,##0"
; 4, "#,##0,00"
; 9, "0%"
; 10, "0.00%"
; 11, "0.00E+00"
; 12, "# ?/?"
; 13, "# ??/??"
; 14, "mm-dd-yy"
; 15, "d-mmm-yy"
; 16, "d-mmm"
; 17, "mmm-yy"
; 18, "h:mm AM/PM"
; 19, "h:mm:ss AM/PM"
; 20, "h:mm"
; 21, "h:mm:ss"
; 22, "m/d/yy h:mm"
; 37, "#,##0 ;(#,##0)"
; 38, "#,##0 ;[Red](#,##0)"
; 39, "#,##0.00;(#,##0.00)"
; 40, "#,##0.00;[Red](#,##0.00)"
; 45, "mm:ss"
; 46, "[h]:mm:ss"
; 47, "mmss.0"
; 48, "##0.0E+0"
; 49, "@" ]
|> Map.of_alist_exn (module Int)
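(* Illustrative sketch (added; not part of the original file): classify_format
   below buckets a format string by the tokens it contains, e.g.
     classify_format "0.00"        => `Number
     classify_format "mm-dd-yy"    => `Date
     classify_format "h:mm:ss"     => `Time
     classify_format "m/d/yy h:mm" => `Datetime *)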
let classify_format str =
(* FIXME: This is really slow. We should really use Menhir for this. *)
let str = String.lowercase str in
let remove_strings = Str.regexp "\\[[^\\[]*\\]\\|\"[^\"]*\"" in
let str = Str.global_replace remove_strings "" str in
if String.(str = "general" || str = "") then
`Number
else
let is_string = String.contains str '@' in
let is_date = String.contains str 'y' || String.contains str 'd'
(* QQ is quarter, NN is day of the week *)
|| String.contains str 'q' || String.contains str 'n'
(* WW is week number *)
|| String.contains str 'w'
|| String.is_substring str ~substring:"mmm" in
let is_time = String.contains str 'h' || String.contains str 's'
|| String.is_substring str ~substring:"am/pm"
|| String.is_substring str ~substring:"a/p" in
if [ is_string ; is_date || is_time ]
|> List.filter ~f:Fn.id
|> List.length > 1 then
failwithf "Ambiguous format string '%s'" str ()
else if is_string then
`String
else if is_date && is_time then
`Datetime
else if is_date then
`Date
else if is_time then
`Time
else
`Number
let of_cell ~styles ~formats ~shared_strings
({ Worksheet.Cell.style_index ; data_type ; _ } as cell) =
let formats = Map.merge built_in_formats formats ~f:(fun ~key ->
function
| `Left v -> Some v
| `Right v -> Some v
| `Both (a, b) ->
if String.(a = b) then Some a
else
failwithf "Got format string with ID %d, \"%s\", but there is a \
built-in format string with the same ID, \"%s\""
key b a ())
in
let str = Worksheet.Cell.to_string ~shared_strings cell in
match data_type with
| Number when String.(str <> "") ->
styles.(Uint32.to_int style_index)
|> Spreadsheetml.Styles.Format.number_format_id
|> Option.map ~f:Uint32.to_int
|> Option.value ~default:0
|> (fun num_fmt_id ->
match Map.find formats num_fmt_id with
| Some format -> format
| None ->
failwithf "Cell referenced numFmtId %d but it's not listed in the \
XLSX file and isn't a known built-in format ID"
num_fmt_id ())
|> classify_format
|> (
let xlsx_epoch = Option.value_exn ~here:[%here]
(Ptime.(of_date (1899, 12, 30))) in
let date_of_float n =
Float.iround_exn ~dir:`Down n
|> (fun days -> Ptime.Span.of_d_ps (days, Int64.zero))
|> Option.value_exn ~here:[%here]
|> Ptime.add_span xlsx_epoch
|> Option.value_exn ~here:[%here]
|> Ptime.to_date
in
let time_of_float n =
Float.((modf n |> Parts.fractional) * 24. * 60. * 60.)
|> Ptime.Span.of_float_s
|> Option.value_exn ~here:[%here]
|> Ptime.of_span
|> Option.value_exn ~here:[%here]
|> Ptime.to_date_time
|> snd
in
let n = Float.of_string str in
function
| `Number -> Number n
| `Date -> Date (date_of_float n)
| `Datetime ->
let date = date_of_float n in
let time = time_of_float n in
Datetime (Ptime.of_date_time (date, time)
|> Option.value_exn ~here:[%here])
| `String -> String str
| `Time ->
Time (time_of_float n))
| _ -> String str
let is_empty = function
| String "" -> true
| _ -> false
end
type sheet =
{ name : string
; rows : Value.t list list }
[@@deriving fields, sexp_of]
type t = sheet list [@@deriving sexp_of]
let read_file filename =
let zip_entry_to_xml zip name =
Zip.find_entry zip name
|> Zip.read_entry zip
|> Xml.parse_string
in
let zip = Zip.open_in filename in
Exn.protect ~f:(fun () ->
let shared_strings =
zip_entry_to_xml zip "xl/sharedStrings.xml"
|> Shared_string_table.of_xml
|> List.to_array
in
let sheets =
zip_entry_to_xml zip "xl/workbook.xml"
|> Workbook.of_xml
|> Workbook.sheets
in
let stylesheet =
zip_entry_to_xml zip "xl/styles.xml"
|> Styles.of_xml
in
let styles =
Styles.cell_formats stylesheet
|> Array.of_list
in
let formats =
Styles.number_formats stylesheet
|> List.map ~f:(fun { Styles.Number_format.id ; format } ->
Uint32.to_int id, format)
|> Map.of_alist_exn (module Int)
in
let rel_map =
zip_entry_to_xml zip "xl/_rels/workbook.xml.rels"
|> Open_packaging.Relationships.of_xml
|> List.map ~f:(fun { Open_packaging.Relationship.id ; target ; _ } ->
id, target)
|> Map.of_alist_exn (module String)
in
List.map sheets ~f:(fun { Workbook.Sheet.name ; id ; _ } ->
let rows =
let target = Map.find_exn rel_map id in
let path = sprintf "xl/%s" target in
let { Worksheet.columns ; rows } =
Zip.find_entry zip path
|> Zip.read_entry zip
|> Xml.parse_string
|> Worksheet.of_xml
in
let num_cols =
columns
|> List.map ~f:Worksheet.Column.max
|> List.map ~f:Uint32.to_int
|> List.max_elt ~compare:Int.compare
|> Option.value ~default:0
in
let row_map =
rows
|> List.map ~f:(fun { Worksheet.Row.row_index ; cells ; _ } ->
let index =
Option.value_exn ~here:[%here] row_index
|> Uint32.to_int
in
let cell_map =
List.map cells ~f:(fun cell ->
Worksheet.Cell.column cell, cell)
|> Map.of_alist_exn (module Int)
in
let cells =
Map.max_elt cell_map
|> Option.map ~f:(fun (max, _) ->
List.init (max + 1) ~f:(fun i ->
Map.find cell_map i
|> Option.value ~default:Worksheet.Cell.default))
|> Option.value ~default:[]
in
index - 1, cells)
|> Map.of_alist_exn (module Int)
in
let n =
Map.keys row_map
|> List.max_elt ~compare:Int.compare
|> Option.map ~f:((+) 1)
|> Option.value ~default:0
in
List.init n ~f:Fn.id
|> List.map ~f:(fun i ->
let row =
Map.find row_map i
|> Option.value ~default:[]
in
let missing_cols = num_cols - List.length row in
if missing_cols > 0 then
row @ List.init ~f:(Fn.const Worksheet.Cell.default) missing_cols
else
row)
|> List.map ~f:(List.map ~f:(Value.of_cell ~styles ~formats ~shared_strings))
in
{ name ; rows }))
~finally:(fun () -> Zip.close_in zip)
| null | https://raw.githubusercontent.com/brendanlong/ocaml-ooxml/dd6b8496d22670cc0fa0b50990414f955267ded9/easy_xlsx/src/easy_xlsx.ml | ocaml | open Base
open Base.Printf
open Stdint
open Spreadsheetml
module Value = struct
type t =
| Date of Ptime.date
| Datetime of Ptime.t
| Number of float
| String of string
| Time of Ptime.time
let to_string = function
| Date (y, m, d) -> sprintf "%d-%d-%d" y m d
| Datetime t -> Ptime.to_rfc3339 t
| Number f ->
Float.to_string f
|> String.rstrip ~drop:(function '.' -> true | _ -> false)
| String s -> s
| Time ((h, m, s), _tz) -> sprintf "%d:%d:%d" h m s
let sexp_of_t = function
| Number n -> Float.sexp_of_t n
| String s -> String.sexp_of_t s
| _ as t -> to_string t |> String.sexp_of_t
let built_in_formats =
FIXME : This is only the English format codes . There are 4 Asian format
codes ( Chinese Simplified , Chinese Traditional , Japanese , Korean ) and it 's
not clear to me how we 're supposed to pick between them .
codes (Chinese Simplified, Chinese Traditional, Japanese, Korean) and it's
not clear to me how we're supposed to pick between them. *)
[ 0, "general"
; 1, "0"
; 2, "0.00"
; 3, "#,##0"
; 4, "#,##0,00"
; 9, "0%"
; 10, "0.00%"
; 11, "0.00E+00"
; 12, "# ?/?"
; 13, "# ??/??"
; 14, "mm-dd-yy"
; 15, "d-mmm-yy"
; 16, "d-mmm"
; 17, "mmm-yy"
; 18, "h:mm AM/PM"
; 19, "h:mm:ss AM/PM"
; 20, "h:mm"
; 21, "h:mm:ss"
; 22, "m/d/yy h:mm"
; 37, "#,##0 ;(#,##0)"
; 38, "#,##0 ;[Red](#,##0)"
; 39, "#,##0.00;(#,##0.00)"
; 40, "#,##0.00;[Red](#,##0.00)"
; 45, "mm:ss"
; 46, "[h]:mm:ss"
; 47, "mmss.0"
; 48, "##0.0E+0"
; 49, "@" ]
|> Map.of_alist_exn (module Int)
let classify_format str =
(* FIXME: This is really slow. We should really use Menhir for this. *)
let str = String.lowercase str in
let remove_strings = Str.regexp "\\[[^\\[]*\\]\\|\"[^\"]*\"" in
let str = Str.global_replace remove_strings "" str in
if String.(str = "general" || str = "") then
`Number
else
let is_string = String.contains str '@' in
let is_date = String.contains str 'y' || String.contains str 'd'
(* QQ is quarter, NN is day of the week *)
|| String.contains str 'q' || String.contains str 'n'
(* WW is week number *)
|| String.contains str 'w'
|| String.is_substring str ~substring:"mmm" in
let is_time = String.contains str 'h' || String.contains str 's'
|| String.is_substring str ~substring:"am/pm"
|| String.is_substring str ~substring:"a/p" in
if [ is_string ; is_date || is_time ]
|> List.filter ~f:Fn.id
|> List.length > 1 then
failwithf "Ambiguous format string '%s'" str ()
else if is_string then
`String
else if is_date && is_time then
`Datetime
else if is_date then
`Date
else if is_time then
`Time
else
`Number
let of_cell ~styles ~formats ~shared_strings
({ Worksheet.Cell.style_index ; data_type ; _ } as cell) =
let formats = Map.merge built_in_formats formats ~f:(fun ~key ->
function
| `Left v -> Some v
| `Right v -> Some v
| `Both (a, b) ->
if String.(a = b) then Some a
else
failwithf "Got format string with ID %d, \"%s\", but there is a \
built-in format string with the same ID, \"%s\""
key b a ())
in
let str = Worksheet.Cell.to_string ~shared_strings cell in
match data_type with
| Number when String.(str <> "") ->
styles.(Uint32.to_int style_index)
|> Spreadsheetml.Styles.Format.number_format_id
|> Option.map ~f:Uint32.to_int
|> Option.value ~default:0
|> (fun num_fmt_id ->
match Map.find formats num_fmt_id with
| Some format -> format
| None ->
failwithf "Cell referenced numFmtId %d but it's not listed in the \
XLSX file and isn't a known built-in format ID"
num_fmt_id ())
|> classify_format
|> (
let xlsx_epoch = Option.value_exn ~here:[%here]
(Ptime.(of_date (1899, 12, 30))) in
let date_of_float n =
Float.iround_exn ~dir:`Down n
|> (fun days -> Ptime.Span.of_d_ps (days, Int64.zero))
|> Option.value_exn ~here:[%here]
|> Ptime.add_span xlsx_epoch
|> Option.value_exn ~here:[%here]
|> Ptime.to_date
in
let time_of_float n =
Float.((modf n |> Parts.fractional) * 24. * 60. * 60.)
|> Ptime.Span.of_float_s
|> Option.value_exn ~here:[%here]
|> Ptime.of_span
|> Option.value_exn ~here:[%here]
|> Ptime.to_date_time
|> snd
in
let n = Float.of_string str in
function
| `Number -> Number n
| `Date -> Date (date_of_float n)
| `Datetime ->
let date = date_of_float n in
let time = time_of_float n in
Datetime (Ptime.of_date_time (date, time)
|> Option.value_exn ~here:[%here])
| `String -> String str
| `Time ->
Time (time_of_float n))
| _ -> String str
let is_empty = function
| String "" -> true
| _ -> false
end
type sheet =
{ name : string
; rows : Value.t list list }
[@@deriving fields, sexp_of]
type t = sheet list [@@deriving sexp_of]
let read_file filename =
let zip_entry_to_xml zip name =
Zip.find_entry zip name
|> Zip.read_entry zip
|> Xml.parse_string
in
let zip = Zip.open_in filename in
Exn.protect ~f:(fun () ->
let shared_strings =
zip_entry_to_xml zip "xl/sharedStrings.xml"
|> Shared_string_table.of_xml
|> List.to_array
in
let sheets =
zip_entry_to_xml zip "xl/workbook.xml"
|> Workbook.of_xml
|> Workbook.sheets
in
let stylesheet =
zip_entry_to_xml zip "xl/styles.xml"
|> Styles.of_xml
in
let styles =
Styles.cell_formats stylesheet
|> Array.of_list
in
let formats =
Styles.number_formats stylesheet
|> List.map ~f:(fun { Styles.Number_format.id ; format } ->
Uint32.to_int id, format)
|> Map.of_alist_exn (module Int)
in
let rel_map =
zip_entry_to_xml zip "xl/_rels/workbook.xml.rels"
|> Open_packaging.Relationships.of_xml
|> List.map ~f:(fun { Open_packaging.Relationship.id ; target ; _ } ->
id, target)
|> Map.of_alist_exn (module String)
in
List.map sheets ~f:(fun { Workbook.Sheet.name ; id ; _ } ->
let rows =
let target = Map.find_exn rel_map id in
let path = sprintf "xl/%s" target in
let { Worksheet.columns ; rows } =
Zip.find_entry zip path
|> Zip.read_entry zip
|> Xml.parse_string
|> Worksheet.of_xml
in
let num_cols =
columns
|> List.map ~f:Worksheet.Column.max
|> List.map ~f:Uint32.to_int
|> List.max_elt ~compare:Int.compare
|> Option.value ~default:0
in
let row_map =
rows
|> List.map ~f:(fun { Worksheet.Row.row_index ; cells ; _ } ->
let index =
Option.value_exn ~here:[%here] row_index
|> Uint32.to_int
in
let cell_map =
List.map cells ~f:(fun cell ->
Worksheet.Cell.column cell, cell)
|> Map.of_alist_exn (module Int)
in
let cells =
Map.max_elt cell_map
|> Option.map ~f:(fun (max, _) ->
List.init (max + 1) ~f:(fun i ->
Map.find cell_map i
|> Option.value ~default:Worksheet.Cell.default))
|> Option.value ~default:[]
in
index - 1, cells)
|> Map.of_alist_exn (module Int)
in
let n =
Map.keys row_map
|> List.max_elt ~compare:Int.compare
|> Option.map ~f:((+) 1)
|> Option.value ~default:0
in
List.init n ~f:Fn.id
|> List.map ~f:(fun i ->
let row =
Map.find row_map i
|> Option.value ~default:[]
in
let missing_cols = num_cols - List.length row in
if missing_cols > 0 then
row @ List.init ~f:(Fn.const Worksheet.Cell.default) missing_cols
else
row)
|> List.map ~f:(List.map ~f:(Value.of_cell ~styles ~formats ~shared_strings))
in
{ name ; rows }))
~finally:(fun () -> Zip.close_in zip)
|
|
9105bdb31a466f4715ff60eecb37204a577d4445e85f60fa35843064c817bde4 | cxphoe/SICP-solutions | 1.23.rkt | (define (square x) (* x x))
(define (smallest-divisor n)
(find-divisor n 2))
; find divisors without testing even numbers if
; (not (= (remainder num 2) 0)) which CANNOT halve
; the time to test a prime because of the extra
; procedures
(define (find-divisor n test-divisor)
(define (next num)
(if (= num 2)
3
(+ num 2)))
(cond ((> (square test-divisor) n) n)
((divides? test-divisor n) test-divisor)
(else (find-divisor n (next test-divisor)))))
; not to test even numbers halve the time
; BUT also won't exceed the former time literally
; because the extra procedures is less than the
; follow procedures
(define (divides? a b)
(= (remainder b a) 0))
(define (prime? n)
(= n (smallest-divisor n)))
(define (timed-prime-test n)
(newline)
(display n)
(start-prime-test n (runtime)))
(define (start-prime-test n start-time)
(if (prime? n)
(report-prime (- (runtime) start-time))))
(define (report-prime elapsed-time)
(display " *** ")
(display elapsed-time))
; search for primes within odd numbers
; with a given number as start
(define (search-for-primes start)
(define (sub-search num)
(if (not (prime? num))
(sub-search (+ num 2))
(timed-prime-test num)))
(if (even? start)
(sub-search (+ start 1))
(sub-seatch start))) | null | https://raw.githubusercontent.com/cxphoe/SICP-solutions/d35bb688db0320f6efb3b3bde1a14ce21da319bd/Chapter%201-Building%20Abstractions%20with%20Procedures/1.23.rkt | racket | find divisors without testing even numbers if
the time to test a prime because of the extra
procedures
BUT also won't exceed the former time literally
because the extra procedures is less than the
follow procedures
search for primes within odd numbers
with a given number as start | (define (square x) (* x x))
(define (smallest-divisor n)
(find-divisor n 2))
( not (= ( remainder num 2 ) 0 ) which CANNOT halve
(define (find-divisor n test-divisor)
(define (next num)
(if (= num 2)
3
(+ num 2)))
(cond ((> (square test-divisor) n) n)
((divides? test-divisor n) test-divisor)
(else (find-divisor n (next test-divisor)))))
not to test even numbers halve the time
(define (divides? a b)
(= (remainder b a) 0))
(define (prime? n)
(= n (smallest-divisor n)))
(define (timed-prime-test n)
(newline)
(display n)
(start-prime-test n (runtime)))
(define (start-prime-test n start-time)
(if (prime? n)
(report-prime (- (runtime) start-time))))
(define (report-prime elapsed-time)
(display " *** ")
(display elapsed-time))
(define (search-for-primes start)
(define (sub-search num)
(if (not (prime? num))
(sub-search (+ num 2))
(timed-prime-test num)))
(if (even? start)
(sub-search (+ start 1))
(sub-seatch start))) |
bee64447169c6fb4037bcfe30bed305e90e959218fe4271892f15ee898e549fa | NorfairKing/mergeful | TwoClientsSpec.hs | {-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Data.Mergeful.Persistent.TwoClientsSpec
( spec,
)
where
import Control.Monad.Reader
import qualified Data.Map as M
import Data.Mergeful
import qualified Data.Set as S
import Database.Persist.Sql
import Test.Syd hiding (Timed (..), runTest)
import Test.Syd.Validity
import TestUtils
{-# ANN module ("HLint: ignore Reduce duplication" :: String) #-}
spec :: Spec
spec =
twoClientsSpec $ do
describe "sanity" $ do
describe "setupClient & clientGetStore" $ do
it "roundtrips" $ \te -> forAllValid $ \cstore -> runTest te $ do
setupClient A cstore
cstore' <- clientGetStore A
liftIO $ cstore' `shouldBe` cstore
describe "setupServer & serverGetStore" $ do
it "roundtrips" $ \te -> forAllValid $ \sstore -> runTest te $ do
setupServer sstore
sstore' <- serverGetStore
liftIO $ sstore' `shouldBe` sstore
describe "mergeFromServerStrategy" $ do
let strat = mergeFromServerStrategy
mergeFunctionSpec strat
noDivergenceSpec strat
xdescribe "Does not hold" $ noDataLossSpec strat
describe "mergeFromClientStrategy" $ do
let strat = mergeFromClientStrategy
mergeFunctionSpec strat
noDataLossSpec strat
xdescribe "Does not hold" $ noDivergenceSpec strat
describe "mergeUsingCRDTStrategy" $ do
let strat = mergeUsingCRDTStrategy max
mergeFunctionSpec strat
noDataLossSpec strat
noDivergenceSpec strat
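-- Illustrative sketch (added; not part of the original spec): every scenario
-- below runs the same round trip for each client, e.g.
--
-- > req1  <- clientMakeSyncRequest A
-- > resp1 <- serverProcessSync req1
-- > clientMergeSyncResponse strat A resp1
--
-- after which the two client stores are expected to agree.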
mergeFunctionSpec :: ItemMergeStrategy Thing -> SpecWith TestEnv
mergeFunctionSpec strat = do
let mergeFunc = clientMergeSyncResponse strat
describe "Multiple clients" $ do
describe "Single item" $ do
it "successfully syncs an addition accross to a second client" $ \te -> forAllValid $ \k -> forAllValid $ \i -> runTest te $ do
        -- Client A has one item
setupClient A $ initialClientStore {clientStoreAddedItems = M.singleton k i}
-- Client B is empty
setupClient B initialClientStore
-- The server is empty
setupServer initialServerStore
        -- Client A makes sync request 1
req1 <- clientMakeSyncRequest A
        -- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let addedItems = syncResponseClientAdded resp1
case M.toList addedItems of
[(k', ClientAddition uuid st)] -> do
lift $ k' `shouldBe` k
let time = initialServerTime
lift $ st `shouldBe` time
let items = M.singleton uuid (Timed i st)
lift $ sstore2 `shouldBe` (ServerStore {serverStoreItems = items})
-- Client A merges the response
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` (initialClientStore {clientStoreSyncedItems = items})
            -- Client B makes sync request 2
req2 <- clientMakeSyncRequest B
            -- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` (emptySyncResponse {syncResponseServerAdded = items})
sstore3 `shouldBe` sstore2
-- Client B merges the response
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ cBstore2 `shouldBe` (initialClientStore {clientStoreSyncedItems = items})
-- Client A and Client B now have the same store
lift $ cAstore2 `shouldBe` cBstore2
_ ->
lift $
expectationFailure
"Should have found exactly one added item."
it "successfully syncs a modification accross to a second client" $ \te -> forAllValid $ \uuid -> forAllValid $ \i -> forAllValid $ \j -> forAllValid $ \time1 ->
runTest te $ do
-- Client A has a synced item.
setupClient A $
initialClientStore
{ clientStoreSyncedItems = M.singleton uuid (Timed i time1)
}
-- Client B had synced that same item, but has since modified it
setupClient B $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed j time1)
}
          -- The server has the item that both clients had before
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed i time1)}
          -- Client B makes sync request 1
req1 <- clientMakeSyncRequest B
          -- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let time2 = incrementServerTime time1
lift $ do
resp1
`shouldBe` emptySyncResponse {syncResponseClientChanged = M.singleton uuid time2}
sstore2
`shouldBe` ServerStore {serverStoreItems = M.singleton uuid (Timed j time2)}
-- Client B merges the response
mergeFunc B resp1
cBstore2 <- clientGetStore B
lift $
cBstore2
`shouldBe` initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed j time2)}
          -- Client A makes sync request 2
req2 <- clientMakeSyncRequest A
          -- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2
`shouldBe` emptySyncResponse
{ syncResponseServerChanged = M.singleton uuid (Timed j time2)
}
sstore3 `shouldBe` sstore2
-- Client A merges the response
mergeFunc A resp2
cAstore2 <- clientGetStore A
lift $
cAstore2
`shouldBe` initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed j time2)}
-- Client A and Client B now have the same store
lift $ cAstore2 `shouldBe` cBstore2
it "succesfully syncs a deletion across to a second client" $ \te -> forAllValid $ \uuid -> forAllValid $ \time1 -> forAllValid $ \i ->
runTest te $ do
setupClient A $
initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed i time1)}
-- Client A has a synced item.
-- Client B had synced that same item, but has since deleted it.
setupClient B $ initialClientStore {clientStoreDeletedItems = M.singleton uuid time1}
-- The server still has the undeleted item
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed i time1)}
          -- Client B makes sync request 1
req1 <- clientMakeSyncRequest B
          -- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
lift $ do
resp1 `shouldBe` emptySyncResponse {syncResponseClientDeleted = S.singleton uuid}
sstore2 `shouldBe` initialServerStore
-- Client B merges the response
mergeFunc B resp1
cBstore2 <- clientGetStore B
lift $ cBstore2 `shouldBe` initialClientStore
          -- Client A makes sync request 2
req2 <- clientMakeSyncRequest A
          -- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` emptySyncResponse {syncResponseServerDeleted = S.singleton uuid}
sstore3 `shouldBe` sstore2
-- Client A merges the response
mergeFunc A resp2
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` initialClientStore
-- Client A and Client B now have the same store
lift $ cAstore2 `shouldBe` cBstore2
it "does not run into a conflict if two clients both try to sync a deletion" $ \te -> forAllValid $ \uuid -> forAllValid $ \time1 -> forAllValid $ \i ->
runTest te $ do
setupClient A $ initialClientStore {clientStoreDeletedItems = M.singleton uuid time1}
-- Both client a and client b delete an item.
setupClient B $ initialClientStore {clientStoreDeletedItems = M.singleton uuid time1}
-- The server still has the undeleted item
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed i time1)}
          -- Client A makes sync request 1
req1 <- clientMakeSyncRequest A
          -- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
lift $ do
resp1
`shouldBe` (emptySyncResponse {syncResponseClientDeleted = S.singleton uuid})
sstore2 `shouldBe` (ServerStore {serverStoreItems = M.empty})
-- Client A merges the response
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` initialClientStore
          -- Client B makes sync request 2
req2 <- clientMakeSyncRequest B
          -- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2
`shouldBe` (emptySyncResponse {syncResponseClientDeleted = S.singleton uuid})
sstore3 `shouldBe` sstore2
-- Client B merges the response
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ do
cBstore2 `shouldBe` initialClientStore
-- Client A and Client B now have the same store
cAstore2 `shouldBe` cBstore2
describe "Multiple items" $ do
it "successfully syncs additions accross to a second client" $ \te -> forAllValid $ \is ->
runTest te $ do
setupClient A $ initialClientStore {clientStoreAddedItems = is}
-- Client B is empty
setupClient B initialClientStore
-- The server is empty
setupServer initialServerStore
          -- Client A makes sync request 1
req1 <- clientMakeSyncRequest A
          -- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let (rest, items) = mergeAddedItems is (syncResponseClientAdded resp1)
lift $ do
rest `shouldBe` M.empty
sstore2 `shouldBe` (ServerStore {serverStoreItems = items})
-- Client A merges the response
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` (initialClientStore {clientStoreSyncedItems = items})
-- Client B makes sync request 2
req2 <- clientMakeSyncRequest B
-- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` (emptySyncResponse {syncResponseServerAdded = items})
sstore3 `shouldBe` sstore2
-- Client B merges the response
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ cBstore2 `shouldBe` (initialClientStore {clientStoreSyncedItems = items})
-- Client A and Client B now have the same store
lift $ cAstore2 `shouldBe` cBstore2
it "succesfully syncs deletions across to a second client" $ \te -> forAllValid $ \items -> forAllValid $ \time1 ->
runTest te $ do
let syncedItems = M.map (\i -> Timed i time1) items
itemTimes = M.map (const time1) items
itemIds = M.keysSet items
setupClient A $ initialClientStore {clientStoreSyncedItems = syncedItems}
-- Client A has synced items
-- Client B had synced the same items, but has since deleted them.
setupClient B $ initialClientStore {clientStoreDeletedItems = itemTimes}
-- The server still has the undeleted item
setupServer $ ServerStore {serverStoreItems = syncedItems}
-- Client B makes sync request 1
req1 <- clientMakeSyncRequest B
-- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
lift $ do
resp1 `shouldBe` emptySyncResponse {syncResponseClientDeleted = itemIds}
sstore2 `shouldBe` initialServerStore
-- Client B merges the response
mergeFunc B resp1
cBstore2 <- clientGetStore B
lift $ cBstore2 `shouldBe` initialClientStore
-- Client A makes sync request 2
req2 <- clientMakeSyncRequest A
-- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` emptySyncResponse {syncResponseServerDeleted = itemIds}
sstore3 `shouldBe` sstore2
-- Client A merges the response
mergeFunc A resp2
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` initialClientStore
-- Client A and Client B now have the same store
lift $ cAstore2 `shouldBe` cBstore2
it "does not run into a conflict if two clients both try to sync a deletion" $ \te -> forAllValid $ \items -> forAllValid $ \time1 ->
runTest te $ do
setupClient A $
initialClientStore {clientStoreDeletedItems = M.map (const time1) items}
-- Both client a and client b delete their items.
setupClient B $
initialClientStore {clientStoreDeletedItems = M.map (const time1) items}
-- The server still has the undeleted items
setupServer $ ServerStore {serverStoreItems = M.map (\i -> Timed i time1) items}
-- Client A makes sync request 1
req1 <- clientMakeSyncRequest A
-- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
lift $ do
resp1 `shouldBe` (emptySyncResponse {syncResponseClientDeleted = M.keysSet items})
-- TODO will probably need some sort of tombstoning.
-- Client A merges the response
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` initialClientStore
-- Client B makes sync request 2
req2 <- clientMakeSyncRequest B
-- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` (emptySyncResponse {syncResponseClientDeleted = M.keysSet items})
sstore3 `shouldBe` sstore2
-- Client B merges the response
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ do
cBstore2 `shouldBe` initialClientStore
-- Client A and Client B now have the same store
cAstore2 `shouldBe` cBstore2
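-- The cases above all repeat the same request/process/merge round trip. A
-- minimal helper capturing that cycle could look like the sketch below; it is
-- not part of the original spec and only reuses clientMakeSyncRequest,
-- serverProcessSync and clientMergeSyncResponse defined later in this module.
syncOnce :: ItemMergeStrategy Thing -> Client -> T ()
syncOnce strat client = do
  req <- clientMakeSyncRequest client
  resp <- serverProcessSync req
  clientMergeSyncResponse strat client resp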
noDataLossSpec ::
ItemMergeStrategy Thing ->
SpecWith TestEnv
noDataLossSpec strat = do
let mergeFunc = clientMergeSyncResponse strat
it "does not lose data after a conflict occurs" $ \te -> forAllValid $ \uuid -> forAllValid $ \time1 -> forAllValid $ \i1 -> forAllValid $ \i2 -> forAllValid $ \i3 ->
runTest te $ do
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed i1 time1)}
-- The server has an item
-- The first client has synced it, and modified it.
setupClient A $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed i2 time1)
}
-- The second client has synced it too, and modified it too.
setupClient B $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed i3 time1)
}
-- Client A makes sync request 1
req1 <- clientMakeSyncRequest A
-- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let time2 = incrementServerTime time1
-- The server updates the item accordingly
lift $ do
resp1
`shouldBe` (emptySyncResponse {syncResponseClientChanged = M.singleton uuid time2})
sstore2
`shouldBe` (ServerStore {serverStoreItems = M.singleton uuid (Timed i2 time2)})
-- Client A merges the response
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $
cAstore2
`shouldBe` (initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed i2 time2)})
-- Client B makes sync request 2
req2 <- clientMakeSyncRequest B
-- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
-- The server reports a conflict and does not change its store
lift $ do
resp2
`shouldBe` (emptySyncResponse {syncResponseConflicts = M.singleton uuid (Timed i2 time2)})
sstore3 `shouldBe` sstore2
-- Client B merges the response
clientMergeSyncResponse mergeFromClientStrategy B resp2
cBstore2 <- clientGetStore B
-- Client does not update, but keeps its conflict
-- Client A and Client B now *do not* have the same store
lift $
cBstore2
`shouldBe` ( initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed i3 time1)
}
)
noDivergenceSpec :: ItemMergeStrategy Thing -> SpecWith TestEnv
noDivergenceSpec strat = do
let mergeFunc = clientMergeSyncResponse strat
it "does not diverge after a conflict occurs" $ \te ->
forAllValid $ \uuid -> forAllValid $ \time1 -> forAllValid $ \iS -> forAllValid $ \iA ->
forAllValid $ \iB ->
runTest te $ do
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed iS time1)}
-- The server has an item
-- The first client has synced it, and modified it.
setupClient A $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed iA time1)
}
-- The second client has synced it too, and modified it too.
setupClient B $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed iB time1)
}
-- Client A makes sync request 1
req1 <- clientMakeSyncRequest A
-- The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let time2 = incrementServerTime time1
-- The server updates the item accordingly
lift $ do
resp1
`shouldBe` (emptySyncResponse {syncResponseClientChanged = M.singleton uuid time2})
sstore2
`shouldBe` (ServerStore {serverStoreItems = M.singleton uuid (Timed iA time2)})
-- Client A merges the response
mergeFunc A resp1
cAstore2 <- clientGetStore A
-- Client A has the item from the server because there was no conflict.
lift $
cAstore2
`shouldBe` initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed iA time2)}
-- Client B makes sync request 2
req2 <- clientMakeSyncRequest B
-- The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
-- The server reports a conflict and does not change its store
lift $ do
resp2
`shouldBe` (emptySyncResponse {syncResponseConflicts = M.singleton uuid (Timed iA time2)})
sstore3 `shouldBe` sstore2
-- Client B merges the response
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ do
let expected = case itemMergeStrategyMergeChangeConflict strat iB iA of
KeepLocal -> initialClientStore {clientStoreSyncedButChangedItems = M.singleton uuid (Timed iB time1)}
TakeRemote -> initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed iA time2)}
Merged im -> initialClientStore {clientStoreSyncedButChangedItems = M.singleton uuid (Timed im time2)}
cBstore2
`shouldBe` expected
-- In case of a previous merge, the synced item will still be changed, so we need to sync again with B and then with A
req3 <- clientMakeSyncRequest B
resp3 <- serverProcessSync req3
mergeFunc B resp3
cBstore3 <- clientGetStore B
req4 <- clientMakeSyncRequest A
resp4 <- serverProcessSync req4
mergeFunc A resp4
cAstore3 <- clientGetStore A
lift $
cBstore3 `shouldBe` cAstore3
type T a = ReaderT TestEnv IO a
runTest :: TestEnv -> T a -> IO a
runTest = flip runReaderT
runClientDB :: Client -> SqlPersistT IO a -> T a
runClientDB num func = do
pool <- asks $ case num of
A -> testEnvClient1Pool
B -> testEnvClient2Pool
liftIO $ runSqlPool func pool
runServerDB :: SqlPersistT IO a -> T a
runServerDB func = do
pool <- asks testEnvServerPool
liftIO $ runSqlPool func pool
type CS = ClientStore ClientThingId ServerThingId Thing
type SReq = SyncRequest ClientThingId ServerThingId Thing
type SS = ServerStore ServerThingId Thing
type SResp = SyncResponse ClientThingId ServerThingId Thing
setupClient :: Client -> CS -> T ()
setupClient client = runClientDB client . setupClientThingQuery
setupServer :: SS -> T ()
setupServer = runServerDB . setupServerThingQuery
clientGetStore :: Client -> T CS
clientGetStore client = runClientDB client clientGetStoreThingQuery
clientMakeSyncRequest :: Client -> T SReq
clientMakeSyncRequest client = runClientDB client clientMakeSyncRequestThingQuery
serverGetStore :: T SS
serverGetStore = runServerDB serverGetStoreThingQuery
serverProcessSync :: SReq -> T SResp
serverProcessSync = runServerDB . serverProcessSyncThingQuery
clientMergeSyncResponse :: ItemMergeStrategy Thing -> Client -> SResp -> T ()
clientMergeSyncResponse strat client = runClientDB client . clientMergeSyncResponseThingQuery strat
data Client = A | B
deriving (Show, Eq)
data TestEnv = TestEnv
{ testEnvServerPool :: !ConnectionPool,
testEnvClient1Pool :: !ConnectionPool,
testEnvClient2Pool :: !ConnectionPool
}
twoClientsSpec :: SpecWith TestEnv -> Spec
twoClientsSpec =
modifyMaxSuccess (`div` 10)
. around withTestEnv
withTestEnv :: (TestEnv -> IO a) -> IO a
withTestEnv func =
withServerPool $ \serverPool ->
withClientPool $ \client1Pool ->
withClientPool $ \client2Pool -> do
let tenv =
TestEnv
{ testEnvServerPool = serverPool,
testEnvClient1Pool = client1Pool,
testEnvClient2Pool = client2Pool
}
liftIO $ func tenv
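-- Usage sketch (not part of the original module): the spec above can be run
-- on its own with sydtest's default runner; sydTest (:: Spec -> IO ()) is
-- assumed to be exported by the Test.Syd import above.
runTwoClientsSpec :: IO ()
runTwoClientsSpec = sydTest spec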
| null | https://raw.githubusercontent.com/NorfairKing/mergeful/8f1686891e306edcb26daf5ec1782f94f6ae326c/mergeful-persistent/test/Data/Mergeful/Persistent/TwoClientsSpec.hs | haskell | # LANGUAGE RankNTypes #
Client B is empty
The server is empty
Client A merges the response
Client B merges the response
Client A and Client B now have the same store
Client A has a synced item.
Client B had synced that same item, but has since modified it
The server is has the item that both clients had before
Client B merges the response
Client A merges the response
Client A and Client B now have the same store
Client A has a synced item.
Client B had synced that same item, but has since deleted it.
The server still has the undeleted item
Client B merges the response
Client A merges the response
Client A and Client B now have the same store
Both client a and client b delete an item.
The server still has the undeleted item
Client A merges the response
Client B merges the response
Client A and Client B now have the same store
Client B is empty
The server is empty
Client A merges the response
Client B merges the response
Client A and Client B now have the same store
Client A has synced items
Client B had synced the same items, but has since deleted them.
The server still has the undeleted item
Client B merges the response
Client A merges the response
Client A and Client B now have the same store
Both client a and client b delete their items.
The server still has the undeleted items
Client A merges the response
Client B merges the response
Client A and Client B now have the same store
The server has an item
The server updates the item accordingly
Client A merges the response
The server reports a conflict and does not change its store
Client B merges the response
Client does not update, but keeps its conflict
Client A and Client B now *do not* have the same store
The server has an item
The server updates the item accordingly
Client A merges the response
Client A has the item from the server because there was no conflict.
The server reports a conflict and does not change its store
Client B merges the response
In case of a previous merge, the synced item will still be changed, so we need to sync again with B and then with A | # LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Data.Mergeful.Persistent.TwoClientsSpec
( spec,
)
where
import Control.Monad.Reader
import qualified Data.Map as M
import Data.Mergeful
import qualified Data.Set as S
import Database.Persist.Sql
import Test.Syd hiding (Timed (..), runTest)
import Test.Syd.Validity
import TestUtils
{-# ANN module ("HLint: ignore Reduce duplication" :: String) #-}
spec :: Spec
spec =
twoClientsSpec $ do
describe "sanity" $ do
describe "setupClient & clientGetStore" $ do
it "roundtrips" $ \te -> forAllValid $ \cstore -> runTest te $ do
setupClient A cstore
cstore' <- clientGetStore A
liftIO $ cstore' `shouldBe` cstore
describe "setupServer & serverGetStore" $ do
it "roundtrips" $ \te -> forAllValid $ \sstore -> runTest te $ do
setupServer sstore
sstore' <- serverGetStore
liftIO $ sstore' `shouldBe` sstore
describe "mergeFromServerStrategy" $ do
let strat = mergeFromServerStrategy
mergeFunctionSpec strat
noDivergenceSpec strat
xdescribe "Does not hold" $ noDataLossSpec strat
describe "mergeFromClientStrategy" $ do
let strat = mergeFromClientStrategy
mergeFunctionSpec strat
noDataLossSpec strat
xdescribe "Does not hold" $ noDivergenceSpec strat
describe "mergeUsingCRDTStrategy" $ do
let strat = mergeUsingCRDTStrategy max
mergeFunctionSpec strat
noDataLossSpec strat
noDivergenceSpec strat
mergeFunctionSpec :: ItemMergeStrategy Thing -> SpecWith TestEnv
mergeFunctionSpec strat = do
let mergeFunc = clientMergeSyncResponse strat
describe "Multiple clients" $ do
describe "Single item" $ do
it "successfully syncs an addition accross to a second client" $ \te -> forAllValid $ \k -> forAllValid $ \i -> runTest te $ do
Client A has one item
setupClient A $ initialClientStore {clientStoreAddedItems = M.singleton k i}
setupClient B initialClientStore
setupServer initialServerStore
Client A makes sync request 1
req1 <- clientMakeSyncRequest A
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let addedItems = syncResponseClientAdded resp1
case M.toList addedItems of
[(k', ClientAddition uuid st)] -> do
lift $ k' `shouldBe` k
let time = initialServerTime
lift $ st `shouldBe` time
let items = M.singleton uuid (Timed i st)
lift $ sstore2 `shouldBe` (ServerStore {serverStoreItems = items})
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` (initialClientStore {clientStoreSyncedItems = items})
Client B makes sync request 2
req2 <- clientMakeSyncRequest B
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` (emptySyncResponse {syncResponseServerAdded = items})
sstore3 `shouldBe` sstore2
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ cBstore2 `shouldBe` (initialClientStore {clientStoreSyncedItems = items})
lift $ cAstore2 `shouldBe` cBstore2
_ ->
lift $
expectationFailure
"Should have found exactly one added item."
it "successfully syncs a modification accross to a second client" $ \te -> forAllValid $ \uuid -> forAllValid $ \i -> forAllValid $ \j -> forAllValid $ \time1 ->
runTest te $ do
setupClient A $
initialClientStore
{ clientStoreSyncedItems = M.singleton uuid (Timed i time1)
}
setupClient B $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed j time1)
}
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed i time1)}
Client B makes sync request 1
req1 <- clientMakeSyncRequest B
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let time2 = incrementServerTime time1
lift $ do
resp1
`shouldBe` emptySyncResponse {syncResponseClientChanged = M.singleton uuid time2}
sstore2
`shouldBe` ServerStore {serverStoreItems = M.singleton uuid (Timed j time2)}
mergeFunc B resp1
cBstore2 <- clientGetStore B
lift $
cBstore2
`shouldBe` initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed j time2)}
Client A makes sync request 2
req2 <- clientMakeSyncRequest A
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2
`shouldBe` emptySyncResponse
{ syncResponseServerChanged = M.singleton uuid (Timed j time2)
}
sstore3 `shouldBe` sstore2
mergeFunc A resp2
cAstore2 <- clientGetStore A
lift $
cAstore2
`shouldBe` initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed j time2)}
lift $ cAstore2 `shouldBe` cBstore2
it "succesfully syncs a deletion across to a second client" $ \te -> forAllValid $ \uuid -> forAllValid $ \time1 -> forAllValid $ \i ->
runTest te $ do
setupClient A $
initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed i time1)}
setupClient B $ initialClientStore {clientStoreDeletedItems = M.singleton uuid time1}
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed i time1)}
Client B makes sync request 1
req1 <- clientMakeSyncRequest B
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
lift $ do
resp1 `shouldBe` emptySyncResponse {syncResponseClientDeleted = S.singleton uuid}
sstore2 `shouldBe` initialServerStore
mergeFunc B resp1
cBstore2 <- clientGetStore B
lift $ cBstore2 `shouldBe` initialClientStore
Client A makes sync request 2
req2 <- clientMakeSyncRequest A
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` emptySyncResponse {syncResponseServerDeleted = S.singleton uuid}
sstore3 `shouldBe` sstore2
mergeFunc A resp2
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` initialClientStore
lift $ cAstore2 `shouldBe` cBstore2
it "does not run into a conflict if two clients both try to sync a deletion" $ \te -> forAllValid $ \uuid -> forAllValid $ \time1 -> forAllValid $ \i ->
runTest te $ do
setupClient A $ initialClientStore {clientStoreDeletedItems = M.singleton uuid time1}
setupClient B $ initialClientStore {clientStoreDeletedItems = M.singleton uuid time1}
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed i time1)}
Client A makes sync request 1
req1 <- clientMakeSyncRequest A
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
lift $ do
resp1
`shouldBe` (emptySyncResponse {syncResponseClientDeleted = S.singleton uuid})
sstore2 `shouldBe` (ServerStore {serverStoreItems = M.empty})
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` initialClientStore
Client B makes sync request 2
req2 <- clientMakeSyncRequest B
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2
`shouldBe` (emptySyncResponse {syncResponseClientDeleted = S.singleton uuid})
sstore3 `shouldBe` sstore2
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ do
cBstore2 `shouldBe` initialClientStore
cAstore2 `shouldBe` cBstore2
describe "Multiple items" $ do
it "successfully syncs additions accross to a second client" $ \te -> forAllValid $ \is ->
runTest te $ do
setupClient A $ initialClientStore {clientStoreAddedItems = is}
setupClient B initialClientStore
setupServer initialServerStore
Client A makes sync request 1
req1 <- clientMakeSyncRequest A
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let (rest, items) = mergeAddedItems is (syncResponseClientAdded resp1)
lift $ do
rest `shouldBe` M.empty
sstore2 `shouldBe` (ServerStore {serverStoreItems = items})
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` (initialClientStore {clientStoreSyncedItems = items})
Client B makes sync request 2
req2 <- clientMakeSyncRequest B
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` (emptySyncResponse {syncResponseServerAdded = items})
sstore3 `shouldBe` sstore2
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ cBstore2 `shouldBe` (initialClientStore {clientStoreSyncedItems = items})
lift $ cAstore2 `shouldBe` cBstore2
it "succesfully syncs deletions across to a second client" $ \te -> forAllValid $ \items -> forAllValid $ \time1 ->
runTest te $ do
let syncedItems = M.map (\i -> Timed i time1) items
itemTimes = M.map (const time1) items
itemIds = M.keysSet items
setupClient A $ initialClientStore {clientStoreSyncedItems = syncedItems}
setupClient B $ initialClientStore {clientStoreDeletedItems = itemTimes}
setupServer $ ServerStore {serverStoreItems = syncedItems}
Client B makes sync request 1
req1 <- clientMakeSyncRequest B
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
lift $ do
resp1 `shouldBe` emptySyncResponse {syncResponseClientDeleted = itemIds}
sstore2 `shouldBe` initialServerStore
mergeFunc B resp1
cBstore2 <- clientGetStore B
lift $ cBstore2 `shouldBe` initialClientStore
Client A makes sync request 2
req2 <- clientMakeSyncRequest A
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` emptySyncResponse {syncResponseServerDeleted = itemIds}
sstore3 `shouldBe` sstore2
mergeFunc A resp2
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` initialClientStore
lift $ cAstore2 `shouldBe` cBstore2
it "does not run into a conflict if two clients both try to sync a deletion" $ \te -> forAllValid $ \items -> forAllValid $ \time1 ->
runTest te $ do
setupClient A $
initialClientStore {clientStoreDeletedItems = M.map (const time1) items}
setupClient B $
initialClientStore {clientStoreDeletedItems = M.map (const time1) items}
setupServer $ ServerStore {serverStoreItems = M.map (\i -> Timed i time1) items}
Client A makes sync request 1
req1 <- clientMakeSyncRequest A
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
lift $ do
resp1 `shouldBe` (emptySyncResponse {syncResponseClientDeleted = M.keysSet items})
TODO will probably need some sort of tombstoning .
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $ cAstore2 `shouldBe` initialClientStore
Client B makes sync request 2
req2 <- clientMakeSyncRequest B
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2 `shouldBe` (emptySyncResponse {syncResponseClientDeleted = M.keysSet items})
sstore3 `shouldBe` sstore2
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ do
cBstore2 `shouldBe` initialClientStore
cAstore2 `shouldBe` cBstore2
noDataLossSpec ::
ItemMergeStrategy Thing ->
SpecWith TestEnv
noDataLossSpec strat = do
let mergeFunc = clientMergeSyncResponse strat
it "does not lose data after a conflict occurs" $ \te -> forAllValid $ \uuid -> forAllValid $ \time1 -> forAllValid $ \i1 -> forAllValid $ \i2 -> forAllValid $ \i3 ->
runTest te $ do
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed i1 time1)}
The first client has synced it , and modified it .
setupClient A $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed i2 time1)
}
The second client has synced it too , and modified it too .
setupClient B $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed i3 time1)
}
Client A makes sync request 1
req1 <- clientMakeSyncRequest A
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let time2 = incrementServerTime time1
lift $ do
resp1
`shouldBe` (emptySyncResponse {syncResponseClientChanged = M.singleton uuid time2})
sstore2
`shouldBe` (ServerStore {serverStoreItems = M.singleton uuid (Timed i2 time2)})
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $
cAstore2
`shouldBe` (initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed i2 time2)})
Client B makes sync request 2
req2 <- clientMakeSyncRequest B
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2
`shouldBe` (emptySyncResponse {syncResponseConflicts = M.singleton uuid (Timed i2 time2)})
sstore3 `shouldBe` sstore2
clientMergeSyncResponse mergeFromClientStrategy B resp2
cBstore2 <- clientGetStore B
lift $
cBstore2
`shouldBe` ( initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed i3 time1)
}
)
noDivergenceSpec :: ItemMergeStrategy Thing -> SpecWith TestEnv
noDivergenceSpec strat = do
let mergeFunc = clientMergeSyncResponse strat
it "does not diverge after a conflict occurs" $ \te ->
forAllValid $ \uuid -> forAllValid $ \time1 -> forAllValid $ \iS -> forAllValid $ \iA ->
forAllValid $ \iB ->
runTest te $ do
setupServer $ ServerStore {serverStoreItems = M.singleton uuid (Timed iS time1)}
The first client has synced it , and modified it .
setupClient A $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed iA time1)
}
The second client has synced it too , and modified it too .
setupClient B $
initialClientStore
{ clientStoreSyncedButChangedItems = M.singleton uuid (Timed iB time1)
}
Client A makes sync request 1
req1 <- clientMakeSyncRequest A
The server processes sync request 1
resp1 <- serverProcessSync req1
sstore2 <- serverGetStore
let time2 = incrementServerTime time1
lift $ do
resp1
`shouldBe` (emptySyncResponse {syncResponseClientChanged = M.singleton uuid time2})
sstore2
`shouldBe` (ServerStore {serverStoreItems = M.singleton uuid (Timed iA time2)})
mergeFunc A resp1
cAstore2 <- clientGetStore A
lift $
cAstore2
`shouldBe` initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed iA time2)}
Client B makes sync request 2
req2 <- clientMakeSyncRequest B
The server processes sync request 2
resp2 <- serverProcessSync req2
sstore3 <- serverGetStore
lift $ do
resp2
`shouldBe` (emptySyncResponse {syncResponseConflicts = M.singleton uuid (Timed iA time2)})
sstore3 `shouldBe` sstore2
mergeFunc B resp2
cBstore2 <- clientGetStore B
lift $ do
let expected = case itemMergeStrategyMergeChangeConflict strat iB iA of
KeepLocal -> initialClientStore {clientStoreSyncedButChangedItems = M.singleton uuid (Timed iB time1)}
TakeRemote -> initialClientStore {clientStoreSyncedItems = M.singleton uuid (Timed iA time2)}
Merged im -> initialClientStore {clientStoreSyncedButChangedItems = M.singleton uuid (Timed im time2)}
cBstore2
`shouldBe` expected
req3 <- clientMakeSyncRequest B
resp3 <- serverProcessSync req3
mergeFunc B resp3
cBstore3 <- clientGetStore B
req4 <- clientMakeSyncRequest A
resp4 <- serverProcessSync req4
mergeFunc A resp4
cAstore3 <- clientGetStore A
lift $
cBstore3 `shouldBe` cAstore3
type T a = ReaderT TestEnv IO a
runTest :: TestEnv -> T a -> IO a
runTest = flip runReaderT
runClientDB :: Client -> SqlPersistT IO a -> T a
runClientDB num func = do
pool <- asks $ case num of
A -> testEnvClient1Pool
B -> testEnvClient2Pool
liftIO $ runSqlPool func pool
runServerDB :: SqlPersistT IO a -> T a
runServerDB func = do
pool <- asks testEnvServerPool
liftIO $ runSqlPool func pool
type CS = ClientStore ClientThingId ServerThingId Thing
type SReq = SyncRequest ClientThingId ServerThingId Thing
type SS = ServerStore ServerThingId Thing
type SResp = SyncResponse ClientThingId ServerThingId Thing
setupClient :: Client -> CS -> T ()
setupClient client = runClientDB client . setupClientThingQuery
setupServer :: SS -> T ()
setupServer = runServerDB . setupServerThingQuery
clientGetStore :: Client -> T CS
clientGetStore client = runClientDB client clientGetStoreThingQuery
clientMakeSyncRequest :: Client -> T SReq
clientMakeSyncRequest client = runClientDB client clientMakeSyncRequestThingQuery
serverGetStore :: T SS
serverGetStore = runServerDB serverGetStoreThingQuery
serverProcessSync :: SReq -> T SResp
serverProcessSync = runServerDB . serverProcessSyncThingQuery
clientMergeSyncResponse :: ItemMergeStrategy Thing -> Client -> SResp -> T ()
clientMergeSyncResponse strat client = runClientDB client . clientMergeSyncResponseThingQuery strat
data Client = A | B
deriving (Show, Eq)
data TestEnv = TestEnv
{ testEnvServerPool :: !ConnectionPool,
testEnvClient1Pool :: !ConnectionPool,
testEnvClient2Pool :: !ConnectionPool
}
twoClientsSpec :: SpecWith TestEnv -> Spec
twoClientsSpec =
modifyMaxSuccess (`div` 10)
. around withTestEnv
withTestEnv :: (TestEnv -> IO a) -> IO a
withTestEnv func =
withServerPool $ \serverPool ->
withClientPool $ \client1Pool ->
withClientPool $ \client2Pool -> do
let tenv =
TestEnv
{ testEnvServerPool = serverPool,
testEnvClient1Pool = client1Pool,
testEnvClient2Pool = client2Pool
}
liftIO $ func tenv
|
33eddee38e0cd9b8784ccee2ad84dc170fcc09c883ac7ccb614605cc9ac2f99a | bobzhang/ocaml-book | pa_json_ast.ml | open Camlp4.PreCast
open Json_ast
module Camlp4TrashX = struct
INCLUDE "json_ast.ml"
end
open Camlp4TrashX
class map = Camlp4MapGenerator.generated
class fold = Camlp4FoldGenerator.generated
module MetaExpr = struct
let meta_float' _loc f =
<:expr< $`flo:f$ >>
include Camlp4Filters.MetaGeneratorExpr(Camlp4TrashX)
end
module MetaPatt = struct
let meta_float' _loc f =
<:patt< $`flo:f$ >>
include Camlp4Filters.MetaGeneratorPatt(Camlp4TrashX)
end
| null | https://raw.githubusercontent.com/bobzhang/ocaml-book/09a575b0d1fedfce565ecb9a0ae9cf0df37fdc75/camlp4/code/jake/pa_json_ast.ml | ocaml | open Camlp4.PreCast
open Json_ast
module Camlp4TrashX = struct
INCLUDE "json_ast.ml"
end
open Camlp4TrashX
class map = Camlp4MapGenerator.generated
class fold = Camlp4FoldGenerator.generated
module MetaExpr = struct
let meta_float' _loc f =
<:expr< $`flo:f$ >>
include Camlp4Filters.MetaGeneratorExpr(Camlp4TrashX)
end
module MetaPatt = struct
let meta_float' _loc f =
<:patt< $`flo:f$ >>
include Camlp4Filters.MetaGeneratorPatt(Camlp4TrashX)
end
|
|
a85041108e7534c9e4bf9cf225e1eaa519629313eeedb528a587db2aa03ca8b1 | haskell-servant/servant | JWT.hs | module Servant.Auth.Server.Internal.JWT where
import Control.Lens
import Control.Monad (MonadPlus(..), guard)
import Control.Monad.Reader
import qualified Crypto.JOSE as Jose
import qualified Crypto.JWT as Jose
import Data.ByteArray (constEq)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Data.Maybe (fromMaybe)
import Data.Time (UTCTime)
import Network.Wai (requestHeaders)
import Servant.Auth.JWT (FromJWT(..), ToJWT(..))
import Servant.Auth.Server.Internal.ConfigTypes
import Servant.Auth.Server.Internal.Types
-- | A JWT @AuthCheck@. You likely won't need to use this directly unless you
-- are protecting a @Raw@ endpoint.
jwtAuthCheck :: FromJWT usr => JWTSettings -> AuthCheck usr
jwtAuthCheck jwtSettings = do
req <- ask
token <- maybe mempty return $ do
authHdr <- lookup "Authorization" $ requestHeaders req
let bearer = "Bearer "
(mbearer, rest) = BS.splitAt (BS.length bearer) authHdr
guard (mbearer `constEq` bearer)
return rest
verifiedJWT <- liftIO $ verifyJWT jwtSettings token
case verifiedJWT of
Nothing -> mzero
Just v -> return v
-- | Creates a JWT containing the specified data. The data is stored in the
-- @dat@ claim. The 'Maybe UTCTime' argument indicates the time at which the
-- token expires.
makeJWT :: ToJWT a
=> a -> JWTSettings -> Maybe UTCTime -> IO (Either Jose.Error BSL.ByteString)
makeJWT v cfg expiry = Jose.runJOSE $ do
bestAlg <- Jose.bestJWSAlg $ signingKey cfg
let alg = fromMaybe bestAlg $ jwtAlg cfg
ejwt <- Jose.signClaims (signingKey cfg)
(Jose.newJWSHeader ((), alg))
(addExp $ encodeJWT v)
return $ Jose.encodeCompact ejwt
where
addExp claims = case expiry of
Nothing -> claims
Just e -> claims & Jose.claimExp ?~ Jose.NumericDate e
verifyJWT :: FromJWT a => JWTSettings -> BS.ByteString -> IO (Maybe a)
verifyJWT jwtCfg input = do
keys <- validationKeys jwtCfg
verifiedJWT <- Jose.runJOSE $ do
unverifiedJWT <- Jose.decodeCompact (BSL.fromStrict input)
Jose.verifyClaims
(jwtSettingsToJwtValidationSettings jwtCfg)
keys
unverifiedJWT
return $ case verifiedJWT of
Left (_ :: Jose.JWTError) -> Nothing
Right v -> case decodeJWT v of
Left _ -> Nothing
Right v' -> Just v'
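-- Usage sketch (not part of this module): round-tripping a value through
-- makeJWT and verifyJWT. It is generic in any type with ToJWT/FromJWT
-- instances, and it assumes generateKey and defaultJWTSettings are in scope
-- (e.g. from Servant.Auth.Server) to build the JWTSettings.
jwtRoundTrip :: (ToJWT a, FromJWT a) => a -> IO (Maybe a)
jwtRoundTrip x = do
  jwk <- generateKey
  let settings = defaultJWTSettings jwk
  etoken <- makeJWT x settings Nothing
  case etoken of
    Left _ -> pure Nothing
    Right token -> verifyJWT settings (BSL.toStrict token)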
| null | https://raw.githubusercontent.com/haskell-servant/servant/8f081bd9ad69aaa4ed4f27444e15433b36177b42/servant-auth/servant-auth-server/src/Servant/Auth/Server/Internal/JWT.hs | haskell | token expires. | module Servant.Auth.Server.Internal.JWT where
import Control.Lens
import Control.Monad (MonadPlus(..), guard)
import Control.Monad.Reader
import qualified Crypto.JOSE as Jose
import qualified Crypto.JWT as Jose
import Data.ByteArray (constEq)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Data.Maybe (fromMaybe)
import Data.Time (UTCTime)
import Network.Wai (requestHeaders)
import Servant.Auth.JWT (FromJWT(..), ToJWT(..))
import Servant.Auth.Server.Internal.ConfigTypes
import Servant.Auth.Server.Internal.Types
-- | A JWT @AuthCheck@. You likely won't need to use this directly unless you
-- are protecting a @Raw@ endpoint.
jwtAuthCheck :: FromJWT usr => JWTSettings -> AuthCheck usr
jwtAuthCheck jwtSettings = do
req <- ask
token <- maybe mempty return $ do
authHdr <- lookup "Authorization" $ requestHeaders req
let bearer = "Bearer "
(mbearer, rest) = BS.splitAt (BS.length bearer) authHdr
guard (mbearer `constEq` bearer)
return rest
verifiedJWT <- liftIO $ verifyJWT jwtSettings token
case verifiedJWT of
Nothing -> mzero
Just v -> return v
-- | Creates a JWT containing the specified data. The data is stored in the
-- @dat@ claim. The 'Maybe UTCTime' argument indicates the time at which the
-- token expires.
makeJWT :: ToJWT a
=> a -> JWTSettings -> Maybe UTCTime -> IO (Either Jose.Error BSL.ByteString)
makeJWT v cfg expiry = Jose.runJOSE $ do
bestAlg <- Jose.bestJWSAlg $ signingKey cfg
let alg = fromMaybe bestAlg $ jwtAlg cfg
ejwt <- Jose.signClaims (signingKey cfg)
(Jose.newJWSHeader ((), alg))
(addExp $ encodeJWT v)
return $ Jose.encodeCompact ejwt
where
addExp claims = case expiry of
Nothing -> claims
Just e -> claims & Jose.claimExp ?~ Jose.NumericDate e
verifyJWT :: FromJWT a => JWTSettings -> BS.ByteString -> IO (Maybe a)
verifyJWT jwtCfg input = do
keys <- validationKeys jwtCfg
verifiedJWT <- Jose.runJOSE $ do
unverifiedJWT <- Jose.decodeCompact (BSL.fromStrict input)
Jose.verifyClaims
(jwtSettingsToJwtValidationSettings jwtCfg)
keys
unverifiedJWT
return $ case verifiedJWT of
Left (_ :: Jose.JWTError) -> Nothing
Right v -> case decodeJWT v of
Left _ -> Nothing
Right v' -> Just v'
|
cf272d412f48399811f44d544b248634676da3e61042c16b6377d916fe9bb5c1 | haskell/text | ApiCompare.hs | -- This script compares the strict and lazy Text APIs to ensure that
-- they're reasonably in sync.
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Set as S
import qualified Data.Text as T
import System.Process
main = do
let tidy pkg = (S.fromList . filter (T.isInfixOf "::") . T.lines .
T.replace "GHC.Int.Int64" "Int" .
T.replace "\n " "" .
T.replace (T.append (T.pack pkg) ".") "" . T.pack) `fmap`
readProcess "ghci" [] (":browse " ++ pkg)
let diff a b = mapM_ (putStrLn . (" "++) . T.unpack) . S.toList $
S.difference a b
text <- tidy "Data.Text"
lazy <- tidy "Data.Text.Lazy"
list <- tidy "Data.List"
putStrLn "Text \\ List:"
diff text list
putStrLn ""
putStrLn "Text \\ Lazy:"
diff text lazy
putStrLn ""
putStrLn "Lazy \\ Text:"
diff lazy text
| null | https://raw.githubusercontent.com/haskell/text/ca73ae38ed3744ec930cbfdb706147ec29d5a185/scripts/ApiCompare.hs | haskell | This script compares the strict and lazy Text APIs to ensure that
they're reasonably in sync.
# LANGUAGE OverloadedStrings # |
import qualified Data.Set as S
import qualified Data.Text as T
import System.Process
main = do
let tidy pkg = (S.fromList . filter (T.isInfixOf "::") . T.lines .
T.replace "GHC.Int.Int64" "Int" .
T.replace "\n " "" .
T.replace (T.append (T.pack pkg) ".") "" . T.pack) `fmap`
readProcess "ghci" [] (":browse " ++ pkg)
let diff a b = mapM_ (putStrLn . (" "++) . T.unpack) . S.toList $
S.difference a b
text <- tidy "Data.Text"
lazy <- tidy "Data.Text.Lazy"
list <- tidy "Data.List"
putStrLn "Text \\ List:"
diff text list
putStrLn ""
putStrLn "Text \\ Lazy:"
diff text lazy
putStrLn ""
putStrLn "Lazy \\ Text:"
diff lazy text
|
1c58d103b05c3228b2ccfb7b294de557d98dfb6e6352924949f5679f003ed6a7 | flupe/achille | Writable.hs | {-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- | Defines an interface for things that can be written to disk.
module Achille.Writable where
import Data.Text as Text
import Data.Text.Encoding (encodeUtf8)
import Data.Text.Lazy qualified as LT
import Data.Text.Lazy.Encoding qualified as LT (encodeUtf8)
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as LBS
import Achille.IO as AchilleIO
import Achille.Path
-- | Class for things that can be saved.
class Writable m a where
write :: Path -> a -> m ()
instance AchilleIO m => Writable m [Char] where
write to = write to . Text.pack
instance AchilleIO m => Writable m Text where
write to = AchilleIO.writeFile to . encodeUtf8
instance AchilleIO m => Writable m LT.Text where
write to = AchilleIO.writeFileLazy to . LT.encodeUtf8
instance AchilleIO m => Writable m BS.ByteString where
write = AchilleIO.writeFile
instance AchilleIO m => Writable m LBS.ByteString where
write = AchilleIO.writeFileLazy
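-- Example instance (a sketch, not part of the module): a hypothetical Report
-- type that renders to Text can reuse the Text instance above.
data Report = Report { reportTitle :: Text, reportBody :: Text }

instance AchilleIO m => Writable m Report where
  write to (Report title body) =
    write to (title <> Text.pack "\n\n" <> body)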
| null | https://raw.githubusercontent.com/flupe/achille/359e8a4ab80a5503e682f3d11a4ef5327daa7bdc/achille/Achille/Writable.hs | haskell | | Defines an interface for things that can be written to disk.
| Class for things that can be saved. | # LANGUAGE FlexibleInstances #
{-# LANGUAGE MultiParamTypeClasses #-}
module Achille.Writable where
import Data.Text as Text
import Data.Text.Encoding (encodeUtf8)
import Data.Text.Lazy qualified as LT
import Data.Text.Lazy.Encoding qualified as LT (encodeUtf8)
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as LBS
import Achille.IO as AchilleIO
import Achille.Path
class Writable m a where
write :: Path -> a -> m ()
instance AchilleIO m => Writable m [Char] where
write to = write to . Text.pack
instance AchilleIO m => Writable m Text where
write to = AchilleIO.writeFile to . encodeUtf8
instance AchilleIO m => Writable m LT.Text where
write to = AchilleIO.writeFileLazy to . LT.encodeUtf8
instance AchilleIO m => Writable m BS.ByteString where
write = AchilleIO.writeFile
instance AchilleIO m => Writable m LBS.ByteString where
write = AchilleIO.writeFileLazy
|
75ae4414f5779f54eb80f88d713efaf4c08e812ad901adbddecadfbe9cd481e2 | ruhler/smten | HashTable.hs |
-- | A hash table which is used only for lookups.
module Smten.Runtime.HashTable (
HashTable(), table,
Smten.Runtime.HashTable.lookup,
Smten.Runtime.HashTable.assocs,
) where
import Data.Array
import Data.Hashable
data HashTable k v = HashTable {
_size :: Int,
_elems :: Array Int [(k, v)]
}
-- A list of the hash table sizes we'll draw from.
-- From
sizes :: [Int]
sizes = [
193, 389, 769, 1543, 3079, 6151, 12289, 24593, 49157, 98317, 196613,
196613, 393241, 786433, 1572869, 3145739, 6291469, 12582917, 25165843,
50331653, 100663319]
-- | Construct a new table with the given key/value pair.
The first value for each key in the list is stored .
table :: (Hashable k) => [(k, v)] -> HashTable k v
table elems =
let s = head (filter (> (2 * length elems)) sizes ++ [last sizes])
assocs = [(indexof s k, (k,v)) | (k, v) <- elems]
in HashTable s (accumArray (\e a -> e ++ [a]) [] (0, s) assocs)
-- | Lookup the value for the given key in the table.
lookup :: (Eq k, Hashable k) => k -> HashTable k v -> Maybe v
lookup k (HashTable s es) = Prelude.lookup k (es ! indexof s k)
-- | Return the index of the bucket where the key should be found given the
-- size of the hash table.
indexof :: (Hashable k) => Int -> k -> Int
indexof s k = hash k `mod` s
assocs :: HashTable k v -> [(k, v)]
assocs (HashTable _ es) = concat (Data.Array.elems es)
| null | https://raw.githubusercontent.com/ruhler/smten/16dd37fb0ee3809408803d4be20401211b6c4027/smten-lib/Smten/Runtime/HashTable.hs | haskell | | A hash table which is used only for lookups.
A list of the hash table sizes we'll draw from.
From
| Construct a new table with the given key/value pair.
| Lookup the value for the given key in the table.
| Return the index of the bucket where the key should be found given the
size of the hash table. |
module Smten.Runtime.HashTable (
HashTable(), table,
Smten.Runtime.HashTable.lookup,
Smten.Runtime.HashTable.assocs,
) where
import Data.Array
import Data.Hashable
data HashTable k v = HashTable {
_size :: Int,
_elems :: Array Int [(k, v)]
}
sizes :: [Int]
sizes = [
193, 389, 769, 1543, 3079, 6151, 12289, 24593, 49157, 98317, 196613,
196613, 393241, 786433, 1572869, 3145739, 6291469, 12582917, 25165843,
50331653, 100663319]
The first value for each key in the list is stored .
table :: (Hashable k) => [(k, v)] -> HashTable k v
table elems =
let s = head (filter (> (2 * length elems)) sizes ++ [last sizes])
assocs = [(indexof s k, (k,v)) | (k, v) <- elems]
in HashTable s (accumArray (\e a -> e ++ [a]) [] (0, s) assocs)
lookup :: (Eq k, Hashable k) => k -> HashTable k v -> Maybe v
lookup k (HashTable s es) = Prelude.lookup k (es ! indexof s k)
indexof :: (Hashable k) => Int -> k -> Int
indexof s k = hash k `mod` s
assocs :: HashTable k v -> [(k, v)]
assocs (HashTable _ es) = concat (Data.Array.elems es)
|
2f3bc511edb62aaed7b2401b39f759cb8c295bc1eec97cec2adae49a1490cdeb | clckwrks/clckwrks | URL.hs | {-# LANGUAGE DeriveDataTypeable, FlexibleInstances, TemplateHaskell, TypeFamilies #-}
module Clckwrks.URL
( ClckURL(..)
, AdminURL(..)
, AuthenticateURL(..)
, NoEscape(..)
) where
import Clckwrks.Admin.URL (AdminURL(..))
import Clckwrks.JS.URL (JSURL)
import Clckwrks.ProfileData.URL (ProfileDataURL(..))
import Control.Applicative ((<$>), many)
import Data.Data (Data, Typeable)
import Data.SafeCopy (Migrate(..), SafeCopy(..), base, deriveSafeCopy, extension)
import Data.Text (Text, pack, unpack)
import Happstack.Authenticate.Core (AuthenticateURL(..))
import System.FilePath (joinPath, splitDirectories)
import Web.Routes (PathInfo(..), anySegment)
import Web.Routes.TH (derivePathInfo)
newtype NoEscape a = NoEscape a
deriving (Eq, Ord, Data, Typeable, Read, Show)
instance PathInfo (NoEscape String) where
toPathSegments (NoEscape s) = map pack $ splitDirectories s
fromPathSegments =
do ps <- many anySegment
return (NoEscape (joinPath $ map unpack ps))
data ClckURL
= ThemeData String
| ThemeDataNoEscape (NoEscape FilePath)
| PluginData Text FilePath
| Admin AdminURL
| Profile ProfileDataURL
| JS JSURL
deriving (Eq, Ord, Data, Typeable, Read, Show)
$(derivePathInfo ''ClckURL)
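-- Example (a sketch, not part of the module): NoEscape splits a relative path
-- into one URL segment per directory component instead of escaping the slashes.
themeDataSegments :: [Text]
themeDataSegments = toPathSegments (NoEscape "css/site.css")
-- evaluates to ["css","site.css"]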
| null | https://raw.githubusercontent.com/clckwrks/clckwrks/dd4ea1e2f41066aa5779f1cc22f3b7a0ca8a0bed/Clckwrks/URL.hs | haskell | # LANGUAGE DeriveDataTypeable , FlexibleInstances , TemplateHaskell , TypeFamilies #
module Clckwrks.URL
( ClckURL(..)
, AdminURL(..)
, AuthenticateURL(..)
, NoEscape(..)
) where
import Clckwrks.Admin.URL (AdminURL(..))
import Clckwrks.JS.URL (JSURL)
import Clckwrks.ProfileData.URL (ProfileDataURL(..))
import Control.Applicative ((<$>), many)
import Data.Data (Data, Typeable)
import Data.SafeCopy (Migrate(..), SafeCopy(..), base, deriveSafeCopy, extension)
import Data.Text (Text, pack, unpack)
import Happstack.Authenticate.Core (AuthenticateURL(..))
import System.FilePath (joinPath, splitDirectories)
import Web.Routes (PathInfo(..), anySegment)
import Web.Routes.TH (derivePathInfo)
newtype NoEscape a = NoEscape a
deriving (Eq, Ord, Data, Typeable, Read, Show)
instance PathInfo (NoEscape String) where
toPathSegments (NoEscape s) = map pack $ splitDirectories s
fromPathSegments =
do ps <- many anySegment
return (NoEscape (joinPath $ map unpack ps))
data ClckURL
= ThemeData String
| ThemeDataNoEscape (NoEscape FilePath)
| PluginData Text FilePath
| Admin AdminURL
| Profile ProfileDataURL
| JS JSURL
deriving (Eq, Ord, Data, Typeable, Read, Show)
$(derivePathInfo ''ClckURL)
|
|
88290fe4fe36a0a8d0a8dc4ef593ce9c81484a11996a48d38d7783ebebc2f37e | SahilKang/cl-avro | duration.lisp | ;;; Copyright 2021 Google LLC
;;;
;;; This file is part of cl-avro.
;;;
;;; cl-avro is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;; (at your option) any later version.
;;;
;;; cl-avro is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with cl-avro. If not, see </>.
(in-package #:cl-user)
(defpackage #:cl-avro.internal.duration
(:use #:cl)
(:local-nicknames
(#:api #:cl-avro)
(#:internal #:cl-avro.internal)
(#:mop #:cl-avro.internal.mop)
(#:little-endian #:cl-avro.internal.little-endian))
(:import-from #:cl-avro.internal.type
#:uint8
#:uint32
#:ufixnum
#:vector<uint8>)
(:import-from #:cl-avro.internal.compare
#:compare-byte-vectors
#:compare-byte-streams)
(:import-from #:cl-avro.internal.recursive-descent.pattern
#:define-pattern-method))
(in-package #:cl-avro.internal.duration)
;;; duration
(defclass api:duration (api:logical-schema)
((underlying
:initarg :underlying
:reader internal:underlying
:late-type api:fixed))
(:metaclass mop:schema-class)
(:scalars :underlying)
(:object-class api:duration-object)
(:default-initargs
:underlying (error "Must supply UNDERLYING"))
(:documentation
"Metaclass of avro duration schemas."))
(defmethod closer-mop:validate-superclass
((class api:duration) (superclass api:logical-schema))
t)
(defmethod mop:early->late
((class api:duration) (name (eql 'underlying)) type value)
(let* ((value (call-next-method))
(size (api:size value)))
(assert (= size 12) () "Size of fixed schema must be 12, not ~S" size)
value))
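;;; Example (a sketch, not part of this file): a duration schema wraps a
;;; 12-byte fixed schema. The :name/:size initargs used to create the fixed
;;; schema here are assumptions about the cl-avro fixed API; the API:DURATION
;;; form mirrors the READ-JSO method defined later in this file.
(defparameter *twelve-bytes*
  (make-instance 'api:fixed :name "twelve_bytes" :size 12))
(defparameter *duration-schema*
  (make-instance 'api:duration :underlying *twelve-bytes*))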
;;; duration-object
(defclass api:duration-object (time-interval:time-interval api:logical-object)
((time-interval::months
:type uint32
:documentation "Number of months.")
(time-interval::days
:type uint32
:documentation "Number of days.")
(milliseconds
:type uint32
:accessor milliseconds
:documentation "Number of milliseconds."))
(:documentation
"Base class for instances of an avro duration schema."))
(declaim (boolean *normalize-p*))
(defparameter *normalize-p* t)
(declaim (ftype (function (api:duration-object) (values &optional)) normalize))
(defun normalize (duration-object)
(let ((*normalize-p* nil))
(with-accessors
((years time-interval::interval-years)
(months time-interval::interval-months)
(weeks time-interval::interval-weeks)
(days time-interval::interval-days)
(hours time-interval::interval-hours)
(minutes time-interval::interval-minutes)
(seconds time-interval::interval-seconds)
(milliseconds milliseconds)
(nanoseconds time-interval::interval-nanoseconds))
duration-object
(declare (uint32 months days milliseconds)
(integer years weeks hours minutes seconds nanoseconds))
(incf months (* years 12))
(setf years 0)
(incf days (* weeks 7))
(setf weeks 0)
(incf minutes (* 60 hours))
(incf seconds (* 60 minutes))
(setf hours 0
minutes 0)
(setf milliseconds (+ (* 1000 seconds)
(truncate nanoseconds (* 1000 1000))))))
(values))
(defmethod (setf closer-mop:slot-value-using-class)
(new-value (class api:duration) (object api:duration-object) slot)
(prog1 (call-next-method)
(when *normalize-p*
(normalize object))))
(defmethod initialize-instance :around
((instance api:duration-object) &key)
(let ((*normalize-p* nil))
(call-next-method)))
(defmethod initialize-instance :after
((instance api:duration-object) &key (milliseconds 0))
(multiple-value-bind (seconds remainder)
(truncate milliseconds 1000)
(incf (time-interval::interval-seconds instance) seconds)
(incf (time-interval::interval-nanoseconds instance)
(* remainder 1000 1000)))
(normalize instance))
(defmethod api:months
((object api:duration-object))
(time-interval::interval-months object))
(defmethod api:days
((object api:duration-object))
(time-interval::interval-days object))
(defmethod api:milliseconds
((object api:duration-object))
(milliseconds object))
;;; serialized-size
(defmethod internal:fixed-size
((schema api:duration))
(declare (ignore schema))
12)
(defmethod api:serialized-size
((object api:duration-object))
(declare (ignore object))
12)
;;; serialize
(defmethod internal:serialize
((object api:duration-object) (into vector) &key (start 0))
(declare (vector<uint8> into)
(ufixnum start))
(with-accessors
((months time-interval::interval-months)
(days time-interval::interval-days)
(milliseconds milliseconds))
object
(little-endian:uint32->vector months into start)
(little-endian:uint32->vector days into (+ start 4))
(little-endian:uint32->vector milliseconds into (+ start 8)))
12)
(defmethod internal:serialize
((object api:duration-object) (into stream) &key)
(with-accessors
((months time-interval::interval-months)
(days time-interval::interval-days)
(milliseconds milliseconds))
object
(little-endian:uint32->stream months into)
(little-endian:uint32->stream days into)
(little-endian:uint32->stream milliseconds into))
12)
(defmethod api:serialize
((object api:duration-object)
&rest initargs
&key
((:single-object-encoding-p sp))
(into (make-array (if sp 22 12) :element-type 'uint8))
(start 0))
(declare (ignore start))
(values into (apply #'internal:serialize object into initargs)))
;;; deserialize
(defmethod api:deserialize
((schema api:duration) (input vector) &key (start 0))
(declare (vector<uint8> input)
(ufixnum start))
(multiple-value-bind (months days milliseconds)
(values (little-endian:vector->uint32 input start)
(little-endian:vector->uint32 input (+ start 4))
(little-endian:vector->uint32 input (+ start 8)))
(values (make-instance
schema :months months :days days :milliseconds milliseconds)
12)))
(defmethod api:deserialize
((schema api:duration) (input stream) &key)
(multiple-value-bind (months days milliseconds)
(values (little-endian:stream->uint32 input)
(little-endian:stream->uint32 input)
(little-endian:stream->uint32 input))
(values (make-instance
schema :months months :days days :milliseconds milliseconds)
12)))
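;;; Example (a sketch, not part of this file): round-tripping an object
;;; through the serialize/deserialize methods above. It assumes
;;; *DURATION-SCHEMA* was created as in the earlier example sketch.
(defun duration-round-trip ()
  "Serialize a duration object and read it back with API:DESERIALIZE."
  (let ((object (make-instance *duration-schema*
                               :months 1 :days 2 :milliseconds 3000)))
    (multiple-value-bind (bytes size) (api:serialize object)
      (declare (ignore size))
      (api:deserialize *duration-schema* bytes))))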
;;; compare
(defmethod internal:skip
((schema api:duration) (input vector) &optional start)
(declare (ignore schema input start))
12)
(defmethod internal:skip
((schema api:duration) (input stream) &optional start)
(declare (ignore schema start))
(loop repeat 12 do (read-byte input))
12)
(defmethod api:compare
((schema api:duration) (left vector) (right vector)
&key (left-start 0) (right-start 0))
(declare (vector<uint8> left right)
(ufixnum left-start right-start))
(let ((left-end (+ left-start 12))
(right-end (+ right-start 12)))
(compare-byte-vectors
left right left-start right-start left-end right-end)))
(defmethod api:compare
((schema api:duration) (left stream) (right stream) &key)
(compare-byte-streams left right 12 12))
;;; coerce
(defmethod api:coerce
((object api:duration-object) (schema api:duration))
(change-class object schema))
;;; field default
(defmethod internal:serialize-field-default
((default api:duration-object))
(let ((buffer (make-array 12 :element-type 'uint8)))
(with-accessors
((months time-interval::interval-months)
(days time-interval::interval-days)
(milliseconds milliseconds))
default
(little-endian:uint32->vector months buffer 0)
(little-endian:uint32->vector days buffer 4)
(little-endian:uint32->vector milliseconds buffer 8))
(babel:octets-to-string buffer :encoding :latin-1)))
(defmethod internal:deserialize-field-default
((schema api:duration) (default string))
(multiple-value-bind (months days milliseconds)
(let ((buffer (babel:string-to-octets default :encoding :latin-1)))
;; assuming valid input like this size 12 is unsafe
(declare ((simple-array uint8 (12)) buffer))
(values (little-endian:vector->uint32 buffer 0)
(little-endian:vector->uint32 buffer 4)
(little-endian:vector->uint32 buffer 8)))
(make-instance schema :months months :days days :milliseconds milliseconds)))
;;; jso
(define-pattern-method 'internal:read-jso
'(lambda ((jso ("type" nil
"logicalType" "duration"))
fullname->schema
enclosing-namespace)
(let ((underlying (internal:read-jso (st-json:getjso "type" jso)
fullname->schema
enclosing-namespace)))
(handler-case
(make-instance 'api:duration :underlying underlying)
(error ()
underlying)))))
(defmethod internal:logical-name
((schema api:duration))
(declare (ignore schema))
"duration")
| null | https://raw.githubusercontent.com/SahilKang/cl-avro/70fcaa32514cfb59b75812b2eab45ed24cce9d27/src/logical/duration.lisp | lisp |
This file is part of cl-avro.
cl-avro is free software: you can redistribute it and/or modify
(at your option) any later version.
cl-avro is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with cl-avro. If not, see </>.
duration
duration-object
serialized-size
serialize
deserialize
compare
coerce
field default
jso | Copyright 2021 Google LLC
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(in-package #:cl-user)
(defpackage #:cl-avro.internal.duration
(:use #:cl)
(:local-nicknames
(#:api #:cl-avro)
(#:internal #:cl-avro.internal)
(#:mop #:cl-avro.internal.mop)
(#:little-endian #:cl-avro.internal.little-endian))
(:import-from #:cl-avro.internal.type
#:uint8
#:uint32
#:ufixnum
#:vector<uint8>)
(:import-from #:cl-avro.internal.compare
#:compare-byte-vectors
#:compare-byte-streams)
(:import-from #:cl-avro.internal.recursive-descent.pattern
#:define-pattern-method))
(in-package #:cl-avro.internal.duration)
(defclass api:duration (api:logical-schema)
((underlying
:initarg :underlying
:reader internal:underlying
:late-type api:fixed))
(:metaclass mop:schema-class)
(:scalars :underlying)
(:object-class api:duration-object)
(:default-initargs
:underlying (error "Must supply UNDERLYING"))
(:documentation
"Metaclass of avro duration schemas."))
(defmethod closer-mop:validate-superclass
((class api:duration) (superclass api:logical-schema))
t)
(defmethod mop:early->late
((class api:duration) (name (eql 'underlying)) type value)
(let* ((value (call-next-method))
(size (api:size value)))
(assert (= size 12) () "Size of fixed schema must be 12, not ~S" size)
value))
(defclass api:duration-object (time-interval:time-interval api:logical-object)
((time-interval::months
:type uint32
:documentation "Number of months.")
(time-interval::days
:type uint32
:documentation "Number of days.")
(milliseconds
:type uint32
:accessor milliseconds
:documentation "Number of milliseconds."))
(:documentation
"Base class for instances of an avro duration schema."))
(declaim (boolean *normalize-p*))
(defparameter *normalize-p* t)
(declaim (ftype (function (api:duration-object) (values &optional)) normalize))
(defun normalize (duration-object)
(let ((*normalize-p* nil))
(with-accessors
((years time-interval::interval-years)
(months time-interval::interval-months)
(weeks time-interval::interval-weeks)
(days time-interval::interval-days)
(hours time-interval::interval-hours)
(minutes time-interval::interval-minutes)
(seconds time-interval::interval-seconds)
(milliseconds milliseconds)
(nanoseconds time-interval::interval-nanoseconds))
duration-object
(declare (uint32 months days milliseconds)
(integer years weeks hours minutes seconds nanoseconds))
(incf months (* years 12))
(setf years 0)
(incf days (* weeks 7))
(setf weeks 0)
(incf minutes (* 60 hours))
(incf seconds (* 60 minutes))
(setf hours 0
minutes 0)
(setf milliseconds (+ (* 1000 seconds)
(truncate nanoseconds (* 1000 1000))))))
(values))
(defmethod (setf closer-mop:slot-value-using-class)
(new-value (class api:duration) (object api:duration-object) slot)
(prog1 (call-next-method)
(when *normalize-p*
(normalize object))))
(defmethod initialize-instance :around
((instance api:duration-object) &key)
(let ((*normalize-p* nil))
(call-next-method)))
(defmethod initialize-instance :after
((instance api:duration-object) &key (milliseconds 0))
(multiple-value-bind (seconds remainder)
(truncate milliseconds 1000)
(incf (time-interval::interval-seconds instance) seconds)
(incf (time-interval::interval-nanoseconds instance)
(* remainder 1000 1000)))
(normalize instance))
(defmethod api:months
((object api:duration-object))
(time-interval::interval-months object))
(defmethod api:days
((object api:duration-object))
(time-interval::interval-days object))
(defmethod api:milliseconds
((object api:duration-object))
(milliseconds object))
(defmethod internal:fixed-size
((schema api:duration))
(declare (ignore schema))
12)
(defmethod api:serialized-size
((object api:duration-object))
(declare (ignore object))
12)
(defmethod internal:serialize
((object api:duration-object) (into vector) &key (start 0))
(declare (vector<uint8> into)
(ufixnum start))
(with-accessors
((months time-interval::interval-months)
(days time-interval::interval-days)
(milliseconds milliseconds))
object
(little-endian:uint32->vector months into start)
(little-endian:uint32->vector days into (+ start 4))
(little-endian:uint32->vector milliseconds into (+ start 8)))
12)
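;; Byte-layout sketch (comment added here, not from the original source): the
;; 12 bytes written above are three little-endian uint32s, so a duration of
;; months = 1, days = 2, milliseconds = 3 serializes to
;; #(1 0 0 0 2 0 0 0 3 0 0 0).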
(defmethod internal:serialize
((object api:duration-object) (into stream) &key)
(with-accessors
((months time-interval::interval-months)
(days time-interval::interval-days)
(milliseconds milliseconds))
object
(little-endian:uint32->stream months into)
(little-endian:uint32->stream days into)
(little-endian:uint32->stream milliseconds into))
12)
(defmethod api:serialize
((object api:duration-object)
&rest initargs
&key
((:single-object-encoding-p sp))
(into (make-array (if sp 22 12) :element-type 'uint8))
(start 0))
(declare (ignore start))
(values into (apply #'internal:serialize object into initargs)))
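;; Note (an inference added here, not an original comment): the 22-byte buffer
;; used for the single-object encoding appears to correspond to Avro's
;; single-object framing -- a 2-byte marker plus an 8-byte schema fingerprint
;; -- followed by the 12-byte duration payload: 2 + 8 + 12 = 22.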
(defmethod api:deserialize
((schema api:duration) (input vector) &key (start 0))
(declare (vector<uint8> input)
(ufixnum start))
(multiple-value-bind (months days milliseconds)
(values (little-endian:vector->uint32 input start)
(little-endian:vector->uint32 input (+ start 4))
(little-endian:vector->uint32 input (+ start 8)))
(values (make-instance
schema :months months :days days :milliseconds milliseconds)
12)))
(defmethod api:deserialize
((schema api:duration) (input stream) &key)
(multiple-value-bind (months days milliseconds)
(values (little-endian:stream->uint32 input)
(little-endian:stream->uint32 input)
(little-endian:stream->uint32 input))
(values (make-instance
schema :months months :days days :milliseconds milliseconds)
12)))
(defmethod internal:skip
((schema api:duration) (input vector) &optional start)
(declare (ignore schema input start))
12)
(defmethod internal:skip
((schema api:duration) (input stream) &optional start)
(declare (ignore schema start))
(loop repeat 12 do (read-byte input))
12)
(defmethod api:compare
((schema api:duration) (left vector) (right vector)
&key (left-start 0) (right-start 0))
(declare (vector<uint8> left right)
(ufixnum left-start right-start))
(let ((left-end (+ left-start 12))
(right-end (+ right-start 12)))
(compare-byte-vectors
left right left-start right-start left-end right-end)))
(defmethod api:compare
((schema api:duration) (left stream) (right stream) &key)
(compare-byte-streams left right 12 12))
(defmethod api:coerce
((object api:duration-object) (schema api:duration))
(change-class object schema))
(defmethod internal:serialize-field-default
((default api:duration-object))
(let ((buffer (make-array 12 :element-type 'uint8)))
(with-accessors
((months time-interval::interval-months)
(days time-interval::interval-days)
(milliseconds milliseconds))
default
(little-endian:uint32->vector months buffer 0)
(little-endian:uint32->vector days buffer 4)
(little-endian:uint32->vector milliseconds buffer 8))
(babel:octets-to-string buffer :encoding :latin-1)))
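;; For example (illustration only): a default of months = 1, days = 2,
;; milliseconds = 3 becomes a 12-character latin-1 string whose character
;; codes are 1 0 0 0 2 0 0 0 3 0 0 0, i.e. the raw bytes re-read as text,
;; which is how Avro spells out fixed defaults in schema JSON.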
(defmethod internal:deserialize-field-default
((schema api:duration) (default string))
(multiple-value-bind (months days milliseconds)
(let ((buffer (babel:string-to-octets default :encoding :latin-1)))
      ;; assuming valid input; declaring the size to be 12 like this is unsafe for arbitrary input
(declare ((simple-array uint8 (12)) buffer))
(values (little-endian:vector->uint32 buffer 0)
(little-endian:vector->uint32 buffer 4)
(little-endian:vector->uint32 buffer 8)))
(make-instance schema :months months :days days :milliseconds milliseconds)))
(define-pattern-method 'internal:read-jso
'(lambda ((jso ("type" nil
"logicalType" "duration"))
fullname->schema
enclosing-namespace)
(let ((underlying (internal:read-jso (st-json:getjso "type" jso)
fullname->schema
enclosing-namespace)))
(handler-case
(make-instance 'api:duration :underlying underlying)
(error ()
underlying)))))
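;; A JSON schema matched by this pattern looks roughly like the following
;; (the fixed schema's name is hypothetical):
;;   {"type": {"type": "fixed", "name": "duration12", "size": 12},
;;    "logicalType": "duration"}
;; If constructing the duration schema fails, the handler-case above simply
;; returns the underlying fixed schema instead.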
(defmethod internal:logical-name
((schema api:duration))
(declare (ignore schema))
"duration")
|
14548dded6c9ebc8e92c0db3726776be6e7a57eadd6f3a168e2108394b510d08 | Helium4Haskell/helium | FunctionInMultipleClasses.hs | class ClassA a where
duplicate :: a -> a
class ClassB b where
duplicate :: b -> b
| null | https://raw.githubusercontent.com/Helium4Haskell/helium/5928bff479e6f151b4ceb6c69bbc15d71e29eb47/test/typeClassesStatic/FunctionInMultipleClasses.hs | haskell | class ClassA a where
duplicate :: a -> a
class ClassB b where
duplicate :: b -> b
|
|
87d401994f9b9898daaf6597c12d1a445a68755cf647f6b475f7630030a43eea | nd/sicp | 2.28.scm | (define (fringe tree)
(cond ((null? tree) (list))
((not (pair? tree)) (list tree))
(else (append (fringe (car tree))
(fringe (cdr tree)))))) | null | https://raw.githubusercontent.com/nd/sicp/d8587a0403d95af7c7bcf59b812f98c4f8550afd/ch02/2.28.scm | scheme | (define (fringe tree)
(cond ((null? tree) (list))
((not (pair? tree)) (list tree))
(else (append (fringe (car tree))
(fringe (cdr tree)))))) |
|
a0106d32e165560fb789dea362d58a68744a9b1751ba4a76a80764405e08aad1 | mirage/alcotest | json_output.ml | let () =
let open Alcotest in
let id () = () in
run ~argv:[| ""; "--json" |] __FILE__
[
( "test-a",
[
test_case "First test case" `Quick id;
test_case "Second test case" `Quick id;
] );
("test-b", [ test_case "Third test case" `Quick id ]);
]
| null | https://raw.githubusercontent.com/mirage/alcotest/bb3492901dea03c72b4de6b5660852a020283921/test/e2e/alcotest/passing/json_output.ml | ocaml | let () =
let open Alcotest in
let id () = () in
run ~argv:[| ""; "--json" |] __FILE__
[
( "test-a",
[
test_case "First test case" `Quick id;
test_case "Second test case" `Quick id;
] );
("test-b", [ test_case "Third test case" `Quick id ]);
]
|
|
b4806e000fd3a8200ade6502e74088b0b7a704400329952f9558ee5da95ad379 | alanz/ghc-exactprint | CorePragma.hs | {-# INLINE strictStream #-}
strictStream (Bitstream l v)
    = {-# CORE "Strict Bitstream stream" #-}
S.concatMap stream (GV.stream v)
`S.sized`
Exact l
| null | https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc710/CorePragma.hs | haskell | # INLINE strictStream # | strictStream (Bitstream l v)
    = {-# CORE "Strict Bitstream stream" #-}
S.concatMap stream (GV.stream v)
`S.sized`
Exact l
|
0992a9240ff623a3319b85e23167ea80ee45c29b98d79a070ee6d88201c3a045 | helium/packet-purchaser | pp_cli_registry.erl | -module(pp_cli_registry).
-define(CLI_MODULES, [
pp_cli_info,
pp_cli_config
]).
-export([register_cli/0]).
register_cli() ->
clique:register(?CLI_MODULES).
| null | https://raw.githubusercontent.com/helium/packet-purchaser/c1bffd81ed9dfa462a8c8408ba8f2eaa9215aa21/src/cli/pp_cli_registry.erl | erlang | -module(pp_cli_registry).
-define(CLI_MODULES, [
pp_cli_info,
pp_cli_config
]).
-export([register_cli/0]).
register_cli() ->
clique:register(?CLI_MODULES).
|
|
a15d02d24280e56ac10209ed99bd5fc9812e77048b21203a445f781b731d7df5 | lambe-lang/nethra | binding.mli | type t =
| Signature of string * Term.t Localized.t
| Definition of string * Term.t Localized.t option * Term.t Localized.t
| null | https://raw.githubusercontent.com/lambe-lang/nethra/2ef1dbb6ca0ce14a70f8e81fbc13e54ef12f34a1/lib/nethra/toy/cst/binding.mli | ocaml | type t =
| Signature of string * Term.t Localized.t
| Definition of string * Term.t Localized.t option * Term.t Localized.t
|
|
1b4e652764706831bdae0f703c5e41899c18f84ef3d51276cdf7c19c96bdd254 | mrphlip/aoc | Modulo.hs | {-# OPTIONS_GHC -Wno-tabs #-}
module Modulo (Modulo, modulo, modulus) where
import Data.List
import Utils (extendedGcd)
data Modulo x = !x `Mod` !x deriving Eq
infix 5 `modulo`
n `modulo` m = (n `mod` m) `Mod` m
modulusMismatch (_ `Mod` m) (_ `Mod` m') = m /= m'
modulusMismatch3 (_ `Mod` m) (_ `Mod` m') (_ `Mod` m'') = m /= m' || m' /= m''
modulusMismatchList [] = False
modulusMismatchList (x:xs) = any (modulusMismatch x) xs
instance (Integral x, Show x) => Show (Modulo x) where
showsPrec p (n `Mod` m) s =
(showParen (p >= 9) $
showsPrec 11 n .
showString " `modulo` " .
showsPrec 11 m) s
instance (Integral x) => Enum (Modulo x) where
fromEnum (n `Mod` m) = fromEnum $ toInteger (m*(m-1)`div`2 + n)
toEnum i = let (m,offs) = last $ takeWhile ((<=i).snd) triangles in fromIntegral (i - offs) `Mod` fromIntegral m
where triangles = map(\x->(x,x*(x-1)`div`2)) [1..]
succ (n `Mod` m) = (n+1) `modulo` m
pred (n `Mod` m) = (n-1) `modulo` m
enumFrom (n `Mod` m) = genericDrop n $ cycle $ map (`Mod` m) $ [0..(m-1)]
enumFromTo x y = enumFromThenTo x (succ x) y
-- Note... this will give an infinite list if it never reaches the final value
-- eg [0 `modulo` 10, 2 `modulo` 10 .. 9 `modulo` 10]
-- I think this is the Right Thing for it to do... I think we want it to loop properly
-- if given eg [0 `modulo` 7, 2 `modulo` 7 .. 5 `modulo` 7]
-- which will give [0,2,4,6,1,3,5]
enumFromThenTo x y z
| modulusMismatch3 x y z = error "enumFromThenTo: mismatched moduli"
| x == z = [x]
| otherwise = let (xn `Mod` m) = x; (yn `Mod` _) = y in x:enumFromThenTo y ((yn * 2 - xn) `modulo` m) z
instance (Integral x) => Num (Modulo x) where
(n1 `Mod` m) + (n2 `Mod` m')
| m /= m' = error "(+): mismatched moduli"
| otherwise = (n1 + n2) `modulo` m
(n1 `Mod` m) - (n2 `Mod` m')
| m /= m' = error "(-): mismatched moduli"
| otherwise = (n1 - n2) `modulo` m
(n1 `Mod` m) * (n2 `Mod` m')
| m /= m' = error "(*): mismatched moduli"
| otherwise = (n1 * n2) `modulo` m
negate (n `Mod` m) = (-n) `modulo` m
abs = id
signum (n `Mod` _) = if n == 0 then 0 else 1
fromInteger = error "fromInteger: cannot guess modulus"
-- They're not really ordered, but being Ord is still useful
-- for putting these in sets, etc
instance (Integral x) => Ord (Modulo x) where
compare (a `Mod` _) (b `Mod` _) = compare a b
instance (Integral x) => Real (Modulo x) where
toRational (n `Mod` _) = toRational n
-- It's not really fractional, I know, but it fits the definition to an extent
instance (Integral x) => Fractional (Modulo x) where
fromRational = error "fromRational: cannot guess modulus"
recip (n `Mod` m)
| m == 0 = error "divide by zero"
| c /= 1 = error "recip: value not coprime to modulus"
| otherwise = a `modulo` m
where (a, b, c) = extendedGcd n m
instance (Integral x) => Integral (Modulo x) where
toInteger (n `Mod` _) = toInteger n
a `divMod` b = (a/b, 0)
a `div` b = a/b
a `mod` b = 0
quotRem = divMod
quot = div
rem = mod
-- To extract the number part, use toInteger
modulus (_ `Mod` m) = m
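-- A few worked examples (added for illustration; not from the original file):
--   (3 `modulo` 7) * (5 `modulo` 7)  ==  1 `modulo` 7
--   recip (3 `modulo` 7)             ==  5 `modulo` 7    -- 3*5 = 15 = 2*7 + 1
--   toInteger (4 `modulo` 7)         ==  4
--   modulus (4 `modulo` 7)           ==  7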
| null | https://raw.githubusercontent.com/mrphlip/aoc/06395681eb6b50b838cd4561b2e0aa772aca570a/Modulo.hs | haskell | Note... this will give an infinite list if it never reaches the final value
for putting these in sets, etc
It's not really fractional, I know, but it fits the definition to an extent
To extract the number part, use toInteger | # OPTIONS_GHC -Wno - tabs #
module Modulo (Modulo, modulo, modulus) where
import Data.List
import Utils (extendedGcd)
data Modulo x = !x `Mod` !x deriving Eq
infix 5 `modulo`
n `modulo` m = (n `mod` m) `Mod` m
modulusMismatch (_ `Mod` m) (_ `Mod` m') = m /= m'
modulusMismatch3 (_ `Mod` m) (_ `Mod` m') (_ `Mod` m'') = m /= m' || m' /= m''
modulusMismatchList [] = False
modulusMismatchList (x:xs) = any (modulusMismatch x) xs
instance (Integral x, Show x) => Show (Modulo x) where
showsPrec p (n `Mod` m) s =
(showParen (p >= 9) $
showsPrec 11 n .
showString " `modulo` " .
showsPrec 11 m) s
instance (Integral x) => Enum (Modulo x) where
fromEnum (n `Mod` m) = fromEnum $ toInteger (m*(m-1)`div`2 + n)
toEnum i = let (m,offs) = last $ takeWhile ((<=i).snd) triangles in fromIntegral (i - offs) `Mod` fromIntegral m
where triangles = map(\x->(x,x*(x-1)`div`2)) [1..]
succ (n `Mod` m) = (n+1) `modulo` m
pred (n `Mod` m) = (n-1) `modulo` m
enumFrom (n `Mod` m) = genericDrop n $ cycle $ map (`Mod` m) $ [0..(m-1)]
enumFromTo x y = enumFromThenTo x (succ x) y
eg [ 0 ` modulo ` 10 , 2 ` modulo ` 10 .. 9 ` modulo ` 10 ]
I think this is the Right Thing for it to do ... I think we want it to loop properly
if given eg [ 0 ` modulo ` 7 , 2 ` modulo ` 7 .. 5 ` modulo ` 7 ]
which will give [ 0,2,4,6,1,3,5 ]
enumFromThenTo x y z
| modulusMismatch3 x y z = error "enumFromThenTo: mismatched moduli"
| x == z = [x]
| otherwise = let (xn `Mod` m) = x; (yn `Mod` _) = y in x:enumFromThenTo y ((yn * 2 - xn) `modulo` m) z
instance (Integral x) => Num (Modulo x) where
(n1 `Mod` m) + (n2 `Mod` m')
| m /= m' = error "(+): mismatched moduli"
| otherwise = (n1 + n2) `modulo` m
(n1 `Mod` m) - (n2 `Mod` m')
| m /= m' = error "(-): mismatched moduli"
| otherwise = (n1 - n2) `modulo` m
(n1 `Mod` m) * (n2 `Mod` m')
| m /= m' = error "(*): mismatched moduli"
| otherwise = (n1 * n2) `modulo` m
negate (n `Mod` m) = (-n) `modulo` m
abs = id
signum (n `Mod` _) = if n == 0 then 0 else 1
fromInteger = error "fromInteger: cannot guess modulus"
They 're not really ordered , but being is still useful
instance (Integral x) => Ord (Modulo x) where
compare (a `Mod` _) (b `Mod` _) = compare a b
instance (Integral x) => Real (Modulo x) where
toRational (n `Mod` _) = toRational n
instance (Integral x) => Fractional (Modulo x) where
fromRational = error "fromRational: cannot guess modulus"
recip (n `Mod` m)
| m == 0 = error "divide by zero"
| c /= 1 = error "recip: value not coprime to modulus"
| otherwise = a `modulo` m
where (a, b, c) = extendedGcd n m
instance (Integral x) => Integral (Modulo x) where
toInteger (n `Mod` _) = toInteger n
a `divMod` b = (a/b, 0)
a `div` b = a/b
a `mod` b = 0
quotRem = divMod
quot = div
rem = mod
modulus (_ `Mod` m) = m
|
0d64e3b258c3b603d8bfe74c7df9e49f6f8f01da1262a4b59f119a301ef7a8be | rudolph-miller/cl-gists | file.lisp | (in-package :cl-user)
(defpackage cl-gists.file
(:use :cl
:annot.doc
:cl-gists.util)
(:import-from :alexandria
:remove-from-plist)
(:export :cl-gists.file
:file
:file-name
:file-size
:file-raw-url
:file-type
:file-truncated
:file-language
:file-content
:file-old-name
:make-file
:make-files))
(in-package :cl-gists.file)
(syntax:use-syntax :annot)
@doc
"Structure of File."
(defstruct (file (:constructor make-file (&key name size raw-url type truncated language content old-name
&aux (old-name (or old-name name)))))
(name nil :type (or null string))
(size nil :type (or null integer))
(raw-url nil :type (or null string))
(type nil :type (or null string))
(truncated nil :type boolean)
(language nil :type (or null string))
(content nil :type (or null string))
(old-name nil :type (or null string) :read-only t))
(defun make-files (list)
(mapcar #'(lambda (plist) (apply #'make-file plist)) list))
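;; Usage sketch (added for illustration; not part of the original source):
;;   (make-files '((:name "foo.lisp" :content "(+ 1 2)")
;;                 (:name "bar.txt" :content "hello")))
;; returns a list of two FILE structures; OLD-NAME defaults to NAME through
;; the &aux binding in the constructor above.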
| null | https://raw.githubusercontent.com/rudolph-miller/cl-gists/bcf3687f0af8b2eb5acaeda5db94d67446e56daf/src/file.lisp | lisp | (in-package :cl-user)
(defpackage cl-gists.file
(:use :cl
:annot.doc
:cl-gists.util)
(:import-from :alexandria
:remove-from-plist)
(:export :cl-gists.file
:file
:file-name
:file-size
:file-raw-url
:file-type
:file-truncated
:file-language
:file-content
:file-old-name
:make-file
:make-files))
(in-package :cl-gists.file)
(syntax:use-syntax :annot)
@doc
"Structure of File."
(defstruct (file (:constructor make-file (&key name size raw-url type truncated language content old-name
&aux (old-name (or old-name name)))))
(name nil :type (or null string))
(size nil :type (or null integer))
(raw-url nil :type (or null string))
(type nil :type (or null string))
(truncated nil :type boolean)
(language nil :type (or null string))
(content nil :type (or null string))
(old-name nil :type (or null string) :read-only t))
(defun make-files (list)
(mapcar #'(lambda (plist) (apply #'make-file plist)) list))
|
|
3a7c0e0a65999599dea1623396452295d113e8dc2024834f8ff91fe90aa7ce81 | wdebeaum/step | test2.lisp |
(in-package :lxm)
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
;; Note:: handling it this way overgenerates, e.g., "the two ones in the corner" parses.
(W::ONE
(SENSES
((meta-data :origin trips :entry-date 20060803 :change-date nil :comments nil :wn ("one%1:09:00"))
(syntax (W::one +)) ;; such a strange word, we give it its own feature
(LF-PARENT ont::referential-sem)
(example "the other one")
(preference .96) ;; prefer number sense
)
)
)
(W::OTHER
(SENSES
((meta-data :origin monroe :entry-date 20031219 :change-date nil :comments s14)
(LF-PARENT ONT::referential-sem)
(preference .92) ;; prefer adjectival sense
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/converter/test2.lisp | lisp | such a strange word, we give it its own feature
prefer number sense
prefer adjectival sense |
(in-package :lxm)
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
Note : : handling it this way overgenerates e.g. , " the two ones in the corner " parses .
(W::ONE
(SENSES
((meta-data :origin trips :entry-date 20060803 :change-date nil :comments nil :wn ("one%1:09:00"))
(LF-PARENT ont::referential-sem)
(example "the other one")
)
)
)
(W::OTHER
(SENSES
((meta-data :origin monroe :entry-date 20031219 :change-date nil :comments s14)
(LF-PARENT ONT::referential-sem)
)
)
)
))
|
c5dd59e83dc7c7032ca95539304486f4833fbc29f2cbad802d650f66a61ec94d | ghcjs/ghcjs | data-fixed-show-read.hs |
module Main (main) where
import Data.Fixed
main :: IO ()
main = do doit 38.001
doit 38.009
doit 38.01
doit 38.09
print (read "38" :: Centi)
doit (-38.001)
doit (-38.009)
doit (-38.01)
doit (-38.09)
print (read "-38" :: Centi)
doit :: Centi -> IO ()
doit c = do let s = show c
r = read s :: Centi
putStrLn s
print r
| null | https://raw.githubusercontent.com/ghcjs/ghcjs/e4cd4232a31f6371c761acd93853702f4c7ca74c/test/pkg/base/data-fixed-show-read.hs | haskell |
module Main (main) where
import Data.Fixed
main :: IO ()
main = do doit 38.001
doit 38.009
doit 38.01
doit 38.09
print (read "38" :: Centi)
doit (-38.001)
doit (-38.009)
doit (-38.01)
doit (-38.09)
print (read "-38" :: Centi)
doit :: Centi -> IO ()
doit c = do let s = show c
r = read s :: Centi
putStrLn s
print r
|
|
03448d3d82c65c0c2f86120b7eaa9128734cd725fbabdbfbaf09102dc9544102 | noinia/hgeometry | WSPD.hs | --------------------------------------------------------------------------------
-- |
-- Module      : Algorithms.Geometry.WSPD
-- Copyright   : (C)
-- License : see the LICENSE file
-- Maintainer  :
--
-- Algorithm to construct a well separated pair decomposition (wspd).
--
--------------------------------------------------------------------------------
module Algorithms.Geometry.WSPD
( fairSplitTree
, wellSeparatedPairs
, NodeData(NodeData)
, WSP
, SplitTree
, nodeData
, Level(..)
, reIndexPoints
, distributePoints
, distributePoints'
) where
import Algorithms.Geometry.WSPD.Types
import Control.Lens hiding (Level, levels)
import Control.Monad.Reader
import Control.Monad.ST (ST,runST)
import Data.BinaryTree
import Data.Ext
import qualified Data.Foldable as F
import HGeometry.Number.Radical
import Geometry.Box.Internal
import Geometry.Point
-- import Geometry.Properties
-- import Geometry.Transformation
import Geometry.Vector
import qualified Geometry.Vector as GV
import qualified Data.IntMap.Strict as IntMap
import qualified Data.LSeq as LSeq
import Data.LSeq (LSeq, toSeq,pattern (:<|))
import qualified Data.List as L
import qualified Data.List.NonEmpty as NonEmpty
import Data.Maybe
import Data.Ord (comparing)
import Data.Range
import qualified Data.Range as Range
import qualified Data.Sequence as S
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import GHC.TypeLits
-- import Debug.Trace
--------------------------------------------------------------------------------
-- | Construct a split tree
--
-- running time: \(O(n \log n)\)
fairSplitTree :: (Fractional r, Ord r, Arity d, 1 <= d
, Show r, Show p
)
=> NonEmpty.NonEmpty (Point d r :+ p) -> SplitTree d p r ()
fairSplitTree pts = foldUp node' Leaf $ fairSplitTree' n pts'
where
pts' = imap sortOn . pure . g $ pts
n = length $ pts'^.GV.element @0
sortOn' i = NonEmpty.sortWith (^.core.unsafeCoord i)
sortOn i = LSeq.fromNonEmpty . sortOn' (i + 1)
    -- sorts the points on the first coordinate, and then associates each point
    -- with an index, i; its rank in terms of this first coordinate.
g = NonEmpty.zipWith (\i (p :+ e) -> p :+ (i :+ e)) (NonEmpty.fromList [0..])
. sortOn' 1
-- node' :: b -> a -> b -> b
-- node' :: SplitTree d p r () -> Int -> SplitTree d p r () -> SplitTree d p r ()
node' l j r = Node l (NodeData j (bbOf l <> bbOf r) ()) r
-- | Given a split tree, generate the Well separated pairs
--
-- running time: \(O(s^d n)\)
wellSeparatedPairs :: (Radical r, Fractional r, Ord r, Arity d, Arity (d + 1))
=> r -> SplitTree d p r a -> [WSP d p r a]
wellSeparatedPairs s = f
where
f (Leaf _) = []
f (Node l _ r) = findPairs s l r ++ f l ++ f r
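-- For intuition (comment added here; not in the original module): with a
-- separation factor of, say, s = 2, every pair (l,r) reported by
-- @wellSeparatedPairs 2 (fairSplitTree pts)@ passes the test in
-- 'areWellSeparated'' below, i.e. the distance between the centres of the two
-- bounding boxes is at least (2+1) times the larger of their diameters.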
-- | Given a split tree , generate the well separated pairs such that one set is
-- -- a singleton.
-- running time : \(O(s^d n\log n)\ )
-- wellSeparatedPairSingletons :: (Fractional r, Ord r, AlwaysTrueWSPD d)
--   => r -> SplitTree d p r a -> [(Point d r :+ p, PointSet d p r (Sized a))]
-- wellSeparatedPairSingletons s t = concatMap split $ wellSeparatedPairs s t'
-- where
-- split (l,r) = undefined
-- -- | measure l <= measure r = map (,r) $ F.toList l
-- | otherwise = map ( , l ) $ F.toList r
--     t' = foldUpData (\l nd r -> )
-- t
--------------------------------------------------------------------------------
-- * Building the split tree
-- | Given the points, sorted in every dimension, recursively build a split tree
--
-- The algorithm works in rounds. Each round takes \( O(n) \) time, and halves the
-- number of points. Thus, the total running time is \( O(n log n) \).
--
-- The algorithm essentially builds a path in the split tree; at every node on
the path that we construct , we split the point set into two sets ( L , R )
-- according to the longest side of the bounding box.
--
-- The smaller set is "assigned" to the current node and set asside. We
-- continue to build the path with the larger set until the total number of
-- items remaining is less than n/2.
--
-- To start the next round, each node on the path needs to have the points
-- assigned to that node, sorted in each dimension (i.e. the Vector
-- (PointSeq))'s. Since we have the level assignment, we can compute these
-- lists by traversing each original input list (i.e. one for every dimension)
-- once, and partition the points based on their level assignment.
fairSplitTree' :: (Fractional r, Ord r, Arity d, 1 <= d
, Show r, Show p
)
=> Int -> GV.Vector d (PointSeq d (Idx :+ p) r)
-> BinLeafTree Int (Point d r :+ p)
fairSplitTree' n pts
| n <= 1 = let p = LSeq.head $ pts^.GV.element @0 in Leaf (dropIdx p)
| otherwise = foldr node' (V.last path) $ V.zip nodeLevels (V.init path)
where
-- note that points may also be assigned level 'Nothing'.
(levels, nodeLevels'@(maxLvl NonEmpty.:| _)) = runST $ do
lvls <- MV.replicate n Nothing
ls <- runReaderT (assignLevels (n `div` 2) 0 pts (Level 0 Nothing) []) lvls
lvls' <- V.unsafeFreeze lvls
pure (lvls',ls)
-- TODO: We also need to report the levels in the order in which they are
-- assigned to nodes
nodeLevels = V.fromList . L.reverse . NonEmpty.toList $ nodeLevels'
    -- levels = traceShow ("Levels",levels',maxLvl) levels'
-- path = traceShow ("path", path',nodeLevels) path'
distrPts = distributePoints (1 + maxLvl^.unLevel) levels pts
path = recurse <$> distrPts -- (traceShow ("distributed pts",distrPts) distrPts)
-- node' (lvl,lc) rc | traceShow ("node' ",lvl,lc,rc) False = undefined
node' (lvl,lc) rc = case lvl^?widestDim._Just of
Nothing -> error "Unknown widest dimension"
Just j -> Node lc j rc
recurse pts' = fairSplitTree' (length $ pts'^.GV.element @0)
(reIndexPoints pts')
-- | Assign the points to their the correct class. The 'Nothing' class is
-- considered the last class
distributePoints :: (Arity d , Show r, Show p)
=> Int -> V.Vector (Maybe Level)
-> GV.Vector d (PointSeq d (Idx :+ p) r)
-> V.Vector (GV.Vector d (PointSeq d (Idx :+ p) r))
distributePoints k levels = transpose . fmap (distributePoints' k levels)
transpose :: Arity d => GV.Vector d (V.Vector a) -> V.Vector (GV.Vector d a)
transpose = V.fromList . map GV.vectorFromListUnsafe . L.transpose
. map V.toList . F.toList
-- | Assign the points to their the correct class. The 'Nothing' class is
-- considered the last class
distributePoints' :: Int -- ^ number of classes
-> V.Vector (Maybe Level) -- ^ level assignment
-> PointSeq d (Idx :+ p) r -- ^ input points
-> V.Vector (PointSeq d (Idx :+ p) r)
distributePoints' k levels pts
= fmap fromSeqUnsafe $ V.create $ do
v <- MV.replicate k mempty
forM_ pts $ \p ->
append v (level p) p
pure v
where
level p = maybe (k-1) _unLevel $ levels V.! (p^.extra.core)
append v i p = MV.read v i >>= MV.write v i . (S.|> p)
fromSeqUnsafe :: S.Seq a -> LSeq n a
fromSeqUnsafe = LSeq.promise . LSeq.fromSeq
-- | Given a sequence of points, whose index is increasing in the first
-- dimension, i.e. if idx p < idx q, then p[0] < q[0].
-- Reindex the points so that they again have an index
-- in the range [0,..,n'], where n' is the new number of points.
--
-- running time: O(n' * d) (more or less; we are actually using an intmap for
-- the lookups)
--
-- alternatively: I can unsafe freeze and thaw an existing vector to pass it
-- along to use as mapping. Except then I would have to force the evaluation
-- order, i.e. we cannot be in 'reIndexPoints' for two of the nodes at the same
-- time.
--
-- so, basically, run reIndex points in ST as well.
reIndexPoints :: (Arity d, 1 <= d)
=> GV.Vector d (PointSeq d (Idx :+ p) r)
-> GV.Vector d (PointSeq d (Idx :+ p) r)
reIndexPoints ptsV = fmap reIndex ptsV
where
pts = ptsV^.GV.element @0
reIndex = fmap (\p -> p&extra.core %~ fromJust . flip IntMap.lookup mapping')
mapping' = IntMap.fromAscList $ zip (map (^.extra.core) . F.toList $ pts) [0..]
-- | ST monad with access to the vector storign the level of the points.
type RST s = ReaderT (MV.MVector s (Maybe Level)) (ST s)
{- HLINT ignore assignLevels -}
-- | Assigns the points to a level. Returns the list of levels used. The first
-- level in the list is the level assigned to the rest of the nodes. Their
-- level is actually still set to Nothing in the underlying array.
assignLevels :: (Fractional r, Ord r, Arity d
, Show r, Show p
)
=> Int -- ^ Number of items we need to collect
-> Int -- ^ Number of items we collected so far
-> GV.Vector d (PointSeq d (Idx :+ p) r)
-> Level -- ^ next level to use
-> [Level] -- ^ Levels used so far
-> RST s (NonEmpty.NonEmpty Level)
assignLevels h m pts l prevLvls
| m >= h = pure (l NonEmpty.:| prevLvls)
| otherwise = do
pts' <- compactEnds pts
-- find the widest dimension j = i+1
let j = widestDimension pts'
i = j - 1 -- traceShow ("i",j,pts') j - 1
extJ = (extends pts')^.ix' i
mid = midPoint extJ
-- find the set of points that we have to delete, by looking at the sorted
-- list L_j. As a side effect, this will remove previously assigned points
-- from L_j.
(lvlJPts,deletePts) <- findAndCompact j (pts'^.ix' i) mid
let pts'' = pts'&ix' i .~ lvlJPts
l' = l&widestDim ?~ j
forM_ deletePts $ \p ->
assignLevel p l'
assignLevels h (m + length deletePts) pts'' (nextLevel l) (l' : prevLvls)
-- | Remove already assigned pts from the ends of all vectors.
compactEnds :: Arity d
=> GV.Vector d (PointSeq d (Idx :+ p) r)
-> RST s (GV.Vector d (PointSeq d (Idx :+ p) r))
compactEnds = traverse compactEnds'
-- | Assign level l to point p
assignLevel :: (c :+ (Idx :+ p)) -> Level -> RST s ()
assignLevel p l = ask >>= \levels -> lift $ MV.write levels (p^.extra.core) (Just l)
-- | Get the level of a point
levelOf :: (c :+ (Idx :+ p)) -> RST s (Maybe Level)
levelOf p = ask >>= \levels -> lift $ MV.read levels (p^.extra.core)
-- | Test if the point already has a level assigned to it.
hasLevel :: c :+ (Idx :+ p) -> RST s Bool
hasLevel = fmap isJust . levelOf
-- | Remove allready assigned points from the sequence
--
-- pre: there are points remaining
compactEnds' :: PointSeq d (Idx :+ p) r
-> RST s (PointSeq d (Idx :+ p) r)
compactEnds' (l0 :<| s0) = fmap fromSeqUnsafe . goL $ l0 S.<| toSeq s0
where
goL s@(S.viewl -> l S.:< s') = hasLevel l >>= \case
False -> goR s
True -> goL s'
goL _ = error "Unreachable, but cannot prove it in Haskell"
goR s@(S.viewr -> s' S.:> r) = hasLevel r >>= \case
False -> pure s
True -> goR s'
goR _ = error "Unreachable, but cannot prove it in Haskell"
-- | Given the points, ordered by their j^th coordinate, split the point set
-- into a "left" and a "right" half, i.e. the points whose j^th coordinate is
-- at most the given mid point m, and the points whose j^th coordinate is
-- larger than m.
--
-- We return a pair (Largest set, Smallest set)
--
--
-- findAndCompact works by simultaneously traversing the points from left to
-- right, and from right to left. As soon as we find a point crossing the mid
-- point we stop and return. Thus, in principle this takes only O(|Smallest
-- set|) time.
--
-- running time: O(|Smallest set|) + R, where R is the number of *old* points
-- (i.e. points that should have been removed) in the list.
findAndCompact :: (Ord r, Arity d
, Show r, Show p
)
=> Int
-- ^ the dimension we are in, i.e. so that we know
-- which coordinate of the point to compare
-> PointSeq d (Idx :+ p) r
-> r -- ^ the mid point
-> RST s ( PointSeq d (Idx :+ p) r
, PointSeq d (Idx :+ p) r
)
findAndCompact j (l0 :<| s0) m = fmap select . stepL $ l0 S.<| toSeq s0
where
    -- stepL and stepR together build a data structure (FAC l r S) that
-- contains the left part of the list, i.e. the points before midpoint, and
-- the right part of the list., and a value S that indicates which part is
-- the short side.
-- stepL takes a step on the left side of the list; if the left point l
-- already has been assigned, we continue waling along (and "ignore" the
-- point). If it has not been assigned, and is before the mid point, we
-- take a step from the right, and add l onto the left part. If it is
-- larger than the mid point, we have found our split.
    -- stepL :: S.Seq (Point d r :+ (Idx :+ p)) -> ST s (FindAndCompact d r (Idx :+ p))
stepL s = case S.viewl s of
S.EmptyL -> pure $ FAC mempty mempty L
l S.:< s' -> hasLevel l >>= \case
False -> if l^.core.unsafeCoord j <= m
then addL l <$> stepR s'
else pure $ FAC mempty s L
True -> stepL s' -- delete, continue left
    -- stepR :: S.Seq (Point d r :+ (Idx :+ p)) -> ST s (FindAndCompact d r (Idx :+ p))
stepR s = case S.viewr s of
S.EmptyR -> pure $ FAC mempty mempty R
s' S.:> r -> hasLevel r >>= \case
False -> if r^.core.unsafeCoord j >= m
then addR r <$> stepL s'
else pure $ FAC s mempty R
True -> stepR s'
addL l x = x&leftPart %~ (l S.<|)
addR r x = x&rightPart %~ (S.|> r)
select = over both fromSeqUnsafe . select'
-- select' f | traceShow ("select'", f) False = undefined
select' (FAC l r L) = (r, l)
select' (FAC l r R) = (l, r)
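-- Small worked example (illustration only): for j-th coordinates [1,3,5,8,9],
-- already sorted, and mid point m = 6, stepL collects 1,3,5 from the left
-- while stepR collects 9,8 from the right; the function returns the pair
-- ([1,3,5],[8,9]), i.e. (largest set, smallest set) as documented above.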
-- | Find the widest dimension of the point set
--
-- pre: points are sorted according to their dimension
widestDimension :: (Num r, Ord r, Arity d) => GV.Vector d (PointSeq d p r) -> Int
widestDimension = fst . L.maximumBy (comparing snd) . zip [1..] . F.toList . widths
widths :: (Num r, Arity d) => GV.Vector d (PointSeq d p r) -> GV.Vector d r
widths = fmap Range.width . extends
{- HLINT ignore extends -}
-- | get the extends of the set of points in every dimension, i.e. the left and
-- right boundaries.
--
-- pre: points are sorted according to their dimension
extends :: Arity d => GV.Vector d (PointSeq d p r) -> GV.Vector d (Range r)
extends = imap (\i pts ->
ClosedRange ((LSeq.head pts)^.core.unsafeCoord (i + 1))
((LSeq.last pts)^.core.unsafeCoord (i + 1)))
--------------------------------------------------------------------------------
-- * Finding Well Separated Pairs
findPairs :: (Radical r, Fractional r, Ord r, Arity d, Arity (d + 1))
=> r -> SplitTree d p r a -> SplitTree d p r a
-> [WSP d p r a]
findPairs s l r
| areWellSeparated' s l r = [(l,r)]
| maxWidth l <= maxWidth r = concatMap (findPairs s l) $ children' r
| otherwise = concatMap (findPairs s r) $ children' l
-- | Test if the two sets are well separated with param s
-- areWellSeparated :: (Arity d, Arity (d + 1), Fractional r, Ord r)
-- => r -- ^ separation factor
-- -> SplitTree d p r a
-- -> SplitTree d p r a -> Bool
-- areWellSeparated _ (Leaf _) (Leaf _) = True
-- areWellSeparated s l r = boxBox s (bbOf l) (bbOf r)
-- areWellSeparated s (Leaf p) (Node _ nd _) = pointBox s (p^.core) (nd^.bBox)
-- areWellSeparated s (Node _ nd _) (Leaf p) = pointBox s (p^.core) (nd^.bBox)
-- areWellSeparated s (Node _ ld _) (Node _ rd _) = boxBox s (ld^.bBox) (rd^.bBox)
{- HLINT ignore boxBox -}
-- -- | Test if the point and the box are far enough appart
-- pointBox :: (Fractional r, Ord r, AlwaysTruePFT d, AlwaysTrueTransformation d)
-- => r -> Point d r -> Box d p r -> Bool
-- pointBox s p b = not $ p `inBox` b'
-- where
--     v = (centerPoint b)^.vector
--     b' = translateBy v . scaleUniformlyBy s . translateBy ((-1) *^ v) $ b
-- | Test if the two boxes are sufficiently far appart
-- boxBox :: (Fractional r, Ord r, Arity d, Arity (d + 1))
-- => r -> Box d p r -> Box d p r -> Bool
-- boxBox s lb rb = boxBox' lb rb && boxBox' rb lb
-- where
-- boxBox' b' b = not $ b' `intersects` bOut
-- where
--     v = (centerPoint b)^.vector
--     bOut = translateBy v . scaleUniformlyBy s . translateBy ((-1) *^ v) $ b
--------------------------------------------------------------------------------
-- * Alternative def if wellSeparated that uses fractional
areWellSeparated' :: (Radical r, Fractional r, Ord r, Arity d)
=> r
-> SplitTree d p r a
-> SplitTree d p r a
-> Bool
areWellSeparated' _ (Leaf _) (Leaf _) = True
areWellSeparated' s l r = boxBox1 s (bbOf l) (bbOf r)
-- areWellSeparated' s (Leaf p) (Node _ nd _) = pointBox' s (p^.core) (nd^.bBox)
-- areWellSeparated' s (Node _ nd _) (Leaf p) = pointBox' s (p^.core) (nd^.bBox)
-- areWellSeparated' s (Node _ ld _) (Node _ rd _) = boxBox' s (ld^.bBox) (rd^.bBox)
boxBox1 :: (Radical r, Fractional r, Ord r, Arity d) => r -> Box d p r -> Box d p r -> Bool
boxBox1 s lb rb = euclideanDist (centerPoint lb) (centerPoint rb) >= (s+1)*d
where
diam b = euclideanDist (b^.minP.core.cwMin) (b^.maxP.core.cwMax)
d = max (diam lb) (diam rb)
--------------------------------------------------------------------------------
-- * Helper stuff
-- | Computes the maximum width of a splitTree
maxWidth :: (Arity d, Num r)
=> SplitTree d p r a -> r
maxWidth (Leaf _) = 0
maxWidth (Node _ (NodeData i b _) _) = fromJust $ widthIn' i b
-- | 'Computes' the bounding box of a split tree
bbOf :: Ord r => SplitTree d p r a -> Box d () r
bbOf (Leaf p) = boundingBox $ p^.core
bbOf (Node _ (NodeData _ b _) _) = b
children' :: BinLeafTree v a -> [BinLeafTree v a]
children' (Leaf _) = []
children' (Node l _ r) = [l,r]
-- | Turn a traversal into lens
ix' :: (Arity d, KnownNat d) => Int -> Lens' (GV.Vector d a) a
ix' i = singular (GV.element' i)
dropIdx :: core :+ (t :+ extra) -> core :+ extra
dropIdx (p :+ (_ :+ e)) = p :+ e
--------------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/noinia/hgeometry/ddc7d456cbee3e2cd998857780089e1469ba81ba/hgeometry/src/Algorithms/Geometry/WSPD.hs | haskell | ------------------------------------------------------------------------------
|
License : see the LICENSE file
Algorithm to construct a well separated pair decomposition (wspd).
------------------------------------------------------------------------------
import Geometry.Properties
import Geometry.Transformation
import Debug.Trace
------------------------------------------------------------------------------
| Construct a split tree
running time: \(O(n \log n)\)
node' :: b -> a -> b -> b
node' :: SplitTree d p r () -> Int -> SplitTree d p r () -> SplitTree d p r ()
| Given a split tree, generate the Well separated pairs
running time: \(O(s^d n)\)
| Given a split tree , generate the well separated pairs such that one set is
-- a singleton.
running time : \(O(s^d n\log n)\ )
wellSeparatedPairSingletons s t = concatMap split $ wellSeparatedPairs s t'
where
split (l,r) = undefined
-- | measure l <= measure r = map (,r) $ F.toList l
| otherwise = map ( , l ) $ F.toList r
t
------------------------------------------------------------------------------
* Building the split tree
| Given the points, sorted in every dimension, recursively build a split tree
The algorithm works in rounds. Each round takes \( O(n) \) time, and halves the
number of points. Thus, the total running time is \( O(n log n) \).
The algorithm essentially builds a path in the split tree; at every node on
according to the longest side of the bounding box.
The smaller set is "assigned" to the current node and set asside. We
continue to build the path with the larger set until the total number of
To start the next round, each node on the path needs to have the points
assigned to that node, sorted in each dimension (i.e. the Vector
(PointSeq))'s. Since we have the level assignment, we can compute these
lists by traversing each original input list (i.e. one for every dimension)
once, and partition the points based on their level assignment.
note that points may also be assigned level 'Nothing'.
TODO: We also need to report the levels in the order in which they are
assigned to nodes
path = traceShow ("path", path',nodeLevels) path'
(traceShow ("distributed pts",distrPts) distrPts)
node' (lvl,lc) rc | traceShow ("node' ",lvl,lc,rc) False = undefined
| Assign the points to their the correct class. The 'Nothing' class is
considered the last class
| Assign the points to their the correct class. The 'Nothing' class is
considered the last class
^ number of classes
^ level assignment
^ input points
Reindex the points so that they again have an index
in the range [0,..,n'], where n' is the new number of points.
the lookups)
alternatively: I can unsafe freeze and thaw an existing vector to pass it
along to use as mapping. Except then I would have to force the evaluation
time.
so, basically, run reIndex points in ST as well.
| ST monad with access to the vector storign the level of the points.
level in the list is the level assigned to the rest of the nodes. Their
level is actually still set to Nothing in the underlying array.
^ Number of items we need to collect
^ Number of items we collected so far
^ next level to use
^ Levels used so far
find the widest dimension j = i+1
traceShow ("i",j,pts') j - 1
find the set of points that we have to delete, by looking at the sorted
list L_j. As a side effect, this will remove previously assigned points
from L_j.
| Remove already assigned pts from the ends of all vectors.
| Assign level l to point p
| Get the level of a point
| Test if the point already has a level assigned to it.
| Remove allready assigned points from the sequence
pre: there are points remaining
at most the given mid point m, and the points whose j^th coordinate is
larger than m.
We return a pair (Largest set, Smallest set)
fi ndAndCompact works by simultaneously traversing the points from left to
right, and from right to left. As soon as we find a point crossing the mid
point we stop and return. Thus, in principle this takes only O(|Smallest
set|) time.
running time: O(|Smallest set|) + R, where R is the number of *old* points
(i.e. points that should have been removed) in the list.
^ the dimension we are in, i.e. so that we know
which coordinate of the point to compare
^ the mid point
contains the left part of the list, i.e. the points before midpoint, and
the right part of the list., and a value S that indicates which part is
the short side.
stepL takes a step on the left side of the list; if the left point l
already has been assigned, we continue waling along (and "ignore" the
point). If it has not been assigned, and is before the mid point, we
take a step from the right, and add l onto the left part. If it is
larger than the mid point, we have found our split.
delete, continue left
select' f | traceShow ("select'", f) False = undefined
| Find the widest dimension of the point set
pre: points are sorted according to their dimension
| get the extends of the set of points in every dimension, i.e. the left and
right boundaries.
pre: points are sorted according to their dimension
------------------------------------------------------------------------------
* Finding Well Separated Pairs
| Test if the two sets are well separated with param s
=> r -- ^ separation factor
-> SplitTree d p r a
-> SplitTree d p r a -> Bool
areWellSeparated _ (Leaf _) (Leaf _) = True
areWellSeparated s l r = boxBox s (bbOf l) (bbOf r)
areWellSeparated s (Leaf p) (Node _ nd _) = pointBox s (p^.core) (nd^.bBox)
areWellSeparated s (Node _ nd _) (Leaf p) = pointBox s (p^.core) (nd^.bBox)
-- | Test if the point and the box are far enough appart
=> r -> Point d r -> Box d p r -> Bool
pointBox s p b = not $ p `inBox` b'
where
| Test if the two boxes are sufficiently far appart
=> r -> Box d p r -> Box d p r -> Bool
where
boxBox' b' b = not $ b' `intersects` bOut
where
------------------------------------------------------------------------------
* Alternative def if wellSeparated that uses fractional
areWellSeparated' s (Node _ nd _) (Leaf p) = pointBox' s (p^.core) (nd^.bBox)
------------------------------------------------------------------------------
* Helper stuff
| Computes the maximum width of a splitTree
| 'Computes' the bounding box of a split tree
| Turn a traversal into lens
------------------------------------------------------------------------------ | Module : Algorithms . Geometry . WSPD
Copyright : ( C )
Maintainer :
module Algorithms.Geometry.WSPD
( fairSplitTree
, wellSeparatedPairs
, NodeData(NodeData)
, WSP
, SplitTree
, nodeData
, Level(..)
, reIndexPoints
, distributePoints
, distributePoints'
) where
import Algorithms.Geometry.WSPD.Types
import Control.Lens hiding (Level, levels)
import Control.Monad.Reader
import Control.Monad.ST (ST,runST)
import Data.BinaryTree
import Data.Ext
import qualified Data.Foldable as F
import HGeometry.Number.Radical
import Geometry.Box.Internal
import Geometry.Point
import Geometry.Vector
import qualified Geometry.Vector as GV
import qualified Data.IntMap.Strict as IntMap
import qualified Data.LSeq as LSeq
import Data.LSeq (LSeq, toSeq,pattern (:<|))
import qualified Data.List as L
import qualified Data.List.NonEmpty as NonEmpty
import Data.Maybe
import Data.Ord (comparing)
import Data.Range
import qualified Data.Range as Range
import qualified Data.Sequence as S
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import GHC.TypeLits
fairSplitTree :: (Fractional r, Ord r, Arity d, 1 <= d
, Show r, Show p
)
=> NonEmpty.NonEmpty (Point d r :+ p) -> SplitTree d p r ()
fairSplitTree pts = foldUp node' Leaf $ fairSplitTree' n pts'
where
pts' = imap sortOn . pure . g $ pts
n = length $ pts'^.GV.element @0
sortOn' i = NonEmpty.sortWith (^.core.unsafeCoord i)
sortOn i = LSeq.fromNonEmpty . sortOn' (i + 1)
sorts the points on the first coordinate , and then associates each point
with an index , ; its rank in terms of this first coordinate .
g = NonEmpty.zipWith (\i (p :+ e) -> p :+ (i :+ e)) (NonEmpty.fromList [0..])
. sortOn' 1
node' l j r = Node l (NodeData j (bbOf l <> bbOf r) ()) r
wellSeparatedPairs :: (Radical r, Fractional r, Ord r, Arity d, Arity (d + 1))
=> r -> SplitTree d p r a -> [WSP d p r a]
wellSeparatedPairs s = f
where
f (Leaf _) = []
f (Node l _ r) = findPairs s l r ++ f l ++ f r
wellSeparatedPairSingletons : : ( Fractional r , r , AlwaysTrueWSPD d )
= > r - > SplitTree d p r a - > [ ( Point d r : + p , PointSet d p r ( Sized a ) ) ]
t ' = foldUpData ( \l nd r - > )
the path that we construct , we split the point set into two sets ( L , R )
items remaining is less than n/2 .
fairSplitTree' :: (Fractional r, Ord r, Arity d, 1 <= d
, Show r, Show p
)
=> Int -> GV.Vector d (PointSeq d (Idx :+ p) r)
-> BinLeafTree Int (Point d r :+ p)
fairSplitTree' n pts
| n <= 1 = let p = LSeq.head $ pts^.GV.element @0 in Leaf (dropIdx p)
| otherwise = foldr node' (V.last path) $ V.zip nodeLevels (V.init path)
where
(levels, nodeLevels'@(maxLvl NonEmpty.:| _)) = runST $ do
lvls <- MV.replicate n Nothing
ls <- runReaderT (assignLevels (n `div` 2) 0 pts (Level 0 Nothing) []) lvls
lvls' <- V.unsafeFreeze lvls
pure (lvls',ls)
nodeLevels = V.fromList . L.reverse . NonEmpty.toList $ nodeLevels'
levels = traceShow ( " Levels",levels',maxLvl ) levels '
distrPts = distributePoints (1 + maxLvl^.unLevel) levels pts
node' (lvl,lc) rc = case lvl^?widestDim._Just of
Nothing -> error "Unknown widest dimension"
Just j -> Node lc j rc
recurse pts' = fairSplitTree' (length $ pts'^.GV.element @0)
(reIndexPoints pts')
distributePoints :: (Arity d , Show r, Show p)
=> Int -> V.Vector (Maybe Level)
-> GV.Vector d (PointSeq d (Idx :+ p) r)
-> V.Vector (GV.Vector d (PointSeq d (Idx :+ p) r))
distributePoints k levels = transpose . fmap (distributePoints' k levels)
transpose :: Arity d => GV.Vector d (V.Vector a) -> V.Vector (GV.Vector d a)
transpose = V.fromList . map GV.vectorFromListUnsafe . L.transpose
. map V.toList . F.toList
-> V.Vector (PointSeq d (Idx :+ p) r)
distributePoints' k levels pts
= fmap fromSeqUnsafe $ V.create $ do
v <- MV.replicate k mempty
forM_ pts $ \p ->
append v (level p) p
pure v
where
level p = maybe (k-1) _unLevel $ levels V.! (p^.extra.core)
append v i p = MV.read v i >>= MV.write v i . (S.|> p)
fromSeqUnsafe :: S.Seq a -> LSeq n a
fromSeqUnsafe = LSeq.promise . LSeq.fromSeq
| Given a sequence of points , whose index is increasing in the first
dimension , i.e. if idx p < idx q , then p[0 ] < q[0 ] .
running time : O(n ' * d ) ( more or less ; we are actually using an intmap for
order , i.e. we can not be in ' reIndexPoints ' for two of the nodes at the same
reIndexPoints :: (Arity d, 1 <= d)
=> GV.Vector d (PointSeq d (Idx :+ p) r)
-> GV.Vector d (PointSeq d (Idx :+ p) r)
reIndexPoints ptsV = fmap reIndex ptsV
where
pts = ptsV^.GV.element @0
reIndex = fmap (\p -> p&extra.core %~ fromJust . flip IntMap.lookup mapping')
mapping' = IntMap.fromAscList $ zip (map (^.extra.core) . F.toList $ pts) [0..]
type RST s = ReaderT (MV.MVector s (Maybe Level)) (ST s)
HLINT ignore assignLevels
| Assigns the points to a level . Returns the list of levels used . The first
assignLevels :: (Fractional r, Ord r, Arity d
, Show r, Show p
)
-> GV.Vector d (PointSeq d (Idx :+ p) r)
-> RST s (NonEmpty.NonEmpty Level)
assignLevels h m pts l prevLvls
| m >= h = pure (l NonEmpty.:| prevLvls)
| otherwise = do
pts' <- compactEnds pts
let j = widestDimension pts'
extJ = (extends pts')^.ix' i
mid = midPoint extJ
(lvlJPts,deletePts) <- findAndCompact j (pts'^.ix' i) mid
let pts'' = pts'&ix' i .~ lvlJPts
l' = l&widestDim ?~ j
forM_ deletePts $ \p ->
assignLevel p l'
assignLevels h (m + length deletePts) pts'' (nextLevel l) (l' : prevLvls)
compactEnds :: Arity d
=> GV.Vector d (PointSeq d (Idx :+ p) r)
-> RST s (GV.Vector d (PointSeq d (Idx :+ p) r))
compactEnds = traverse compactEnds'
assignLevel :: (c :+ (Idx :+ p)) -> Level -> RST s ()
assignLevel p l = ask >>= \levels -> lift $ MV.write levels (p^.extra.core) (Just l)
levelOf :: (c :+ (Idx :+ p)) -> RST s (Maybe Level)
levelOf p = ask >>= \levels -> lift $ MV.read levels (p^.extra.core)
hasLevel :: c :+ (Idx :+ p) -> RST s Bool
hasLevel = fmap isJust . levelOf
compactEnds' :: PointSeq d (Idx :+ p) r
-> RST s (PointSeq d (Idx :+ p) r)
compactEnds' (l0 :<| s0) = fmap fromSeqUnsafe . goL $ l0 S.<| toSeq s0
where
goL s@(S.viewl -> l S.:< s') = hasLevel l >>= \case
False -> goR s
True -> goL s'
goL _ = error "Unreachable, but cannot prove it in Haskell"
goR s@(S.viewr -> s' S.:> r) = hasLevel r >>= \case
False -> pure s
True -> goR s'
goR _ = error "Unreachable, but cannot prove it in Haskell"
| Given the points , ordered by their j^th coordinate , split the point set
into a " left " and a " right " half , i.e. the points whose j^th coordinate is
findAndCompact :: (Ord r, Arity d
, Show r, Show p
)
=> Int
-> PointSeq d (Idx :+ p) r
-> RST s ( PointSeq d (Idx :+ p) r
, PointSeq d (Idx :+ p) r
)
findAndCompact j (l0 :<| s0) m = fmap select . stepL $ l0 S.<| toSeq s0
where
stepL and stepR together build a data structure ( FAC l r S ) that
stepL : : S.Seq ( Point d r : + ( Idx : + p ) ) - > ST s ( FindAndCompact d r ( Idx : + p ) )
stepL s = case S.viewl s of
S.EmptyL -> pure $ FAC mempty mempty L
l S.:< s' -> hasLevel l >>= \case
False -> if l^.core.unsafeCoord j <= m
then addL l <$> stepR s'
else pure $ FAC mempty s L
stepR : : S.Seq ( Point d r : + ( Idx : + p ) ) - > ST s ( FindAndCompact d r ( Idx : + p ) )
stepR s = case S.viewr s of
S.EmptyR -> pure $ FAC mempty mempty R
s' S.:> r -> hasLevel r >>= \case
False -> if r^.core.unsafeCoord j >= m
then addR r <$> stepL s'
else pure $ FAC s mempty R
True -> stepR s'
addL l x = x&leftPart %~ (l S.<|)
addR r x = x&rightPart %~ (S.|> r)
select = over both fromSeqUnsafe . select'
select' (FAC l r L) = (r, l)
select' (FAC l r R) = (l, r)
widestDimension :: (Num r, Ord r, Arity d) => GV.Vector d (PointSeq d p r) -> Int
widestDimension = fst . L.maximumBy (comparing snd) . zip [1..] . F.toList . widths
widths :: (Num r, Arity d) => GV.Vector d (PointSeq d p r) -> GV.Vector d r
widths = fmap Range.width . extends
HLINT ignore extends
extends :: Arity d => GV.Vector d (PointSeq d p r) -> GV.Vector d (Range r)
extends = imap (\i pts ->
ClosedRange ((LSeq.head pts)^.core.unsafeCoord (i + 1))
((LSeq.last pts)^.core.unsafeCoord (i + 1)))
findPairs :: (Radical r, Fractional r, Ord r, Arity d, Arity (d + 1))
=> r -> SplitTree d p r a -> SplitTree d p r a
-> [WSP d p r a]
findPairs s l r
| areWellSeparated' s l r = [(l,r)]
| maxWidth l <= maxWidth r = concatMap (findPairs s l) $ children' r
| otherwise = concatMap (findPairs s r) $ children' l
areWellSeparated : : ( Arity d , Arity ( d + 1 ) , Fractional r , r )
areWellSeparated s ( Node _ ld _ ) ( Node _ rd _ ) = boxBox s ( ld^.bBox ) ( rd^.bBox )
HLINT ignore boxBox
pointBox : : ( Fractional r , , AlwaysTruePFT d , AlwaysTrueTransformation d )
v = ( centerPoint b)^.vector
b ' = translateBy v . scaleUniformlyBy s . ( ( -1 ) * ^ v ) $ b
boxBox : : ( Fractional r , , Arity d , Arity ( d + 1 ) )
boxBox s lb rb = boxBox ' lb rb & & boxBox ' rb lb
v = ( centerPoint b)^.vector
bOut = translateBy v . scaleUniformlyBy s . ( ( -1 ) * ^ v ) $ b
areWellSeparated' :: (Radical r, Fractional r, Ord r, Arity d)
=> r
-> SplitTree d p r a
-> SplitTree d p r a
-> Bool
areWellSeparated' _ (Leaf _) (Leaf _) = True
areWellSeparated' s l r = boxBox1 s (bbOf l) (bbOf r)
( Leaf p ) ( Node _ nd _ ) = pointBox ' s ( p^.core ) ( nd^.bBox )
areWellSeparated ' s ( Node _ ld _ ) ( Node _ rd _ ) = boxBox ' s ( ld^.bBox ) ( rd^.bBox )
boxBox1 :: (Radical r, Fractional r, Ord r, Arity d) => r -> Box d p r -> Box d p r -> Bool
boxBox1 s lb rb = euclideanDist (centerPoint lb) (centerPoint rb) >= (s+1)*d
where
diam b = euclideanDist (b^.minP.core.cwMin) (b^.maxP.core.cwMax)
d = max (diam lb) (diam rb)
maxWidth :: (Arity d, Num r)
=> SplitTree d p r a -> r
maxWidth (Leaf _) = 0
maxWidth (Node _ (NodeData i b _) _) = fromJust $ widthIn' i b
bbOf :: Ord r => SplitTree d p r a -> Box d () r
bbOf (Leaf p) = boundingBox $ p^.core
bbOf (Node _ (NodeData _ b _) _) = b
children' :: BinLeafTree v a -> [BinLeafTree v a]
children' (Leaf _) = []
children' (Node l _ r) = [l,r]
ix' :: (Arity d, KnownNat d) => Int -> Lens' (GV.Vector d a) a
ix' i = singular (GV.element' i)
dropIdx :: core :+ (t :+ extra) -> core :+ extra
dropIdx (p :+ (_ :+ e)) = p :+ e
|
cf0c7fd75084e48b476c487cce536758cc361990b0c64db7fe2de615b974aa42 | HJianBo/sserl | sserl_conn.erl | %%%-------------------------------------------------------------------
%%% @author >
%%% @copyright (C) 2016,
%%% @doc
%%%
%%% @end
%%% Created : 15 May 2016 by >
%%%-------------------------------------------------------------------
-module(sserl_conn).
-behaviour(gen_server).
%% API
-export([start_link/2, init/2]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-include("shadowsocks.hrl").
-include("sserl.hrl").
-define(SERVER, ?MODULE).
-define(RECV_TIMOUT, 180000).
-define(REPORT_INTERVAL, 1000).
%% 1 MB
-define(TCP_OPTS, [binary, {packet, raw}, {active, once},{nodelay, true}]).
-record(state, {
          conn_id, %% globally unique identifier, convenient when persisting to the database
csocket,
ssocket,
source = undefined,
target = undefined,
ota,
port,
down = 0,
up = 0,
sending = 0,
ota_data = <<>>,
ota_len = 2,
ota_id = 0,
ota_iv = <<>>,
type = server,
cipher_info,
c2s_handler=undefined,
s2c_handler=undefined
}).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @spec start_link(Socket, Info) -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
start_link(Socket, Info) ->
proc_lib:start_link(?MODULE, init, [Socket, Info]).
%% gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
init(Socket, {Port, Server, OTA, Type, {Method,Password}}) ->
proc_lib:init_ack({ok, self()}),
wait_socket(Socket),
Cipher = shadowsocks_crypt:init_cipher_info(Method, Password),
{ok, Source} = inet:peername(Socket),
State = #state{conn_id=sserl_utils:gen_randnum(),
csocket=Socket, ssocket=undefined,
ota=OTA, port=Port, type=Type,
source = Source,
target = Server,
cipher_info=Cipher},
init_proto(State).
init_proto(State=#state{type=server,csocket=CSocket}) ->
State1 = recv_ivec(State),
{Addr, Port, Data, State2} = recv_target(State1),
gen_event:notify(?STAT_EVENT, {conn, {connect, self(), State#state.source, {Addr, Port}}}),
case gen_tcp:connect(Addr, Port, ?TCP_OPTS) of
{ok, SSocket} ->
self() ! {send, Data},
inet:setopts(CSocket, [{active, once}]),
erlang:send_after(?REPORT_INTERVAL, self(), report_flow),
gen_server:enter_loop(?MODULE, [], init_handler(State2#state{ssocket=SSocket,target={Addr,Port}}));
{error, Reason} ->
lager:error("conn init failed, state: ~p, reason: ~p~n", [State, Reason]),
exit(Reason)
end;
init_proto(State=#state{type=client, csocket=CSocket, target={Addr,Port},ota=OTA,cipher_info=Cipher}) ->
{Atype, Data} = recv_socks5(CSocket),
case gen_tcp:connect(Addr, Port, ?TCP_OPTS) of
{ok, SSocket} ->
{NewCipher, NewData} =
case OTA of
true ->
Hmac = shadowsocks_crypt:hmac([Cipher#cipher_info.encode_iv, Cipher#cipher_info.key],
[Atype bor ?OTA_FLAG, Data]),
shadowsocks_crypt:encode(Cipher, [Atype bor ?OTA_FLAG, Data, Hmac]);
false ->
shadowsocks_crypt:encode(Cipher, [Atype, Data])
end,
ok = gen_tcp:send(SSocket, NewData),
inet:setopts(CSocket, [{active, once}]),
State1 = State#state{ssocket = SSocket, cipher_info=NewCipher, ota_iv=Cipher#cipher_info.encode_iv},
gen_server:enter_loop(?MODULE, [], init_handler(State1));
{error, Reason} ->
exit(Reason)
end.
init_handler(State=#state{type=client, ota=true}) ->
State#state{c2s_handler=fun handle_client_ota_c2s/2,
s2c_handler=fun handle_client_s2c/2};
init_handler(State=#state{type=client, ota=false}) ->
State#state{c2s_handler=fun handle_client_c2s/2,
s2c_handler=fun handle_client_s2c/2};
init_handler(State=#state{type=server, ota=true}) ->
State#state{c2s_handler=fun handle_server_ota_c2s/2,
s2c_handler=fun handle_server_s2c/2};
init_handler(State=#state{type=server, ota=false}) ->
State#state{c2s_handler=fun handle_server_c2s/2,
s2c_handler=fun handle_server_s2c/2}.
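%% For example (comment added for clarity; not in the original module): a
%% server-side connection with one-time-auth enabled ends up with
%%   c2s_handler = fun handle_server_ota_c2s/2,
%%   s2c_handler = fun handle_server_s2c/2,
%% and every {tcp, Socket, Data} message received in handle_info/2 below is
%% dispatched through whichever pair of handlers was selected here.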
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initializes the server
%%
%% @spec init(Args) -> {ok, State} |
%% {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
init([]) ->
{ok, #state{}}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling call messages
%%
%% @spec handle_call(Request, From, State) ->
%% {reply, Reply, State} |
%% {reply, Reply, State, Timeout} |
%% {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling cast messages
%%
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_cast(_Msg, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling all non call/cast messages
%%
%% @spec handle_info(Info, State) -> {noreply, State} |
%%                                   {noreply, State, Timeout} |
%%                                   {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_info({tcp, CSocket, Data}, State=#state{csocket=CSocket, c2s_handler=Handler}) ->
inet:setopts(CSocket, [{active, once}]),
Handler(Data, State);
handle_info({tcp, SSocket, Data}, State=#state{ssocket=SSocket, s2c_handler=Handler}) ->
inet:setopts(SSocket, [{active, once}]),
Handler(Data, State);
%% socket send reply
handle_info({inet_reply, _Socket, _Error}, State = #state{csocket=undefined,sending=1}) ->
{stop, normal, State};
handle_info({inet_reply, _Socket, _Error}, State = #state{ssocket=undefined, sending=1}) ->
{stop, normal, State};
handle_info({inet_reply, _, _}, State = #state{sending=N}) ->
{noreply, State#state{sending=N-1}};
%% socket closed
handle_info({tcp_closed, _Socket}, State = #state{sending=0}) ->
{stop, normal, State};
handle_info({tcp_closed, CSocket}, State = #state{csocket=CSocket}) ->
{noreply, State#state{csocket=undefined}};
handle_info({tcp_closed, SSocket}, State = #state{ssocket=SSocket}) ->
{noreply, State#state{ssocket=undefined}};
%% report flow
handle_info(report_flow, State = #state{conn_id=ConnId, port=Port, source=Source, target=Target,
down=Down, up=Up}) when Down + Up >= ?REPORT_MIN ->
Traffic = #traffic{id=ConnId, port=Port, source=Source, target=Target, down=Down, up=Up, time=sserl_utils:timestamp()},
gen_event:notify(?TRAFFIC_EVENT, {sending, Traffic}),
erlang:send_after(?REPORT_INTERVAL, self(), report_flow),
{noreply, State#state{down=0, up=0}};
handle_info(report_flow, State) ->
erlang:send_after(?REPORT_INTERVAL, self(), report_flow),
{noreply, State};
%% first send
handle_info({send, Data}, State=#state{type=server,ota=false,ssocket=SSocket, up=Flow, sending=S}) ->
S1 = try_send(SSocket, Data),
{noreply, State#state{sending=S+S1, up=Flow+size(Data)}};
handle_info({send, Data}, State=#state{type=server,ota=true, ota_data=Rest}) ->
handle_ota(State#state{ota_data= <<Rest/binary, Data/binary>>});
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(_Reason, _State = #state{conn_id=ConnId, port=Port, source=Source, target=Target,
down=Down, up=Up}) ->
Traffic = #traffic{id=ConnId, port=Port, source=Source, target=Target, down=Down, up=Up, time=sserl_utils:timestamp()},
gen_event:notify(?TRAFFIC_EVENT, {complete, Traffic}),
ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%%
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% ----------------------------------------------------------------------------------------------------
%% Data encoding / decoding
%% -----------------------------------------------------------------------------------------------------
handle_client_c2s(Data, State=#state{ssocket=SSocket, cipher_info=CipherInfo,
up=Flow, sending=S}) ->
{CipherInfo1, EncData} = shadowsocks_crypt:encode(CipherInfo, Data),
S1 = try_send(SSocket, EncData),
{noreply, State#state{cipher_info=CipherInfo1, up=Flow+size(Data), sending=S+S1}}.
handle_client_ota_c2s(Data, State=#state{ssocket=SSocket, cipher_info=CipherInfo,
ota_iv=Iv, ota_id=Id, up=Flow, sending=S}) ->
Hmac = shadowsocks_crypt:hmac([Iv, <<Id:32/big>>], Data),
Len = byte_size(Data),
{CipherInfo1, EncData} = shadowsocks_crypt:encode(CipherInfo, [<<Len:16/big>>, Hmac, Data]),
S1 = try_send(SSocket, EncData),
{noreply, State#state{cipher_info=CipherInfo1, up=Flow+size(Data), sending=S+S1, ota_id=Id+1}}.
handle_server_c2s(Data, State=#state{ssocket=SSocket, cipher_info=CipherInfo,
up=Flow, sending=S}) ->
{CipherInfo1, DecData} = shadowsocks_crypt:decode(CipherInfo, Data),
S1 = try_send(SSocket, DecData),
{noreply, State#state{cipher_info=CipherInfo1, up=Flow+size(Data), sending=S+S1}}.
handle_server_ota_c2s(Data, State=#state{cipher_info=CipherInfo,ota_data=Rest}) ->
{CipherInfo1, DecData} = shadowsocks_crypt:decode(CipherInfo, Data),
handle_ota(State#state{ota_data= <<Rest/binary, DecData/binary>>, cipher_info=CipherInfo1}).
handle_client_s2c(Data, State=#state{csocket=CSocket, cipher_info=CipherInfo,
down=Flow, sending=S}) ->
{CipherInfo1, DecData} = shadowsocks_crypt:decode(CipherInfo, Data),
S1 = try_send(CSocket, DecData),
{noreply, State#state{cipher_info=CipherInfo1, down=Flow+size(Data), sending=S+S1}}.
handle_server_s2c(Data, State=#state{csocket=CSocket, cipher_info=CipherInfo,
down=Flow, sending=S}) ->
{CipherInfo1, EncData} = shadowsocks_crypt:encode(CipherInfo, Data),
S1 = try_send(CSocket, EncData),
{noreply, State#state{cipher_info=CipherInfo1, down=Flow+size(Data), sending=S+S1}}.
%% handle ota frame
handle_ota(State = #state{ota_data=Data, ota_len=2}) when byte_size(Data) >= 2 ->
<<DataLen:16/big, _/binary>> = Data,
handle_ota(State#state{ota_len=DataLen+?HMAC_LEN+2});
handle_ota(State = #state{ota_iv=Iv,ota_data=Data, ota_len=Len, ota_id=Id,
ssocket=SSocket, up=Flow, sending=S}) when byte_size(Data) >= Len ->
DataLen = Len-?HMAC_LEN - 2,
<<_:16/big, Hmac:?HMAC_LEN/binary, FrameData:DataLen/binary, Rest/binary>> = Data,
case shadowsocks_crypt:hmac([Iv, <<Id:32/big>>], FrameData) of
Hmac ->
S1 = try_send(SSocket, FrameData),
handle_ota(State#state{up=Flow+size(FrameData), sending=S+S1, ota_data=Rest,ota_len=2,ota_id=Id+1});
_ ->
{stop, {error, bad_ota_hmac}, State}
end;
handle_ota(State) ->
{noreply, State}.
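%% Illustrative note (not part of the original module): from the two clauses
%% above, each one-time-auth (OTA) chunk appears to be laid out as
%%   <<DataLen:16/big, Hmac:?HMAC_LEN/binary, FrameData:DataLen/binary>>
%% where the HMAC is keyed by [Iv, <<Id:32/big>>] and Id is a per-connection
%% chunk counter. A minimal sketch of a matching encoder (the function name is
%% an assumption; nothing like it is exported by this module):
%%
%%   encode_ota_chunk(Iv, Id, FrameData) ->
%%       Hmac = shadowsocks_crypt:hmac([Iv, <<Id:32/big>>], FrameData),
%%       <<(byte_size(FrameData)):16/big, Hmac/binary, FrameData/binary>>.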
%% ---------------------------------------------------------------------------------------------------------
wait_socket(Socket) ->
receive
{shoot, Socket} ->
ok;
_ ->
wait_socket(Socket)
end.
%% recv the iv data
recv_ivec(State = #state{csocket=Socket,
cipher_info=#cipher_info{method=Method,key=Key}=CipherInfo}) ->
{_, IvLen} = shadowsocks_crypt:key_iv_len(Method),
{ok, IvData} = gen_tcp:recv(Socket, IvLen, ?RECV_TIMOUT),
StreamState = shadowsocks_crypt:stream_init(Method, Key, IvData),
State#state{
ota_iv = IvData,
cipher_info=CipherInfo#cipher_info{
decode_iv=IvData, stream_dec_state=StreamState
}
}.
%% recv and decode target addr and port
recv_target(State) ->
{<<AddrType:8/big, Data/binary>>, State1} = recv_decode(1, <<>>, State),
{IPPort, Addr, Port, Rest, NewState} =
case ?GET_ATYP(AddrType) of
?SOCKS5_ATYP_V4 ->
{<<Data1:6/binary, Data2/binary>>, State2} = recv_decode(6, Data, State1),
<<IP1:8/big,IP2:8/big,IP3:8/big,IP4:8/big, DestPort:16/big>> = Data1,
{Data1, {IP1,IP2,IP3,IP4}, DestPort, Data2, State2};
?SOCKS5_ATYP_V6 ->
{<<Data1:18/binary, Data2/binary>>, State2} = recv_decode(18, Data, State1),
<<IP1:16/big,IP2:16/big,IP3:16/big,IP4:16/big,
IP5:16/big,IP6:16/big,IP7:16/big,IP8:16/big,
DestPort:16/big>> = Data1,
{Data1, {IP1,IP2,IP3,IP4,IP5,IP6,IP7,IP8}, DestPort, Data2, State2};
?SOCKS5_ATYP_DOM ->
{<<DomLen:8/big, Data1/binary>>, State2} = recv_decode(1, Data, State1),
DPLen = DomLen+2,
{<<Data2:DPLen/binary, Data3/binary>>, State3} = recv_decode(DomLen+2, Data1, State2),
<<Domain:DomLen/binary, DestPort:16/big>> = Data2,
{[DomLen,Data2], binary_to_list(Domain), DestPort, Data3, State3};
_ ->
lager:error("error_address_type ~p", [AddrType]),
exit({error_address_type, AddrType})
end,
case {?IS_OTA(AddrType), NewState#state.ota} of
{true, _} ->
{<<Hmac:?HMAC_LEN/binary, Rest2/binary>>, NewState2} = recv_decode(?HMAC_LEN, Rest, NewState),
#cipher_info{key=Key} = NewState2#state.cipher_info,
case shadowsocks_crypt:hmac([NewState2#state.ota_iv, Key], [AddrType, IPPort]) of
Hmac ->
{Addr, Port, Rest2, NewState2#state{ota=true}};
_ ->
throw({error, ota_bad_hmac})
end;
{_, true} ->
throw({error, missing_ota});
{false, false} ->
{Addr, Port, Rest, NewState#state{ota=false}}
end.
%% recv and decode data until got input length
recv_decode(Len, Data, State) when byte_size(Data) >= Len ->
{Data, State};
recv_decode(Len, Data, State = #state{csocket=Socket, cipher_info=CipherInfo}) ->
{ok, Data1} = gen_tcp:recv(Socket, 0, ?RECV_TIMOUT),
{CipherInfo1, Data2} = shadowsocks_crypt:decode(CipherInfo, Data1),
Data3 = <<Data/binary, Data2/binary>>,
recv_decode(Len, Data3, State#state{cipher_info=CipherInfo1}).
%% recv socks5 request
recv_socks5(Socket) ->
%% ------ handshake --------------------------
%% exactly socks5 version otherwise boom!!!
<<?SOCKS5_VER:8, NMethods:8/big>> = exactly_recv(Socket, 2),
%% don't care methods
_ = exactly_recv(Socket, NMethods),
%% response ok
ok = gen_tcp:send(Socket, <<?SOCKS5_VER:8, 0>>),
%% ------ socks5 request -------------------------
%% only support connect
<<?SOCKS5_VER:8, ?SOCKS5_REQ_CONNECT:8, 0, Atyp:8/big>> = exactly_recv(Socket, 4),
Ret = case Atyp of
?SOCKS5_ATYP_V4 ->
exactly_recv(Socket, 6);
?SOCKS5_ATYP_V6 ->
exactly_recv(Socket, 18);
?SOCKS5_ATYP_DOM ->
<<DomLen:8/big>> = exactly_recv(Socket, 1),
[DomLen,exactly_recv(Socket, DomLen+2)]
end,
ok = gen_tcp:send(Socket, <<?SOCKS5_VER, ?SOCKS5_REP_OK, 0, ?SOCKS5_ATYP_V4, 0:32,0:16>>),
{Atyp,Ret}.
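%% Illustrative example (not part of the original module): for a CONNECT to
%% "example.com":443 the exchange handled above would look roughly like
%%   C -> S : <<5, 1, 0>>                                greeting, 1 method, no-auth
%%   S -> C : <<5, 0>>                                   method accepted
%%   C -> S : <<5, 1, 0, 3, 11, "example.com", 443:16>>  CONNECT, ATYP = domain
%%   S -> C : <<5, 0, 0, 1, 0:32, 0:16>>                 success with a dummy bind address
%% The exact constants depend on the ?SOCKS5_* macros defined in shadowsocks.hrl.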
exactly_recv(Socket, Size) ->
{ok, Ret} = gen_tcp:recv(Socket, Size, ?RECV_TIMOUT),
Ret.
%% try to send package
%% return 1 if success else return 0
try_send(Socket, Data) ->
try erlang:port_command(Socket, Data) of
_ -> 1
catch
error:_E -> 0
end.
| null | https://raw.githubusercontent.com/HJianBo/sserl/9e42930caf8bfe90ae9ed2edac2e0672f7e5e55e/apps/sserl/src/sserl_conn.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
API
gen_server callbacks
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server
@end
--------------------------------------------------------------------
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initializes the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling call messages
{reply, Reply, State} |
{stop, Reason, Reply, State} |
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
socket send reply
socket closed
report flow
first send
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
@end
--------------------------------------------------------------------
===================================================================
===================================================================
----------------------------------------------------------------------------------------------------
Data encoding / decoding
-----------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------
recv the iv data
recv and decode target addr and port
recv and decode data until got input length
------ handshake --------------------------
don't care methods
response ok
try to send package | @author >
%% (C) 2016,
%% Created : 15 May 2016 by >
-module(sserl_conn).
-behaviour(gen_server).
-export([start_link/2, init/2]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-include("shadowsocks.hrl").
-include("sserl.hrl").
-define(SERVER, ?MODULE).
-define(RECV_TIMOUT, 180000).
-define(REPORT_INTERVAL, 1000).
%% 1MB
-define(TCP_OPTS, [binary, {packet, raw}, {active, once},{nodelay, true}]).
-record(state, {
    conn_id,        %% globally unique identifier, convenient for storing in the database
csocket,
ssocket,
source = undefined,
target = undefined,
ota,
port,
down = 0,
up = 0,
sending = 0,
ota_data = <<>>,
ota_len = 2,
ota_id = 0,
ota_iv = <<>>,
type = server,
cipher_info,
c2s_handler=undefined,
s2c_handler=undefined
}).
%% @spec start_link(Socket, Info) -> {ok, Pid} | ignore | {error, Error}
start_link(Socket, Info) ->
proc_lib:start_link(?MODULE, init, [Socket, Info]).
init(Socket, {Port, Server, OTA, Type, {Method,Password}}) ->
proc_lib:init_ack({ok, self()}),
wait_socket(Socket),
Cipher = shadowsocks_crypt:init_cipher_info(Method, Password),
{ok, Source} = inet:peername(Socket),
State = #state{conn_id=sserl_utils:gen_randnum(),
csocket=Socket, ssocket=undefined,
ota=OTA, port=Port, type=Type,
source = Source,
target = Server,
cipher_info=Cipher},
init_proto(State).
init_proto(State=#state{type=server,csocket=CSocket}) ->
State1 = recv_ivec(State),
{Addr, Port, Data, State2} = recv_target(State1),
gen_event:notify(?STAT_EVENT, {conn, {connect, self(), State#state.source, {Addr, Port}}}),
case gen_tcp:connect(Addr, Port, ?TCP_OPTS) of
{ok, SSocket} ->
self() ! {send, Data},
inet:setopts(CSocket, [{active, once}]),
erlang:send_after(?REPORT_INTERVAL, self(), report_flow),
gen_server:enter_loop(?MODULE, [], init_handler(State2#state{ssocket=SSocket,target={Addr,Port}}));
{error, Reason} ->
lager:error("conn init failed, state: ~p, reason: ~p~n", [State, Reason]),
exit(Reason)
end;
init_proto(State=#state{type=client, csocket=CSocket, target={Addr,Port},ota=OTA,cipher_info=Cipher}) ->
{Atype, Data} = recv_socks5(CSocket),
case gen_tcp:connect(Addr, Port, ?TCP_OPTS) of
{ok, SSocket} ->
{NewCipher, NewData} =
case OTA of
true ->
Hmac = shadowsocks_crypt:hmac([Cipher#cipher_info.encode_iv, Cipher#cipher_info.key],
[Atype bor ?OTA_FLAG, Data]),
shadowsocks_crypt:encode(Cipher, [Atype bor ?OTA_FLAG, Data, Hmac]);
false ->
shadowsocks_crypt:encode(Cipher, [Atype, Data])
end,
ok = gen_tcp:send(SSocket, NewData),
inet:setopts(CSocket, [{active, once}]),
State1 = State#state{ssocket = SSocket, cipher_info=NewCipher, ota_iv=Cipher#cipher_info.encode_iv},
gen_server:enter_loop(?MODULE, [], init_handler(State1));
{error, Reason} ->
exit(Reason)
end.
init_handler(State=#state{type=client, ota=true}) ->
State#state{c2s_handler=fun handle_client_ota_c2s/2,
s2c_handler=fun handle_client_s2c/2};
init_handler(State=#state{type=client, ota=false}) ->
State#state{c2s_handler=fun handle_client_c2s/2,
s2c_handler=fun handle_client_s2c/2};
init_handler(State=#state{type=server, ota=true}) ->
State#state{c2s_handler=fun handle_server_ota_c2s/2,
s2c_handler=fun handle_server_s2c/2};
init_handler(State=#state{type=server, ota=false}) ->
State#state{c2s_handler=fun handle_server_c2s/2,
s2c_handler=fun handle_server_s2c/2}.
%% @private
%% @spec init(Args) -> {ok, State} |
%%                     {ok, State, Timeout} |
init([]) ->
{ok, #state{}}.
%% @private
%% @spec handle_call(Request, From, State) ->
%%                                   {reply, Reply, State, Timeout} |
%%                                   {noreply, State} |
%%                                   {noreply, State, Timeout} |
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
%% @private
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%%                                  {noreply, State, Timeout} |
handle_cast(_Msg, State) ->
{noreply, State}.
%% @private
%% @spec handle_info(Info, State) -> {noreply, State} |
%%                                   {noreply, State, Timeout} |
handle_info({tcp, CSocket, Data}, State=#state{csocket=CSocket, c2s_handler=Handler}) ->
inet:setopts(CSocket, [{active, once}]),
Handler(Data, State);
handle_info({tcp, SSocket, Data}, State=#state{ssocket=SSocket, s2c_handler=Handler}) ->
inet:setopts(SSocket, [{active, once}]),
Handler(Data, State);
handle_info({inet_reply, _Socket, _Error}, State = #state{csocket=undefined,sending=1}) ->
{stop, normal, State};
handle_info({inet_reply, _Socket, _Error}, State = #state{ssocket=undefined, sending=1}) ->
{stop, normal, State};
handle_info({inet_reply, _, _}, State = #state{sending=N}) ->
{noreply, State#state{sending=N-1}};
handle_info({tcp_closed, _Socket}, State = #state{sending=0}) ->
{stop, normal, State};
handle_info({tcp_closed, CSocket}, State = #state{csocket=CSocket}) ->
{noreply, State#state{csocket=undefined}};
handle_info({tcp_closed, SSocket}, State = #state{ssocket=SSocket}) ->
{noreply, State#state{ssocket=undefined}};
handle_info(report_flow, State = #state{conn_id=ConnId, port=Port, source=Source, target=Target,
down=Down, up=Up}) when Down + Up >= ?REPORT_MIN ->
Traffic = #traffic{id=ConnId, port=Port, source=Source, target=Target, down=Down, up=Up, time=sserl_utils:timestamp()},
gen_event:notify(?TRAFFIC_EVENT, {sending, Traffic}),
erlang:send_after(?REPORT_INTERVAL, self(), report_flow),
{noreply, State#state{down=0, up=0}};
handle_info(report_flow, State) ->
erlang:send_after(?REPORT_INTERVAL, self(), report_flow),
{noreply, State};
handle_info({send, Data}, State=#state{type=server,ota=false,ssocket=SSocket, up=Flow, sending=S}) ->
S1 = try_send(SSocket, Data),
{noreply, State#state{sending=S+S1, up=Flow+size(Data)}};
handle_info({send, Data}, State=#state{type=server,ota=true, ota_data=Rest}) ->
handle_ota(State#state{ota_data= <<Rest/binary, Data/binary>>});
handle_info(_Info, State) ->
{noreply, State}.
%% @private
%% with Reason. The return value is ignored.
%% @spec terminate(Reason, State) -> void()
terminate(_Reason, _State = #state{conn_id=ConnId, port=Port, source=Source, target=Target,
down=Down, up=Up}) ->
Traffic = #traffic{id=ConnId, port=Port, source=Source, target=Target, down=Down, up=Up, time=sserl_utils:timestamp()},
gen_event:notify(?TRAFFIC_EVENT, {complete, Traffic}),
ok.
%% @private
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%% Internal functions
handle_client_c2s(Data, State=#state{ssocket=SSocket, cipher_info=CipherInfo,
up=Flow, sending=S}) ->
{CipherInfo1, EncData} = shadowsocks_crypt:encode(CipherInfo, Data),
S1 = try_send(SSocket, EncData),
{noreply, State#state{cipher_info=CipherInfo1, up=Flow+size(Data), sending=S+S1}}.
handle_client_ota_c2s(Data, State=#state{ssocket=SSocket, cipher_info=CipherInfo,
ota_iv=Iv, ota_id=Id, up=Flow, sending=S}) ->
Hmac = shadowsocks_crypt:hmac([Iv, <<Id:32/big>>], Data),
Len = byte_size(Data),
{CipherInfo1, EncData} = shadowsocks_crypt:encode(CipherInfo, [<<Len:16/big>>, Hmac, Data]),
S1 = try_send(SSocket, EncData),
{noreply, State#state{cipher_info=CipherInfo1, up=Flow+size(Data), sending=S+S1, ota_id=Id+1}}.
handle_server_c2s(Data, State=#state{ssocket=SSocket, cipher_info=CipherInfo,
up=Flow, sending=S}) ->
{CipherInfo1, DecData} = shadowsocks_crypt:decode(CipherInfo, Data),
S1 = try_send(SSocket, DecData),
{noreply, State#state{cipher_info=CipherInfo1, up=Flow+size(Data), sending=S+S1}}.
handle_server_ota_c2s(Data, State=#state{cipher_info=CipherInfo,ota_data=Rest}) ->
{CipherInfo1, DecData} = shadowsocks_crypt:decode(CipherInfo, Data),
handle_ota(State#state{ota_data= <<Rest/binary, DecData/binary>>, cipher_info=CipherInfo1}).
handle_client_s2c(Data, State=#state{csocket=CSocket, cipher_info=CipherInfo,
down=Flow, sending=S}) ->
{CipherInfo1, DecData} = shadowsocks_crypt:decode(CipherInfo, Data),
S1 = try_send(CSocket, DecData),
{noreply, State#state{cipher_info=CipherInfo1, down=Flow+size(Data), sending=S+S1}}.
handle_server_s2c(Data, State=#state{csocket=CSocket, cipher_info=CipherInfo,
down=Flow, sending=S}) ->
{CipherInfo1, EncData} = shadowsocks_crypt:encode(CipherInfo, Data),
S1 = try_send(CSocket, EncData),
{noreply, State#state{cipher_info=CipherInfo1, down=Flow+size(Data), sending=S+S1}}.
%% handle ota frame
handle_ota(State = #state{ota_data=Data, ota_len=2}) when byte_size(Data) >= 2 ->
<<DataLen:16/big, _/binary>> = Data,
handle_ota(State#state{ota_len=DataLen+?HMAC_LEN+2});
handle_ota(State = #state{ota_iv=Iv,ota_data=Data, ota_len=Len, ota_id=Id,
ssocket=SSocket, up=Flow, sending=S}) when byte_size(Data) >= Len ->
DataLen = Len-?HMAC_LEN - 2,
<<_:16/big, Hmac:?HMAC_LEN/binary, FrameData:DataLen/binary, Rest/binary>> = Data,
case shadowsocks_crypt:hmac([Iv, <<Id:32/big>>], FrameData) of
Hmac ->
S1 = try_send(SSocket, FrameData),
handle_ota(State#state{up=Flow+size(FrameData), sending=S+S1, ota_data=Rest,ota_len=2,ota_id=Id+1});
_ ->
{stop, {error, bad_ota_hmac}, State}
end;
handle_ota(State) ->
{noreply, State}.
wait_socket(Socket) ->
receive
{shoot, Socket} ->
ok;
_ ->
wait_socket(Socket)
end.
recv_ivec(State = #state{csocket=Socket,
cipher_info=#cipher_info{method=Method,key=Key}=CipherInfo}) ->
{_, IvLen} = shadowsocks_crypt:key_iv_len(Method),
{ok, IvData} = gen_tcp:recv(Socket, IvLen, ?RECV_TIMOUT),
StreamState = shadowsocks_crypt:stream_init(Method, Key, IvData),
State#state{
ota_iv = IvData,
cipher_info=CipherInfo#cipher_info{
decode_iv=IvData, stream_dec_state=StreamState
}
}.
recv_target(State) ->
{<<AddrType:8/big, Data/binary>>, State1} = recv_decode(1, <<>>, State),
{IPPort, Addr, Port, Rest, NewState} =
case ?GET_ATYP(AddrType) of
?SOCKS5_ATYP_V4 ->
{<<Data1:6/binary, Data2/binary>>, State2} = recv_decode(6, Data, State1),
<<IP1:8/big,IP2:8/big,IP3:8/big,IP4:8/big, DestPort:16/big>> = Data1,
{Data1, {IP1,IP2,IP3,IP4}, DestPort, Data2, State2};
?SOCKS5_ATYP_V6 ->
{<<Data1:18/binary, Data2/binary>>, State2} = recv_decode(18, Data, State1),
<<IP1:16/big,IP2:16/big,IP3:16/big,IP4:16/big,
IP5:16/big,IP6:16/big,IP7:16/big,IP8:16/big,
DestPort:16/big>> = Data1,
{Data1, {IP1,IP2,IP3,IP4,IP5,IP6,IP7,IP8}, DestPort, Data2, State2};
?SOCKS5_ATYP_DOM ->
{<<DomLen:8/big, Data1/binary>>, State2} = recv_decode(1, Data, State1),
DPLen = DomLen+2,
{<<Data2:DPLen/binary, Data3/binary>>, State3} = recv_decode(DomLen+2, Data1, State2),
<<Domain:DomLen/binary, DestPort:16/big>> = Data2,
{[DomLen,Data2], binary_to_list(Domain), DestPort, Data3, State3};
_ ->
lager:error("error_address_type ~p", [AddrType]),
exit({error_address_type, AddrType})
end,
case {?IS_OTA(AddrType), NewState#state.ota} of
{true, _} ->
{<<Hmac:?HMAC_LEN/binary, Rest2/binary>>, NewState2} = recv_decode(?HMAC_LEN, Rest, NewState),
#cipher_info{key=Key} = NewState2#state.cipher_info,
case shadowsocks_crypt:hmac([NewState2#state.ota_iv, Key], [AddrType, IPPort]) of
Hmac ->
{Addr, Port, Rest2, NewState2#state{ota=true}};
_ ->
throw({error, ota_bad_hmac})
end;
{_, true} ->
throw({error, missing_ota});
{false, false} ->
{Addr, Port, Rest, NewState#state{ota=false}}
end.
recv_decode(Len, Data, State) when byte_size(Data) >= Len ->
{Data, State};
recv_decode(Len, Data, State = #state{csocket=Socket, cipher_info=CipherInfo}) ->
{ok, Data1} = gen_tcp:recv(Socket, 0, ?RECV_TIMOUT),
{CipherInfo1, Data2} = shadowsocks_crypt:decode(CipherInfo, Data1),
Data3 = <<Data/binary, Data2/binary>>,
recv_decode(Len, Data3, State#state{cipher_info=CipherInfo1}).
%% recv socks5 request
recv_socks5(Socket) ->
%% exactly socks5 version otherwise boom!!!
<<?SOCKS5_VER:8, NMethods:8/big>> = exactly_recv(Socket, 2),
_ = exactly_recv(Socket, NMethods),
ok = gen_tcp:send(Socket, <<?SOCKS5_VER:8, 0>>),
%% ------ socks5 request -------------------------
%% only support connect
<<?SOCKS5_VER:8, ?SOCKS5_REQ_CONNECT:8, 0, Atyp:8/big>> = exactly_recv(Socket, 4),
Ret = case Atyp of
?SOCKS5_ATYP_V4 ->
exactly_recv(Socket, 6);
?SOCKS5_ATYP_V6 ->
exactly_recv(Socket, 18);
?SOCKS5_ATYP_DOM ->
<<DomLen:8/big>> = exactly_recv(Socket, 1),
[DomLen,exactly_recv(Socket, DomLen+2)]
end,
ok = gen_tcp:send(Socket, <<?SOCKS5_VER, ?SOCKS5_REP_OK, 0, ?SOCKS5_ATYP_V4, 0:32,0:16>>),
{Atyp,Ret}.
exactly_recv(Socket, Size) ->
{ok, Ret} = gen_tcp:recv(Socket, Size, ?RECV_TIMOUT),
Ret.
%% return 1 if success else return 0
try_send(Socket, Data) ->
try erlang:port_command(Socket, Data) of
_ -> 1
catch
error:_E -> 0
end.
|
a6c89c6f3a3137d7504786befcf14f7ecca900f7a293aaac05a4b6e811802622 | sionescu/bordeaux-threads | api-threads.lisp | -*- Mode : LISP ; Syntax : ANSI - Common - lisp ; Base : 10 ; Package : BORDEAUX - THREADS-2 -*-
;;;; The above modeline is required for Genera. Do not change.
(in-package :bordeaux-threads-2)
(defclass thread ()
((name :initarg :name :reader thread-name)
(native-thread :initarg :native-thread
:reader thread-native-thread)
(%lock :initform (make-lock))
;; Used for implementing condition variables in
;; impl-condition-variables-semaphores.lisp.
#+ccl
(%semaphore :initform (%make-semaphore nil 0)
:reader %thread-semaphore)
(%return-values :initform nil :reader thread-return-values)
(%exit-condition :initform nil :reader thread-exit-condition)))
(defmethod print-object ((thread thread) stream)
(print-unreadable-object (thread stream :type t :identity t)
(format stream "~S" (thread-name thread))))
(define-global-var* .known-threads-lock.
(make-lock :name "known-threads-lock"))
(define-global-var* .known-threads.
(trivial-garbage:make-weak-hash-table #-genera :weakness #-genera :key))
(define-global-var* .thread-counter. -1)
(defun make-unknown-thread-name ()
(format nil "Unkown thread ~S"
(with-lock-held (.known-threads-lock.)
(incf .thread-counter.))))
(defun ensure-thread-wrapper (native-thread)
(with-lock-held (.known-threads-lock.)
(multiple-value-bind (thread presentp)
(gethash native-thread .known-threads.)
(if presentp
thread
(setf (gethash native-thread .known-threads.)
(make-instance 'thread
:name (%thread-name native-thread)
:native-thread native-thread))))))
(defun %get-thread-wrapper (native-thread)
(multiple-value-bind (thread presentp)
(with-lock-held (.known-threads-lock.)
(gethash native-thread .known-threads.))
(if presentp
thread
(bt-error "Thread wrapper is supposed to exist for ~S"
native-thread))))
(defun (setf thread-wrapper) (thread native-thread)
(with-lock-held (.known-threads-lock.)
(setf (gethash native-thread .known-threads.) thread)))
(defun remove-thread-wrapper (native-thread)
(with-lock-held (.known-threads-lock.)
(remhash native-thread .known-threads.)))
;; Forms are evaluated in the new thread or in the calling thread?
(defvar *default-special-bindings* nil
"This variable holds an alist associating special variable symbols
to forms to evaluate. Special variables named in this list will
be locally bound in the new thread before it begins executing user code.
This variable may be rebound around calls to MAKE-THREAD to
add/alter default bindings. The effect of mutating this list is
undefined, but earlier forms take precedence over later forms for
the same symbol, so defaults may be overridden by consing to the
head of the list.")
(macrolet
((defbindings (name docstring &body initforms)
(check-type docstring string)
`(alexandria:define-constant ,name
(list
,@(loop for (special form) in initforms
collect `(cons ',special ',form)))
:test #'equal
:documentation ,docstring)))
(defbindings +standard-io-bindings+
"Standard bindings of printer/reader control variables as per
CL:WITH-STANDARD-IO-SYNTAX. Forms are evaluated in the calling thread."
(*package* (find-package :common-lisp-user))
(*print-array* t)
(*print-base* 10)
(*print-case* :upcase)
(*print-circle* nil)
(*print-escape* t)
(*print-gensym* t)
(*print-length* nil)
(*print-level* nil)
(*print-lines* nil)
(*print-miser-width* nil)
    ;; Genera doesn't yet implement COPY-PPRINT-DISPATCH
;; (Calling it signals an error)
#-genera
(*print-pprint-dispatch* (copy-pprint-dispatch nil))
(*print-pretty* nil)
(*print-radix* nil)
(*print-readably* t)
(*print-right-margin* nil)
(*random-state* (make-random-state t))
(*read-base* 10)
(*read-default-float-format* 'double-float)
(*read-eval* nil)
(*read-suppress* nil)
(*readtable* (copy-readtable nil))))
(defvar *current-thread*)
(defun compute-special-bindings (bindings)
(remove-duplicates (append bindings +standard-io-bindings+)
:from-end t :key #'car))
(defun establish-dynamic-env (thread function special-bindings trap-conditions)
"Return a closure that binds the symbols in SPECIAL-BINDINGS and calls
FUNCTION."
(let* ((bindings (compute-special-bindings special-bindings))
(specials (mapcar #'car bindings))
(values (mapcar (lambda (f) (eval (cdr f))) bindings)))
(named-lambda %establish-dynamic-env-wrapper ()
(progv specials values
(with-slots (%lock %return-values %exit-condition #+genera native-thread)
thread
(flet ((record-condition (c)
(with-lock-held (%lock)
(setf %exit-condition c)))
(run-function ()
(let ((*current-thread* nil))
;; Wait until the thread creator has finished creating
;; the wrapper.
(with-lock-held (%lock)
(setf *current-thread* (%get-thread-wrapper (%current-thread))))
(let ((retval
(multiple-value-list (funcall function))))
(with-lock-held (%lock)
(setf %return-values retval))
retval))))
(unwind-protect
(if trap-conditions
(handler-case
(values-list (run-function))
(condition (c)
(record-condition c)))
(handler-bind
((condition #'record-condition))
(values-list (run-function))))
            ;; Genera doesn't support weak key hash tables. If we don't remove
            ;; the native-thread object's entry from the hash table here, we'll
            ;; never be able to GC the native-thread after it terminates
#+genera (remove-thread-wrapper native-thread))))))))
;;;
;;; Thread Creation
;;;
(defun start-multiprocessing ()
"If the host implementation uses user-level threads, start the
scheduler and multiprocessing, otherwise do nothing.
It is safe to call repeatedly."
(when (fboundp '%start-multiprocessing)
(funcall '%start-multiprocessing))
(values))
(defun make-thread (function
&key
name
(initial-bindings *default-special-bindings*)
(trap-conditions t))
"Creates and returns a thread named NAME, which will call the
function FUNCTION with no arguments: when FUNCTION returns, the
thread terminates.
The interaction between threads and dynamic variables is in some
cases complex, and depends on whether the variable has only a global
binding (as established by e.g. DEFVAR/DEFPARAMETER/top-level SETQ)
or has been bound locally (e.g. with LET or LET*) in the calling
thread.
- Global bindings are shared between threads: the initial value of a
global variable in the new thread will be the same as in the
parent, and an assignment to such a variable in any thread will be
visible to all threads in which the global binding is visible.
- Local bindings, such as the ones introduced by INITIAL-BINDINGS,
are local to the thread they are introduced in, except that
- Local bindings in the caller of MAKE-THREAD may or may not be
shared with the new thread that it creates: this is
implementation-defined. Portable code should not depend on
particular behaviour in this case, nor should it assign to such
variables without first rebinding them in the new thread."
(check-type function (and (not null) (or symbol function)))
(check-type name (or null string))
(let* ((name (or name (make-unknown-thread-name)))
(thread (make-instance 'thread :name name)))
(with-slots (native-thread %lock) thread
(with-lock-held (%lock)
(let ((%thread
(%make-thread (establish-dynamic-env
thread
function
initial-bindings
trap-conditions)
name)))
(setf native-thread %thread)
(setf (thread-wrapper %thread) thread))))
thread))
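;; Illustrative usage sketch (not part of the original file): spawn a thread
;; and collect its return value with JOIN-THREAD, which is defined below.
;;
;; (let ((worker (make-thread (lambda () (+ 1 2)) :name "adder")))
;;   (join-thread worker))   ; => 3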
(defun current-thread ()
"Returns the thread object for the calling thread.
This is the same kind of object as would be returned
by MAKE-THREAD."
(cond
((boundp '*current-thread*)
(assert (threadp *current-thread*))
*current-thread*)
(t (ensure-thread-wrapper (%current-thread)))))
(defun threadp (object)
"Returns T if object is a thread, otherwise NIL."
(typep object 'thread))
(defmethod join-thread ((thread thread))
"Wait until THREAD terminates. If THREAD has already terminated,
return immediately. The return values of the thread function are
returned."
(with-slots (native-thread %lock %return-values %exit-condition)
thread
(when (eql native-thread (%current-thread))
(bt-error "Cannot join with the current thread"))
(%join-thread native-thread)
(multiple-value-bind (exit-condition retval)
(with-lock-held (%lock)
(values %exit-condition %return-values))
(if exit-condition
(error 'abnormal-exit :condition exit-condition)
(values-list retval)))))
(defun thread-yield ()
"Allows other threads to run. It may be necessary or desirable to
call this periodically in some implementations; others may schedule
threads automatically."
(%thread-yield)
(values))
;;;
;;; Introspection/debugging
;;;
(defun all-threads ()
"Returns a sequence of all of the threads."
(mapcar #'ensure-thread-wrapper (%all-threads)))
(defmethod interrupt-thread ((thread thread) function &rest args)
"Interrupt THREAD and cause it to evaluate FUNCTION
before continuing with the interrupted path of execution. This may
not be a good idea if THREAD is holding locks or doing anything
important."
(flet ((apply-function ()
(if args
(named-lambda %interrupt-thread-wrapper ()
(apply function args))
function)))
(declare (dynamic-extent #'apply-function))
(%interrupt-thread (thread-native-thread thread) (apply-function))
thread))
(defmethod signal-in-thread ((thread thread) datum &rest args)
"Interrupt THREAD and call SIGNAL passing DATUM and ARGS."
(apply #'interrupt-thread thread #'signal (cons datum args)))
(defmethod warn-in-thread ((thread thread) datum &rest args)
"Interrupt THREAD and call WARN passing DATUM and ARGS."
(apply #'interrupt-thread thread #'warn (cons datum args)))
(defmethod error-in-thread ((thread thread) datum &rest args)
"Interrupt THREAD and call ERROR passing DATUM and ARGS."
(apply #'interrupt-thread thread #'error (cons datum args)))
(defmethod destroy-thread ((thread thread))
"Terminates the thread THREAD, which is an object
as returned by MAKE-THREAD. This should be used with caution: it is
implementation-defined whether the thread runs cleanup forms or
releases its locks first.
Destroying the calling thread is an error."
(with-slots (native-thread %lock %exit-condition)
thread
(when (eql native-thread (%current-thread))
(bt-error "Cannot destroy the current thread"))
(unless (thread-alive-p thread)
(bt-error "Cannot destroy thread because it already exited: ~S."
thread))
(%destroy-thread native-thread)
(with-lock-held (%lock)
(setf %exit-condition :terminated)))
thread)
(defmethod thread-alive-p ((thread thread))
"Returns true if THREAD is alive, that is, if it has not finished or
DESTROY-THREAD has not been called on it."
(%thread-alive-p (thread-native-thread thread)))
| null | https://raw.githubusercontent.com/sionescu/bordeaux-threads/fd1b0cffdb24ca630e0c0350e80571cdf26bf14c/apiv2/api-threads.lisp | lisp | Syntax : ANSI - Common - lisp ; Base : 10 ; Package : BORDEAUX - THREADS-2 -*-
Used for implementing condition variables in
impl-condition-variables-semaphores.lisp.
Forms are evaluated in the new thread or in the calling thread?
(Calling it signals an error)
Wait until the thread creator has finished creating
the wrapper.
the native-thread object's entry from the hash table here, we'll
Thread Creation
others may schedule
Introspection/debugging
| The above modeline is required for . Do not change .
(in-package :bordeaux-threads-2)
(defclass thread ()
((name :initarg :name :reader thread-name)
(native-thread :initarg :native-thread
:reader thread-native-thread)
(%lock :initform (make-lock))
#+ccl
(%semaphore :initform (%make-semaphore nil 0)
:reader %thread-semaphore)
(%return-values :initform nil :reader thread-return-values)
(%exit-condition :initform nil :reader thread-exit-condition)))
(defmethod print-object ((thread thread) stream)
(print-unreadable-object (thread stream :type t :identity t)
(format stream "~S" (thread-name thread))))
(define-global-var* .known-threads-lock.
(make-lock :name "known-threads-lock"))
(define-global-var* .known-threads.
(trivial-garbage:make-weak-hash-table #-genera :weakness #-genera :key))
(define-global-var* .thread-counter. -1)
(defun make-unknown-thread-name ()
(format nil "Unkown thread ~S"
(with-lock-held (.known-threads-lock.)
(incf .thread-counter.))))
(defun ensure-thread-wrapper (native-thread)
(with-lock-held (.known-threads-lock.)
(multiple-value-bind (thread presentp)
(gethash native-thread .known-threads.)
(if presentp
thread
(setf (gethash native-thread .known-threads.)
(make-instance 'thread
:name (%thread-name native-thread)
:native-thread native-thread))))))
(defun %get-thread-wrapper (native-thread)
(multiple-value-bind (thread presentp)
(with-lock-held (.known-threads-lock.)
(gethash native-thread .known-threads.))
(if presentp
thread
(bt-error "Thread wrapper is supposed to exist for ~S"
native-thread))))
(defun (setf thread-wrapper) (thread native-thread)
(with-lock-held (.known-threads-lock.)
(setf (gethash native-thread .known-threads.) thread)))
(defun remove-thread-wrapper (native-thread)
(with-lock-held (.known-threads-lock.)
(remhash native-thread .known-threads.)))
(defvar *default-special-bindings* nil
"This variable holds an alist associating special variable symbols
to forms to evaluate. Special variables named in this list will
be locally bound in the new thread before it begins executing user code.
This variable may be rebound around calls to MAKE-THREAD to
add/alter default bindings. The effect of mutating this list is
undefined, but earlier forms take precedence over later forms for
the same symbol, so defaults may be overridden by consing to the
head of the list.")
(macrolet
((defbindings (name docstring &body initforms)
(check-type docstring string)
`(alexandria:define-constant ,name
(list
,@(loop for (special form) in initforms
collect `(cons ',special ',form)))
:test #'equal
:documentation ,docstring)))
(defbindings +standard-io-bindings+
"Standard bindings of printer/reader control variables as per
CL:WITH-STANDARD-IO-SYNTAX. Forms are evaluated in the calling thread."
(*package* (find-package :common-lisp-user))
(*print-array* t)
(*print-base* 10)
(*print-case* :upcase)
(*print-circle* nil)
(*print-escape* t)
(*print-gensym* t)
(*print-length* nil)
(*print-level* nil)
(*print-lines* nil)
(*print-miser-width* nil)
    ;; Genera doesn't yet implement COPY-PPRINT-DISPATCH
#-genera
(*print-pprint-dispatch* (copy-pprint-dispatch nil))
(*print-pretty* nil)
(*print-radix* nil)
(*print-readably* t)
(*print-right-margin* nil)
(*random-state* (make-random-state t))
(*read-base* 10)
(*read-default-float-format* 'double-float)
(*read-eval* nil)
(*read-suppress* nil)
(*readtable* (copy-readtable nil))))
(defvar *current-thread*)
(defun compute-special-bindings (bindings)
(remove-duplicates (append bindings +standard-io-bindings+)
:from-end t :key #'car))
(defun establish-dynamic-env (thread function special-bindings trap-conditions)
"Return a closure that binds the symbols in SPECIAL-BINDINGS and calls
FUNCTION."
(let* ((bindings (compute-special-bindings special-bindings))
(specials (mapcar #'car bindings))
(values (mapcar (lambda (f) (eval (cdr f))) bindings)))
(named-lambda %establish-dynamic-env-wrapper ()
(progv specials values
(with-slots (%lock %return-values %exit-condition #+genera native-thread)
thread
(flet ((record-condition (c)
(with-lock-held (%lock)
(setf %exit-condition c)))
(run-function ()
(let ((*current-thread* nil))
(with-lock-held (%lock)
(setf *current-thread* (%get-thread-wrapper (%current-thread))))
(let ((retval
(multiple-value-list (funcall function))))
(with-lock-held (%lock)
(setf %return-values retval))
retval))))
(unwind-protect
(if trap-conditions
(handler-case
(values-list (run-function))
(condition (c)
(record-condition c)))
(handler-bind
((condition #'record-condition))
(values-list (run-function))))
            ;; Genera doesn't support weak key hash tables. If we don't remove
            ;; the native-thread object's entry from the hash table here, we'll
            ;; never be able to GC the native-thread after it terminates
#+genera (remove-thread-wrapper native-thread))))))))
(defun start-multiprocessing ()
"If the host implementation uses user-level threads, start the
scheduler and multiprocessing, otherwise do nothing.
It is safe to call repeatedly."
(when (fboundp '%start-multiprocessing)
(funcall '%start-multiprocessing))
(values))
(defun make-thread (function
&key
name
(initial-bindings *default-special-bindings*)
(trap-conditions t))
"Creates and returns a thread named NAME, which will call the
function FUNCTION with no arguments: when FUNCTION returns, the
thread terminates.
The interaction between threads and dynamic variables is in some
cases complex, and depends on whether the variable has only a global
binding (as established by e.g. DEFVAR/DEFPARAMETER/top-level SETQ)
or has been bound locally (e.g. with LET or LET*) in the calling
thread.
- Global bindings are shared between threads: the initial value of a
global variable in the new thread will be the same as in the
parent, and an assignment to such a variable in any thread will be
visible to all threads in which the global binding is visible.
- Local bindings, such as the ones introduced by INITIAL-BINDINGS,
are local to the thread they are introduced in, except that
- Local bindings in the caller of MAKE-THREAD may or may not be
shared with the new thread that it creates: this is
implementation-defined. Portable code should not depend on
particular behaviour in this case, nor should it assign to such
variables without first rebinding them in the new thread."
(check-type function (and (not null) (or symbol function)))
(check-type name (or null string))
(let* ((name (or name (make-unknown-thread-name)))
(thread (make-instance 'thread :name name)))
(with-slots (native-thread %lock) thread
(with-lock-held (%lock)
(let ((%thread
(%make-thread (establish-dynamic-env
thread
function
initial-bindings
trap-conditions)
name)))
(setf native-thread %thread)
(setf (thread-wrapper %thread) thread))))
thread))
(defun current-thread ()
"Returns the thread object for the calling thread.
This is the same kind of object as would be returned
by MAKE-THREAD."
(cond
((boundp '*current-thread*)
(assert (threadp *current-thread*))
*current-thread*)
(t (ensure-thread-wrapper (%current-thread)))))
(defun threadp (object)
"Returns T if object is a thread, otherwise NIL."
(typep object 'thread))
(defmethod join-thread ((thread thread))
"Wait until THREAD terminates. If THREAD has already terminated,
return immediately. The return values of the thread function are
returned."
(with-slots (native-thread %lock %return-values %exit-condition)
thread
(when (eql native-thread (%current-thread))
(bt-error "Cannot join with the current thread"))
(%join-thread native-thread)
(multiple-value-bind (exit-condition retval)
(with-lock-held (%lock)
(values %exit-condition %return-values))
(if exit-condition
(error 'abnormal-exit :condition exit-condition)
(values-list retval)))))
(defun thread-yield ()
"Allows other threads to run. It may be necessary or desirable to
threads automatically."
(%thread-yield)
(values))
(defun all-threads ()
"Returns a sequence of all of the threads."
(mapcar #'ensure-thread-wrapper (%all-threads)))
(defmethod interrupt-thread ((thread thread) function &rest args)
"Interrupt THREAD and cause it to evaluate FUNCTION
before continuing with the interrupted path of execution. This may
not be a good idea if THREAD is holding locks or doing anything
important."
(flet ((apply-function ()
(if args
(named-lambda %interrupt-thread-wrapper ()
(apply function args))
function)))
(declare (dynamic-extent #'apply-function))
(%interrupt-thread (thread-native-thread thread) (apply-function))
thread))
(defmethod signal-in-thread ((thread thread) datum &rest args)
"Interrupt THREAD and call SIGNAL passing DATUM and ARGS."
(apply #'interrupt-thread thread #'signal (cons datum args)))
(defmethod warn-in-thread ((thread thread) datum &rest args)
"Interrupt THREAD and call WARN passing DATUM and ARGS."
(apply #'interrupt-thread thread #'warn (cons datum args)))
(defmethod error-in-thread ((thread thread) datum &rest args)
"Interrupt THREAD and call ERROR passing DATUM and ARGS."
(apply #'interrupt-thread thread #'error (cons datum args)))
(defmethod destroy-thread ((thread thread))
"Terminates the thread THREAD, which is an object
as returned by MAKE-THREAD. This should be used with caution: it is
implementation-defined whether the thread runs cleanup forms or
releases its locks first.
Destroying the calling thread is an error."
(with-slots (native-thread %lock %exit-condition)
thread
(when (eql native-thread (%current-thread))
(bt-error "Cannot destroy the current thread"))
(unless (thread-alive-p thread)
(bt-error "Cannot destroy thread because it already exited: ~S."
thread))
(%destroy-thread native-thread)
(with-lock-held (%lock)
(setf %exit-condition :terminated)))
thread)
(defmethod thread-alive-p ((thread thread))
"Returns true if THREAD is alive, that is, if it has not finished or
DESTROY-THREAD has not been called on it."
(%thread-alive-p (thread-native-thread thread)))
|
9be46c44fc4f46510798f3be464a1874a32d568a820c49607baa67a1fe9244f3 | coleslaw-org/coleslaw | isso.lisp | (defpackage :coleslaw-isso
(:use :cl)
(:export #:enable)
(:import-from :coleslaw #:add-injection
#:post))
(in-package :coleslaw-isso)
(defvar *isso-header*
"<div class=\"comments\">
<section id=\"isso-thread\"></section>
<script data-isso=\"~a/\"
src=\"~a/js/embed.min.js\"></script>
</div>")
(defun enable (&key isso-url)
(flet ((inject-p (x)
(when (typep x 'post)
(format nil *isso-header* isso-url isso-url))))
(add-injection #'inject-p :body)))
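;; Illustrative usage sketch (not part of the original plugin): ENABLE expects
;; the base URL of a self-hosted isso instance; the URL below is a placeholder.
;;
;; (coleslaw-isso:enable :isso-url "https://comments.example.org")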
| null | https://raw.githubusercontent.com/coleslaw-org/coleslaw/0b9f027a36ea00ca2e4b6f8d9fd7a135127cc2da/plugins/isso.lisp | lisp | (defpackage :coleslaw-isso
(:use :cl)
(:export #:enable)
(:import-from :coleslaw #:add-injection
#:post))
(in-package :coleslaw-isso)
(defvar *isso-header*
"<div class=\"comments\">
<section id=\"isso-thread\"></section>
<script data-isso=\"~a/\"
src=\"~a/js/embed.min.js\"></script>
</div>")
(defun enable (&key isso-url)
(flet ((inject-p (x)
(when (typep x 'post)
(format nil *isso-header* isso-url isso-url))))
(add-injection #'inject-p :body)))
|
|
a46af95cd30f1733eba1d3b3ed4b7856215cf8af5d573cc47330544c4eadd1ed | green-labs/gosura | auth_test.clj | (ns gosura.auth-test
(:require [clojure.test :refer [deftest is run-tests testing]]
[failjure.core :as f]
[gosura.auth :as gosura-auth]
[gosura.edn :refer [read-config]]))
(deftest ->auth-result-test
(let [auth0 (fn [ctx]
(boolean (get-in ctx [:identity :id])))
auth2 (fn [ctx auth-column-name]
(when-let [id (get-in ctx [:identity :id])]
{auth-column-name id}))
unauthorized (f/fail "Unauthorized")]
(testing "auth가 없을 때, 아무일도 일어나지 않는다"
(let [auth nil
ctx {:identity {:id "1"}}
result (gosura-auth/->auth-result auth ctx)
expected-result nil]
(is (= result expected-result))))
(testing "auth의 시그니처가 fn 일 때, auth0 인증을 통과한 경우 추가 필터는 없다"
(let [auth auth0
ctx {:identity {:id "1"}}
result (gosura-auth/->auth-result auth ctx)
expected-result nil]
(is (= result expected-result))))
(testing "auth의 시그니처가 fn 일 때, auth0 인증에 실패하면 Unauthorized 메시지를 반환한다."
(let [auth auth0
ctx {}
result (gosura-auth/->auth-result auth ctx)]
(is (= result unauthorized))))
(testing "auth의 시그니처가 (fn keyword) 일 때, auth0은 에러를 내뱉는다"
(let [auth [auth0 :user-id]
ctx {:identity {:id "1"}}]
(is (thrown? clojure.lang.ArityException (gosura-auth/->auth-result auth ctx)))))
(testing "auth의 시그니처가 (fn keyword) 일 때, auth2 인증에 실패하면 Unauthorized 메세지를 반환한다"
(let [auth [auth2 :user-id]
ctx {}
result (gosura-auth/->auth-result auth ctx)]
(is (= result unauthorized))))
(testing "auth의 시그니처가 (fn keyword) 일 때, auth2 인증을 통과한 후 추가 필터는 {keyword 인증-id-값}을 잘 반환한다"
(let [auth [auth2 :user-id]
ctx {:identity {:id "1"}}
result (gosura-auth/->auth-result auth ctx)
expected-result {:user-id "1"}]
(is (= result expected-result))))))
(defn get-country-code
[ctx]
(get-in ctx [:identity :country-code]))
(deftest filter-opts-test
(let [filters (-> "test/resources/gosura/sample_resolver_configs.edn"
read-config
:filters)]
(testing "fiilters.country 설정에 signiture가 싱글 파라미터(qualified symbol)일 때, ctx 내에 인증 정보를 잘 가지고 온다"
(let [ctx {:identity {:country-code "JP"}}
result (gosura-auth/config-filter-opts filters ctx)
expected-result {:country-code "JP"
:language-code nil}]
(is (= result expected-result))))
(testing "filters.language 설정에 signiture가 coll일 때, ctx 내에 인증 정보를 잘 가지고 온다"
(let [ctx {:identity {:language-code "ja"}}
result (gosura-auth/config-filter-opts filters ctx)
expected-result {:country-code nil
:language-code "ja"}]
(is (= result expected-result))))
(testing "인증 정보에 국가 정보가 없을 때 빈 정보를 가지고 있다"
(let [ctx {}
result (gosura-auth/config-filter-opts filters ctx)
expected-result {:country-code nil
:language-code nil}]
(is (= result expected-result))))))
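;; Illustrative note (not part of the original test): the tests above assume a
;; resource file whose :filters section maps each filter key to a resolver
;; specification -- a single qualified symbol for :country-code and a
;; collection form for :language-code. A hypothetical shape (not the actual
;; file) could look like:
;;
;; {:filters {:country-code gosura.auth-test/get-country-code
;;            :language-code [...]}}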
(comment
(run-tests))
| null | https://raw.githubusercontent.com/green-labs/gosura/ca92389325557cbe749ee42a2986c0d528de7458/test/gosura/auth_test.clj | clojure | (ns gosura.auth-test
(:require [clojure.test :refer [deftest is run-tests testing]]
[failjure.core :as f]
[gosura.auth :as gosura-auth]
[gosura.edn :refer [read-config]]))
(deftest ->auth-result-test
(let [auth0 (fn [ctx]
(boolean (get-in ctx [:identity :id])))
auth2 (fn [ctx auth-column-name]
(when-let [id (get-in ctx [:identity :id])]
{auth-column-name id}))
unauthorized (f/fail "Unauthorized")]
(testing "auth가 없을 때, 아무일도 일어나지 않는다"
(let [auth nil
ctx {:identity {:id "1"}}
result (gosura-auth/->auth-result auth ctx)
expected-result nil]
(is (= result expected-result))))
(testing "auth의 시그니처가 fn 일 때, auth0 인증을 통과한 경우 추가 필터는 없다"
(let [auth auth0
ctx {:identity {:id "1"}}
result (gosura-auth/->auth-result auth ctx)
expected-result nil]
(is (= result expected-result))))
(testing "auth의 시그니처가 fn 일 때, auth0 인증에 실패하면 Unauthorized 메시지를 반환한다."
(let [auth auth0
ctx {}
result (gosura-auth/->auth-result auth ctx)]
(is (= result unauthorized))))
(testing "auth의 시그니처가 (fn keyword) 일 때, auth0은 에러를 내뱉는다"
(let [auth [auth0 :user-id]
ctx {:identity {:id "1"}}]
(is (thrown? clojure.lang.ArityException (gosura-auth/->auth-result auth ctx)))))
(testing "auth의 시그니처가 (fn keyword) 일 때, auth2 인증에 실패하면 Unauthorized 메세지를 반환한다"
(let [auth [auth2 :user-id]
ctx {}
result (gosura-auth/->auth-result auth ctx)]
(is (= result unauthorized))))
(testing "auth의 시그니처가 (fn keyword) 일 때, auth2 인증을 통과한 후 추가 필터는 {keyword 인증-id-값}을 잘 반환한다"
(let [auth [auth2 :user-id]
ctx {:identity {:id "1"}}
result (gosura-auth/->auth-result auth ctx)
expected-result {:user-id "1"}]
(is (= result expected-result))))))
(defn get-country-code
[ctx]
(get-in ctx [:identity :country-code]))
(deftest filter-opts-test
(let [filters (-> "test/resources/gosura/sample_resolver_configs.edn"
read-config
:filters)]
(testing "fiilters.country 설정에 signiture가 싱글 파라미터(qualified symbol)일 때, ctx 내에 인증 정보를 잘 가지고 온다"
(let [ctx {:identity {:country-code "JP"}}
result (gosura-auth/config-filter-opts filters ctx)
expected-result {:country-code "JP"
:language-code nil}]
(is (= result expected-result))))
(testing "filters.language 설정에 signiture가 coll일 때, ctx 내에 인증 정보를 잘 가지고 온다"
(let [ctx {:identity {:language-code "ja"}}
result (gosura-auth/config-filter-opts filters ctx)
expected-result {:country-code nil
:language-code "ja"}]
(is (= result expected-result))))
(testing "인증 정보에 국가 정보가 없을 때 빈 정보를 가지고 있다"
(let [ctx {}
result (gosura-auth/config-filter-opts filters ctx)
expected-result {:country-code nil
:language-code nil}]
(is (= result expected-result))))))
(comment
(run-tests))
|
|
a2947b3a85b6b3f5b2c6e7ee46f6e5507de663a98658e31899fd19f1525708c1 | dselsam/arc | Partition.hs | Copyright ( c ) 2020 Microsoft Corporation . All rights reserved .
-- Released under Apache 2.0 license as described in the file LICENSE.
-- Authors: , , .
module Util.Partition where
import Data.UnionFind.ST
| null | https://raw.githubusercontent.com/dselsam/arc/7e68a7ed9508bf26926b0f68336db05505f4e765/src/Util/Partition.hs | haskell | Copyright ( c ) 2020 Microsoft Corporation . All rights reserved .
Released under Apache 2.0 license as described in the file LICENSE .
Authors : , , .
module Util.Partition where
import Data.UnionFind.ST
|
|
e1e94e99c96004d6c4fa03574d243ff68cc6e11ef2edb06a71fbdddacee7a187 | nshepperd/funn | Param.hs | # LANGUAGE DataKinds #
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module AI.Funn.CL.Batched.Param (
Param(..),
reshape,
split,
appendD
) where
import Control.Applicative
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Data.Foldable
import Data.Monoid
import Data.Proxy
import Data.Traversable
import GHC.TypeLits
import System.IO.Unsafe
import AI.Funn.CL.Blob (Blob, BlobT(Blob))
import qualified AI.Funn.CL.Blob as Blob
import AI.Funn.CL.MonadCL
import AI.Funn.CL.Tensor (Tensor)
import qualified AI.Funn.CL.Tensor as T
import qualified AI.Funn.CL.TensorLazy as TL
import qualified AI.Funn.CL.LazyMem as LM
import AI.Funn.Diff.Diff (Derivable(..))
import AI.Funn.Space
newtype Param (ω :: Nat) (n :: Nat) = Param { getParam :: Tensor '[n] }
instance Derivable (Param ω n) where
type D (Param ω n) = TL.Tensor '[ω, n]
instance (MonadIO m, KnownNat n) => Zero m (Param ω n) where
zero = Param <$> zero
instance (MonadIO m, KnownNat n) => Semi m (Param ω n) where
plus (Param x) (Param y) = Param <$> plus x y
instance (MonadIO m, KnownNat n) => Additive m (Param ω n) where
plusm xs = Param <$> plusm (map getParam xs)
instance (MonadIO m, KnownNat n) => Scale m Double (Param ω n) where
scale x (Param xs) = Param <$> scale x xs
instance (MonadIO m, KnownNat n) => VectorSpace m Double (Param ω n) where
{}
instance (MonadIO m, KnownNat n) => Inner m Double (Param ω n) where
inner (Param x) (Param y) = inner x y
instance (MonadIO m, KnownNat n) => Finite m Double (Param ω n) where
getBasis (Param x) = getBasis x
-- O(1)
reshape :: (Prod ds ~ n) => Param ω n -> Tensor ds
reshape (Param xs) = T.reshape xs
-- O(1)
split :: (KnownNat a, KnownNat b) => Param ω (a+b) -> (Param ω a, Param ω b)
split (Param xs) = case T.split xs of
(a, b) -> (Param a, Param b)
-- O(ω)
appendD :: forall ω a b. (KnownDimsF [ω, a, b]) => TL.Tensor [ω, a] -> TL.Tensor [ω, b] -> TL.Tensor [ω, a+b]
appendD = TL.appendW
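-- Illustrative sketch (not part of the original module): a parameter block of
-- size a+b can be split and each piece used independently; the helper below is
-- hypothetical and only combines the signatures given above.
--
-- useParam :: (KnownNat a, KnownNat b, Prod ds ~ a)
--          => Param ω (a+b) -> (Tensor ds, Param ω b)
-- useParam p = let (pa, pb) = split p in (reshape pa, pb)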
| null | https://raw.githubusercontent.com/nshepperd/funn/23138fc44cfda90afd49927c39b122ed78945293/AI/Funn/CL/Batched/Param.hs | haskell | O(ω) | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module AI.Funn.CL.Batched.Param (
Param(..),
reshape,
split,
appendD
) where
import Control.Applicative
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Data.Foldable
import Data.Monoid
import Data.Proxy
import Data.Traversable
import GHC.TypeLits
import System.IO.Unsafe
import AI.Funn.CL.Blob (Blob, BlobT(Blob))
import qualified AI.Funn.CL.Blob as Blob
import AI.Funn.CL.MonadCL
import AI.Funn.CL.Tensor (Tensor)
import qualified AI.Funn.CL.Tensor as T
import qualified AI.Funn.CL.TensorLazy as TL
import qualified AI.Funn.CL.LazyMem as LM
import AI.Funn.Diff.Diff (Derivable(..))
import AI.Funn.Space
newtype Param (ω :: Nat) (n :: Nat) = Param { getParam :: Tensor '[n] }
instance Derivable (Param ω n) where
type D (Param ω n) = TL.Tensor '[ω, n]
instance (MonadIO m, KnownNat n) => Zero m (Param ω n) where
zero = Param <$> zero
instance (MonadIO m, KnownNat n) => Semi m (Param ω n) where
plus (Param x) (Param y) = Param <$> plus x y
instance (MonadIO m, KnownNat n) => Additive m (Param ω n) where
plusm xs = Param <$> plusm (map getParam xs)
instance (MonadIO m, KnownNat n) => Scale m Double (Param ω n) where
scale x (Param xs) = Param <$> scale x xs
instance (MonadIO m, KnownNat n) => VectorSpace m Double (Param ω n) where
{}
instance (MonadIO m, KnownNat n) => Inner m Double (Param ω n) where
inner (Param x) (Param y) = inner x y
instance (MonadIO m, KnownNat n) => Finite m Double (Param ω n) where
getBasis (Param x) = getBasis x
-- O(1)
reshape :: (Prod ds ~ n) => Param ω n -> Tensor ds
reshape (Param xs) = T.reshape xs
-- O(1)
split :: (KnownNat a, KnownNat b) => Param ω (a+b) -> (Param ω a, Param ω b)
split (Param xs) = case T.split xs of
(a, b) -> (Param a, Param b)
appendD :: forall ω a b. (KnownDimsF [ω, a, b]) => TL.Tensor [ω, a] -> TL.Tensor [ω, b] -> TL.Tensor [ω, a+b]
appendD = TL.appendW
|
63e0ebd2fd7a08a1edf1ed2f93e5c657fd6f12255afd81e229863920129868a8 | flosell/lambdacd | merge_test.clj | (ns lambdacd.stepresults.merge-test
(:require [clojure.test :refer :all]
[lambdacd.stepresults.merge :refer :all]
[conjure.core :as c]))
(defn some-resolver [_ _ _] nil)
(defn some-other-resolver [_ _ _])
(defn some-third-resolver [_ _ _])
(defn some-merge-fn [map-a map-b]
(assoc (merge map-a map-b)
:something :extra))
(deftest merge-step-results-test
(testing "that it can merge a list of step results"
(is (= {:status :success
:foo :bar
:bar :baz
:something :extra}
(merge-step-results [{:status :success}
{:foo :bar}
{:bar :baz}]
some-merge-fn))))
(testing "that later things overwrite earlier things"
(is (= {:status :success
:foo :baz}
(merge-step-results [{:status :success}
{:foo :bar}
{:foo :baz}]
merge))))
(testing "that an empty list merges to an empty result"
(is (= {}
(merge-step-results [] merge)))))
(deftest merge-two-step-results-test
(testing "that it merges two steps and resolves conflicts using the passed resolvers"
(testing "conflictless merging"
(is (= {:foo "hello" :bar "world"} (merge-two-step-results {:foo "hello"} {:bar "world"}
:resolvers []))))
(testing "using the resolvers"
(testing "the resolver gets called"
(c/stubbing [some-resolver :resolved]
(is (= {:foo :resolved} (merge-two-step-results {:foo :bar} {:foo :baz}
:resolvers [some-resolver])))
(c/verify-called-once-with-args some-resolver :foo :bar :baz)))
(testing "that the first matching resolver wins"
(c/stubbing [some-resolver nil
some-other-resolver :resolved
some-third-resolver :also-resolved]
(is (= {:foo :resolved} (merge-two-step-results {:foo :bar} {:foo :baz}
:resolvers [some-resolver some-other-resolver some-third-resolver])))))
(testing "that conflicts will become nil if no resolver is matching"
(is (= {:foo nil} (merge-two-step-results {:foo :bar} {:foo :baz}
:resolvers [some-resolver])))
(is (= {:foo nil} (merge-two-step-results {:foo :bar} {:foo :baz}
:resolvers []))))))
(testing "defaults"
(testing "that it merges statuses, maps and in doubt, the last wins"
(is (= {:status :failure
:m {:a :b
:b :c}
:s "b"}
(merge-two-step-results {:status :failure
:m {:a :b}
:s "a"}
{:status :success
:m {:b :c}
:s "b"}))))))
| null | https://raw.githubusercontent.com/flosell/lambdacd/e9ba3cebb2d5f0070a2e0e1e08fc85fc99ee7135/test/clj/lambdacd/stepresults/merge_test.clj | clojure | (ns lambdacd.stepresults.merge-test
(:require [clojure.test :refer :all]
[lambdacd.stepresults.merge :refer :all]
[conjure.core :as c]))
(defn some-resolver [_ _ _] nil)
(defn some-other-resolver [_ _ _])
(defn some-third-resolver [_ _ _])
(defn some-merge-fn [map-a map-b]
(assoc (merge map-a map-b)
:something :extra))
(deftest merge-step-results-test
(testing "that it can merge a list of step results"
(is (= {:status :success
:foo :bar
:bar :baz
:something :extra}
(merge-step-results [{:status :success}
{:foo :bar}
{:bar :baz}]
some-merge-fn))))
(testing "that later things overwrite earlier things"
(is (= {:status :success
:foo :baz}
(merge-step-results [{:status :success}
{:foo :bar}
{:foo :baz}]
merge))))
(testing "that an empty list merges to an empty result"
(is (= {}
(merge-step-results [] merge)))))
(deftest merge-two-step-results-test
(testing "that it merges two steps and resolves conflicts using the passed resolvers"
(testing "conflictless merging"
(is (= {:foo "hello" :bar "world"} (merge-two-step-results {:foo "hello"} {:bar "world"}
:resolvers []))))
(testing "using the resolvers"
(testing "the resolver gets called"
(c/stubbing [some-resolver :resolved]
(is (= {:foo :resolved} (merge-two-step-results {:foo :bar} {:foo :baz}
:resolvers [some-resolver])))
(c/verify-called-once-with-args some-resolver :foo :bar :baz)))
(testing "that the first matching resolver wins"
(c/stubbing [some-resolver nil
some-other-resolver :resolved
some-third-resolver :also-resolved]
(is (= {:foo :resolved} (merge-two-step-results {:foo :bar} {:foo :baz}
:resolvers [some-resolver some-other-resolver some-third-resolver])))))
(testing "that conflicts will become nil if no resolver is matching"
(is (= {:foo nil} (merge-two-step-results {:foo :bar} {:foo :baz}
:resolvers [some-resolver])))
(is (= {:foo nil} (merge-two-step-results {:foo :bar} {:foo :baz}
:resolvers []))))))
(testing "defaults"
(testing "that it merges statuses, maps and in doubt, the last wins"
(is (= {:status :failure
:m {:a :b
:b :c}
:s "b"}
(merge-two-step-results {:status :failure
:m {:a :b}
:s "a"}
{:status :success
:m {:b :c}
:s "b"}))))))
|
|
7259b3faeb480c8f2cb4693c2ae5430679f27cf39e4866e998b3464b31bea82c | alt-romes/slfl | t2.hs | #-- main :: forall a b c d . ((! (a -o b)) -o (((! (a -o b)) -o (d -o c)) -o (d -o c)));
main e f g = let !h = e in f (!(λk -> h k)) g;
| null | https://raw.githubusercontent.com/alt-romes/slfl/4956fcce8ff2ca7622799fe0715c118b568b74eb/STLLC/t2.hs | haskell | main :: forall a b c d . ((! (a -o b)) -o (((! (a -o b)) -o (d -o c)) -o (d -o c))); | main e f g = let !h = e in f (!(λk -> h k)) g;
|
6909c0cef98e186c345675244b5ecee8acd97097e0246ad22410fadd35b235af | rametta/retros | New.hs | module Web.View.Columns.New where
import Web.View.Prelude
newtype NewView = NewView {column :: Column}
instance View NewView where
html NewView {..} =
renderModal
Modal
{ modalTitle = "New Column",
modalCloseUrl = pathTo $ ShowRetroAction $ get #retroId column,
modalFooter = Nothing,
modalContent = renderForm column
}
renderForm :: Column -> Html
renderForm column =
formFor
column
[hsx|
{(hiddenField #retroId)}
{(textField #title) {autofocus = True, required = True}}
{(textField #cover) {placeholder = "e.g "}}
{(textField #sortOrder) { fieldLabel = "Position", helpText = "Ex: A position of 0 would be the left most column" }}
<div class="flex justify-between">
<div class="flex">
<button class="mr-2 bg-green-500 hover:bg-green-600 text-white font-bold py-1 px-2 rounded transition duration-300">Save</button>
<a href={ShowRetroAction $ get #retroId column} class="block btn-gray">Cancel</a>
</div>
</div>
|] | null | https://raw.githubusercontent.com/rametta/retros/8f3ca23bcf8ee30476ff3e68a5969af021956f8b/Web/View/Columns/New.hs | haskell | module Web.View.Columns.New where
import Web.View.Prelude
newtype NewView = NewView {column :: Column}
instance View NewView where
html NewView {..} =
renderModal
Modal
{ modalTitle = "New Column",
modalCloseUrl = pathTo $ ShowRetroAction $ get #retroId column,
modalFooter = Nothing,
modalContent = renderForm column
}
renderForm :: Column -> Html
renderForm column =
formFor
column
[hsx|
{(hiddenField #retroId)}
{(textField #title) {autofocus = True, required = True}}
{(textField #cover) {placeholder = "e.g "}}
{(textField #sortOrder) { fieldLabel = "Position", helpText = "Ex: A position of 0 would be the left most column" }}
<div class="flex justify-between">
<div class="flex">
<button class="mr-2 bg-green-500 hover:bg-green-600 text-white font-bold py-1 px-2 rounded transition duration-300">Save</button>
<a href={ShowRetroAction $ get #retroId column} class="block btn-gray">Cancel</a>
</div>
</div>
|] |
|
ee758f8df13401c8208fe2a3f434931e8915c7ccb948aac041db691fc0e18e20 | johnlawrenceaspden/hobby-code | cookies.clj | ;; necessary dependencies
[ [ org.clojure/clojure " " ]
;; [ring/ring "1.1.7"]]
;; -------------
;; Here's an app, built in a way which should surprise no-one who's read the previous posts
(require 'ring.adapter.jetty
'ring.middleware.stacktrace
'clojure.pprint)
Middleware for spying on the doings of other middleware :
(defn html-escape [string]
(str "<pre>" (clojure.string/escape string {\< "<", \> ">"}) "</pre>"))
(defn format-request [name request]
(with-out-str
(println "-------------------------------")
(println name)
(clojure.pprint/pprint request)
(println "-------------------------------")))
(defn wrap-spy [handler spyname include-body]
(fn [request]
(let [incoming (format-request (str spyname ":\n Incoming Request:") request)]
(println incoming)
(let [response (handler request)]
(let [r (if include-body response (assoc response :body "#<?>"))
outgoing (format-request (str spyname ":\n Outgoing Response Map:") r)]
(println outgoing)
(update-in response [:body] (fn[x] (str (html-escape incoming) x (html-escape outgoing)))))))))
;; Absolute binding promise to someday get around to writing the app
(declare handler)
;; plumbing
(def app
(-> #'handler
(wrap-spy "what the handler sees" true)
(ring.middleware.stacktrace/wrap-stacktrace)
(wrap-spy "what the web server sees" false)))
;; The actual application
(defn handler [request]
{:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Hello World!</h1>" )})
;; Start the server if it hasn't already been started
(defonce server (ring.adapter.jetty/run-jetty #'app {:port 8080 :join? false}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Next we'll include the cookies middleware
(require 'ring.middleware.cookies)
;; And re-plumb
(def app
(-> #'handler
(ring.middleware.stacktrace/wrap-stacktrace)
(wrap-spy "what the handler sees" true)
(ring.middleware.cookies/wrap-cookies)
(wrap-spy "what the web server sees" false)))
Now go and look at :8080 again .
;; In the map the handler sees, there is a key :cookies, whose value is {}
;; ( If it's not, you might want to clear cookies for localhost from your browser )
;; Let's make our app set a cookie:
(defn handler [request]
{:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Setting Cookie!</h1>" )
:cookies {"yo" {:value "hi"}} })
;; What happens now is quite complicated.
;; Our key
{ :cookies {"yo" {:value "hi"}}}
;; Gets converted by the middleware, and combined with our header, to make
{ :headers {"Set-Cookie" '("yo=hi"), "Content-Type" "text/html"}}
;; in the map given to the jetty adapter
;; If you look at the page with
$ curl -sv :8080
;; Then you'll see
;; < Set-Cookie: yo=hi
;; as part of the http transaction
Now if we look at :8080 , the response will contain the Set - Cookie header .
;; Most browsers will react to this by including the cookie whenever they contact the site.
;; You can examine cookies from the browser's point of view by
( In Chrome ) looking at chrome / settings / cookies
( In ) following some interminable GUI procedure that life is too short to describe .
;; If you refresh the page yet again, you should now see:
{:headers {"cookie" "yo=hi"}}
;; in the incoming request from the webserver
;; and a new key:
{:cookies {"yo" {:value "hi"}}}
;; in the map the eventual handler sees (put there by the middleware of course!)
;; We can use this to count how many times a particular browser has been greeted:
(defn seen-before [request]
(try (Integer/parseInt (((request :cookies) "yo") :value))
(catch Exception e :never-before)))
(defn handler [request]
(let [s (seen-before request)]
(cond
(= s :never-before) {:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Hello Stranger!</h1>" )
:cookies {"yo" {:value "1"}}}
(= s 1) {:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Hello Again!</h1>" )
:cookies {"yo" {:value "2"}}}
:else {:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Hi, this is visit "s"</h1>" )
:cookies {"yo" {:value (str (inc s))}}})))
;; And now, an exercise for the reader!
If I look at my site in Firefox , it works as I expected .
If I look at it with Chrome , it double counts
;; If I use curl, like so:
;; curl -sv :8080 | grep -i hello
;; Then all I ever see is "Hello Stranger"
;; What is going on?
| null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/ring-wiki/cookies.clj | clojure | necessary dependencies
[ring/ring "1.1.7"]]
-------------
Here's an app, built in a way which should surprise no-one who's read the previous posts
Absolute binding promise to someday get around to writing the app
plumbing
The actual application
Start the server if it hasn't already been started
Next we'll include the cookies middleware
And re-plumb
In the map the handler sees, there is a key :cookies, whose value is {}
( If it's not, you might want to clear cookies for localhost from your browser )
Let's make our app set a cookie:
What happens now is quite complicated.
Our key
Gets converted by the middleware, and combined with our header, to make
in the map given to the jetty adapter
If you look at the page with
Then you'll see
< Set-Cookie: yo=hi
as part of the http transaction
Most browsers will react to this by including the cookie whenever they contact the site.
You can examine cookies from the browser's point of view by
If you refresh the page yet again, you should now see:
in the incoming request from the webserver
and a new key:
in the map the eventual handler sees (put there by the middleware of course!)
We can use this to count how many times a particular browser has been greeted:
And now, an exercise for the reader!
If I use curl, like so:
curl -sv :8080 | grep -i hello
Then all I ever see is "Hello Stranger"
What is going on? | [ [ org.clojure/clojure " " ]
(require 'ring.adapter.jetty
'ring.middleware.stacktrace
'clojure.pprint)
Middleware for spying on the doings of other middleware :
(defn html-escape [string]
(str "<pre>" (clojure.string/escape string {\< "<", \> ">"}) "</pre>"))
(defn format-request [name request]
(with-out-str
(println "-------------------------------")
(println name)
(clojure.pprint/pprint request)
(println "-------------------------------")))
(defn wrap-spy [handler spyname include-body]
(fn [request]
(let [incoming (format-request (str spyname ":\n Incoming Request:") request)]
(println incoming)
(let [response (handler request)]
(let [r (if include-body response (assoc response :body "#<?>"))
outgoing (format-request (str spyname ":\n Outgoing Response Map:") r)]
(println outgoing)
(update-in response [:body] (fn[x] (str (html-escape incoming) x (html-escape outgoing)))))))))
(declare handler)
(def app
(-> #'handler
(wrap-spy "what the handler sees" true)
(ring.middleware.stacktrace/wrap-stacktrace)
(wrap-spy "what the web server sees" false)))
(defn handler [request]
{:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Hello World!</h1>" )})
(defonce server (ring.adapter.jetty/run-jetty #'app {:port 8080 :join? false}))
(require 'ring.middleware.cookies)
(def app
(-> #'handler
(ring.middleware.stacktrace/wrap-stacktrace)
(wrap-spy "what the handler sees" true)
(ring.middleware.cookies/wrap-cookies)
(wrap-spy "what the web server sees" false)))
Now go and look at :8080 again .
(defn handler [request]
{:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Setting Cookie!</h1>" )
:cookies {"yo" {:value "hi"}} })
{ :cookies {"yo" {:value "hi"}}}
{ :headers {"Set-Cookie" '("yo=hi"), "Content-Type" "text/html"}}
$ curl -sv :8080
Now if we look at :8080 , the response will contain the Set - Cookie header .
( In Chrome ) looking at chrome / settings / cookies
( In ) following some interminable GUI procedure that life is too short to describe .
{:headers {"cookie" "yo=hi"}}
{:cookies {"yo" {:value "hi"}}}
(defn seen-before [request]
(try (Integer/parseInt (((request :cookies) "yo") :value))
(catch Exception e :never-before)))
(defn handler [request]
(let [s (seen-before request)]
(cond
(= s :never-before) {:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Hello Stranger!</h1>" )
:cookies {"yo" {:value "1"}}}
(= s 1) {:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Hello Again!</h1>" )
:cookies {"yo" {:value "2"}}}
:else {:status 200
:headers {"Content-Type" "text/html"}
:body (str "<h1>Hi, this is visit "s"</h1>" )
:cookies {"yo" {:value (str (inc s))}}})))
If I look at my site in Firefox , it works as I expected .
If I look at it with Chrome , it double counts
|
dc7d552b6868b82cf86e709182fdad7669d9f7b11d269cfd447d1764069b0bde | FieryCod/holy-lambda | hello.clj | (ns hello
(:require
[fierycod.holy-lambda.response :as hr]
[fierycod.holy-lambda.core :as h])
(:gen-class))
(defn Hello <
[_]
(hr/text "Hello world!"))
(h/entrypoint [#'Hello])
| null | https://raw.githubusercontent.com/FieryCod/holy-lambda/4b418f0cbf095f6908264e57d07068f73e05926b/benchmarks/holy-lambda-vs-other-runtimes/clojure-babashka/hello.clj | clojure | (ns hello
(:require
[fierycod.holy-lambda.response :as hr]
[fierycod.holy-lambda.core :as h])
(:gen-class))
(defn Hello <
[_]
(hr/text "Hello world!"))
(h/entrypoint [#'Hello])
|
|
212447fc8161bb7734c8642d3256960e06734291584d0ca77042c3990c860ae4 | autotaker/smopeck | Syntax.hs | # LANGUAGE DataKinds #
module Smopeck.Spec.Syntax(
TopLevelDef(..), Field(..), TypeExp.BindName(..),
Method, VarName, UserType, TypeExp, TypeEnv, Primitive,
TypeExtension, Exp(..), TypeRefine, Lattice, LatticeExt(..),
module Smopeck.Spec.Exp,
module Smopeck.Mock.Location,
fTypeExp
) where
import qualified Data.Map as M
import Smopeck.Mock.Location
import Smopeck.Spec.Exp
import Smopeck.Spec.Lattice
import qualified Smopeck.Spec.TypeExp as TypeExp
import Text.Read hiding (Number, String)
data TopLevelDef =
TypeDef UserType TypeExp
| EndpointDef Route Method TypeExtension
deriving(Eq, Show)
type Route = TypeExp.Route
type Method = String
type VarName = TypeExp.VarName
type UserType = TypeExp.UserType
type TypeExp = TypeExp.TypeExp Parsed TypeExp.HDefault
type TypeEnv = TypeExp.DefaultTypeEnv Parsed
type Primitive = TypeExp.Primitive
type TypeExtension = [(Field (TypeExp.BindName Parsed), TypeExp) ]
type Exp = TypeExp.Exp Parsed
type TypeRefine = TypeExp.TypeRefine Parsed
fTypeExp :: String -> String -> TypeExtension -> TypeRefine -> TypeExp
fTypeExp tyName bindName ext ref =
LElem $ TypeExp.TypeExpF
(read tyName)
(TypeExp.BindName bindName)
(M.fromList ext)
ref
TypeExp.NoCond
| null | https://raw.githubusercontent.com/autotaker/smopeck/7e4ab8bf716125783ccaaf84076c992c2bca5c7f/src/Smopeck/Spec/Syntax.hs | haskell | # LANGUAGE DataKinds #
module Smopeck.Spec.Syntax(
TopLevelDef(..), Field(..), TypeExp.BindName(..),
Method, VarName, UserType, TypeExp, TypeEnv, Primitive,
TypeExtension, Exp(..), TypeRefine, Lattice, LatticeExt(..),
module Smopeck.Spec.Exp,
module Smopeck.Mock.Location,
fTypeExp
) where
import qualified Data.Map as M
import Smopeck.Mock.Location
import Smopeck.Spec.Exp
import Smopeck.Spec.Lattice
import qualified Smopeck.Spec.TypeExp as TypeExp
import Text.Read hiding (Number, String)
data TopLevelDef =
TypeDef UserType TypeExp
| EndpointDef Route Method TypeExtension
deriving(Eq, Show)
type Route = TypeExp.Route
type Method = String
type VarName = TypeExp.VarName
type UserType = TypeExp.UserType
type TypeExp = TypeExp.TypeExp Parsed TypeExp.HDefault
type TypeEnv = TypeExp.DefaultTypeEnv Parsed
type Primitive = TypeExp.Primitive
type TypeExtension = [(Field (TypeExp.BindName Parsed), TypeExp) ]
type Exp = TypeExp.Exp Parsed
type TypeRefine = TypeExp.TypeRefine Parsed
fTypeExp :: String -> String -> TypeExtension -> TypeRefine -> TypeExp
fTypeExp tyName bindName ext ref =
LElem $ TypeExp.TypeExpF
(read tyName)
(TypeExp.BindName bindName)
(M.fromList ext)
ref
TypeExp.NoCond
|
|
21a1a8bd45e298b7d2ff237db63209b3a5ea3162f6f45ab0c4159e3c77f8c0db | pallet/stevedore | bash_test.clj | (ns pallet.stevedore.bash-test
(:require
[clojure.string :as string]
[clojure.test :refer [is testing]]
[clojure.tools.logging :as logging]
[pallet.common.filesystem :as filesystem]
[pallet.common.logging.logutils :as logutils]
[pallet.common.shell :as shell]
[pallet.common.string :refer [quoted]]
[pallet.script :as script]
[pallet.stevedore :refer :all]
[pallet.stevedore.bash :refer :all]
[pallet.stevedore.common]
[pallet.stevedore.test-common]))
(defmacro current-line [] (-> &form meta :line))
(defmacro bash-out
"Check output of bash. Implemented as a macro so that errors appear on the
correct line."
([str] `(bash-out ~str 0 ""))
([str exit err-msg]
`(let [r# (shell/bash ~str)]
(when-not (= ~exit (:exit r#))
(logging/errorf
"Unexpected exit status:\n:cmd %s\n:out %s\n:err %s"
~str (:out r#) (:err r#)))
(logging/tracef "bash-out %s %s" ~str r#)
(is (= ~err-msg (:err r#)))
(is (= ~exit (:exit r#)))
(:out r#))))
(defn strip-ws
"strip extraneous whitespace so tests don't fail because of differences in
whitespace" [s]
(-> s
(.replaceAll "[ ]+" " ")
.trim))
(defn strip-line-ws
"strip extraneous whitespace so tests don't fail because of differences in
whitespace"
[#^String s]
(-> s
(.replace "\n" " ")
(.replaceAll "[ ]+" " ")
.trim))
;;; We define a macro rather than a fixture so we can run individual tests
(defmacro deftest [name & body]
`(clojure.test/deftest ~name
(with-script-language :pallet.stevedore.bash/bash
~@body)))
(deftest number-literal
(is (= "42" (script 42)))
(is (= "0.5" (script 1/2))))
(deftest no-comment-on-empty
(is (= "" (script ""))))
(deftest simple-call-test
(is (script= "a b" (script ("a" b))))
(is (= "a b" (with-source-line-comments nil (script ("a" b)))))
(is (= (str " # bash_test.clj:" (current-line) "\na b") (script ("a" b)))
"has source comment on first symbol only (not on args)"))
(deftest call-multi-arg-test
(is (script= "a b c" (script ("a" b c)))))
(deftest test-arithmetic
(is (script= "(x * y)" (script (* x y)))))
(deftest test-return
(is (script= "return 42" (script (return 42)))))
(deftest test-script-call
(let [name "name1"]
(is (script= "grep \"^name1\" /etc/passwd"
(script ("grep" ~(str "\"^" name "\"") "/etc/passwd"))))))
(deftest test-clj
(let [foo 42
bar [1 2 3]]
( is (= " 42 " ( script ( clj foo ) ) ) )
(is (= "42" (script ~foo)))
(is (script= "foo 1 2 3" (script (apply foo ~bar))))))
(deftest test-str
(is (script= "foobar"
(script (str foo bar)))))
(deftest test-quoted
(is (script= "\"foobar\""
(script (quoted (str foo bar))))))
(deftest test-fn
(is (thrown? java.lang.AssertionError
(strip-ws (script (defn [x y]
("foo" a) ("bar" b)))))
"anonymous")
(is (script=
"foo() {\nx=$1\ny=$2\nfoo a\nbar b\n}"
(strip-ws (script (defn foo [x y] ("foo" a) ("bar" b)))))
"without flags")
(is (script=
(str "foo() {\nDEFINE_string \"host\" \"default\" \"Doc\" \"h\"\n"
"FLAGS \"$@\" || exit 1\n"
"eval set -- \"${FLAGS_ARGV}\"\nfoo a\nbar b\n}")
(strip-ws (script (defn foo [[:string "host" "h" "Doc" "default"]]
("foo" a) ("bar" b)))))
"with flags only")
(is (script=
(str "foo() {\nDEFINE_string \"host\" \"default\" \"Doc\" \"h\"\n"
"FLAGS \"$@\" || exit 1\n"
"eval set -- \"${FLAGS_ARGV}\"\nx=$1\ny=$2\nfoo a\nbar b\n}")
(strip-ws (script (defn foo [x y
[:string "host" "h" "Doc" "default"]]
("foo" a) ("bar" b)))))
"with flags and arguments")
(is (script=
(str "foo() {\nFLAGS_HELP=\"This is doc\"\nDEFINE_string \"host\" "
"\"default\" \"Doc\" \"h\"\nFLAGS \"$@\" || exit 1\neval set -- "
"\"${FLAGS_ARGV}\"\nx=$1\ny=$2\nfoo a\nbar b\n}")
(strip-ws (script (defn foo
"This is doc"
[x y
[:string "host" "h" "Doc" "default"]]
("foo" a) ("bar" b)))))
"with docstring and arguments"))
(deftest test-aget
(is (script= "${foo[2]}" (script (aget foo 2)))))
(deftest test-aset
(is (script= "foo[2]=1" (script (aset foo 2 1)))))
(deftest test-set!
(is (script= "foo=1" (script (set! foo 1))))
(is (thrown? clojure.lang.ExceptionInfo (script (set! foo-bar 1)))))
(deftest var-test
(is (script= "foo=1" (script (var foo 1))))
(is (thrown? clojure.lang.ExceptionInfo (script (var foo-bar 1)))))
(deftest alias-test
(is (script= "alias foo='ls -l'" (script (alias foo ("ls" -l))))))
(deftest test-array
(is (script= "(1 2 \"3\" foo)" (script [1 "2" "\"3\"" :foo]))))
(deftest test-if
(is (script= "if [ \"foo\" == \"bar\" ]; then echo fred;fi"
(script (if (= foo bar) (println fred)))))
(is (script=
"if [ \"foo\" == \"bar\" ] && [ \"foo\" != \"baz\" ]; then echo fred;fi"
(script (if (&& (== foo bar) (!= foo baz)) (println fred)))))
(is (= "fred\n"
(bash-out
(script (if (&& (== foo foo) (!= foo baz)) (println "fred"))))))
(is (script=
"if foo; then\nx=3\nfoo x\nelse\ny=4\nbar y\nfi"
(script (if foo (do (var x 3) ("foo" x)) (do (var y 4) ("bar" y))))))
(is (= "not foo\n"
(bash-out (script (if (== foo bar)
(do (println "foo"))
(do (println "not foo")))))))
(is (script= "if [ -e file1 ]; then echo foo;fi"
(script (if (file-exists? "file1") (println "foo")))))
(is (script= "if ! { [ -e file1 ]; }; then echo foo;fi"
(script (if (not (file-exists? "file1")) (println "foo")))))
(is (= "foo\n"
(bash-out
(script (if (not (file-exists? "file1")) (println "foo"))))))
(is (script= "if ! {[ -e file1 ]; }; then echo foo;fi"
(let [condition (script (file-exists? "file1"))]
(script (if (not ~condition) (println "foo"))))))
(is (= "foo\n"
(bash-out (let [condition (script (file-exists? "file1"))]
(script (if (not ~condition) (println "foo")))))))
(is (script=
(str "if ! {[ \"a\" == \"1\" ] && file1; }; then echo foo;fi")
(let [condition (script (and (= a 1) "file1"))]
(script (if (not ~condition) (println "foo"))))))
(is (script= "if ! { grep aa file1; }; then echo foo;fi"
(script (if (not ("grep" "aa" "file1")) (println "foo")))))
(is (script= "if ! { [ -e file1 ]; } || [ \"a\" == \"b\" ]; then echo foo;fi"
(script (if (|| (not (file-exists? "file1")) (== "a" "b"))
(println "foo")))))
(testing "if block as string with newline is treated as compound"
(is (script= "if [ -e f ]; then\nls\nls\nfi"
(script (if (file-exists? "f") "ls\nls")))))
(testing "an expression"
(is (script= "if ! { [ -e md5 ]; } || ls file; then echo 1;fi"
(script (if (|| (not (file-exists? "md5"))
("ls" "file"))
(println 1)))))))
(deftest if-nested-test
(is (script=
(str "if [ \"foo\" == \"bar\" ]; then\nif [ \"foo\" != \"baz\" ]; "
"then echo fred;fi\nfi")
(script (if (== foo bar)
(if (!= foo baz)
(println fred))))))
(is (= "" (bash-out (script (if (== foo bar)
(if (!= foo baz)
(println fred))))))))
(deftest test-if-not
(is (script=
"if ! ( [ -e bar ] ); then echo fred;fi"
(script (if-not (file-exists? bar) (println fred)))))
(is (script=
"if ! ( [ -e bar ] && [ \"foo\" == \"bar\" ] ); then echo fred;fi"
(script (if-not (&& (file-exists? bar) (== foo bar)) (println fred)))))
(is (script=
(str "if ! ( [ \"foo\" == \"bar\" ] && [ \"foo\" == \"baz\" ] ); "
"then echo fred;fi")
(script (if-not (&& (== foo bar) (== foo baz)) (println fred)))))
(is (= "fred\n"
(bash-out (script (if-not (&& (== foo foo) (== foo baz))
(println "fred")))))))
(deftest test-when
(is (script= "if [ \"foo\" == \"bar\" ]; then\necho fred\nfi"
(script (when (= foo bar) (println fred)))))
(is (script= "if foo; then\nx=3\nfoo x\nfi"
(script (when foo (var x 3) ("foo" x))))))
(deftest test-when-not
(is (script= "if ! ( [ \"foo\" == \"bar\" ] ); then\necho fred\nfi"
(script (when-not (= foo bar) (println fred)))))
(is (script= "if ! ( foo ); then\nx=3\nfoo x\nfi"
(script (when-not foo (var x 3) ("foo" x))))))
(deftest test-case
(is (script= "case ${X} in\n1)\nsomething;;\n\"2\")\nsomething else;;\nesac"
(script (case @X
1 ("something")
~(quoted "2") ("something" else))))))
(deftest test-doseq
(is (script= "for X in 1 2 3; do\nsomething ${X}\ndone"
(script (doseq [X [1 2 3]] ("something" @X)))))
(is (script= "for X in $(ls); do\nsomething ${X}\ndone"
(script (doseq [X @("ls")] ("something" @X))))))
(deftest test-map
(is (script= "([packages]=(columnchart))"
(strip-ws (script {:packages ["columnchart"]}))))
(is (script= "{ hash_set x q d; hash_set x p c; }\necho ${x[p]}"
(script (do (var x {:p "c" :q "d"})
(println (aget x :p))))))
(is (= "c\nd\n"
(bash-out (script
~pallet.stevedore.bash/hashlib
(var x {:p "c" "/a/b/c-e" "d"})
(println (get x :p))
(println (get x "/a/b/c-e"))))))
(testing "assoc!"
(is (= "c\n1\n2\n"
(bash-out (script
~pallet.stevedore.bash/hashlib
(var x {:p "c" :q "q"})
(assoc! x :q 1)
(assoc! x :r 2)
(println (get x :p))
(println (get x :q))
(println (get x :r)))))))
(testing "merge!"
(is (= "c\n1\n2\n"
(bash-out (script
~pallet.stevedore.bash/hashlib
(var x {:p "c" :q "q"})
(merge! x {:q 1 :r 2})
(println (get x :p))
(println (get x :q))
(println (get x :r))))))))
(deftest test-do
(is (script= "let x=3\nlet y=4\nlet z=(x + y)"
(strip-ws
(script
(let x 3)
(let y 4)
(let z (+ x y))))))
(is (= "7\n"
(bash-out
(script
(let x 3)
(let y 4)
(let z (+ x y))
(println @z))))))
(deftest deref-test
(is (script= "${TMPDIR-/tmp}" (script (deref TMPDIR :default "/tmp"))))
(is (script= "${TMPDIR:-/tmp}" (script (deref TMPDIR :default-value "/tmp"))))
(is (script= "${TMPDIR=/tmp}" (script (deref TMPDIR :default-assign "/tmp"))))
(is (script= "${TMPDIR:=/tmp}" (script (deref TMPDIR
:default-assign-value "/tmp"))))
(is (script= "${TMPDIR-${TMP}}" (script (deref TMPDIR :default @TMP))))
(is (script= "${TMPDIR-/tmp}" (script @TMPDIR-/tmp)))
(is (script= "$(ls)" (script @("ls"))))
(is (bash-out (checked-commands "ls"))))
(deftest test-combine-forms
(let [stuff `(do
(local ~'x 3)
(local ~'y 4))]
(is (script= "foo() {\nx=$1\nlocal x=3\nlocal y=4\n}"
(script (defn foo [x] ~stuff))))))
(deftest defvar-test
(is (script= "x=1"
(script (defvar x 1)))))
(deftest println-test
(is (script= "echo hello"
(script (println "hello"))))
(is (script= "echo hello there"
(script (println "hello there")))))
(deftest do-script-test
(is (script= "fred" (do-script "fred")))
(is (script= "fred\nblogs" (do-script "fred" "blogs")))
(is (script= "fred\nblogs" (do-script "fred\n\n" "blogs\n")))
(is (script= "fred\nblogs" (do-script "fred\n\n" nil "blogs\n"))))
(deftest chain-commands-test
(is (script= "fred" (apply chain-commands ["fred"])))
(is (script= "fred && \\\nblogs" (apply chain-commands ["fred" "blogs"])))
(is (script= "fred && \\\nblogs"
(apply chain-commands ["fred\n\n" "blogs\n"])))
(is (script= "fred && \\\nblogs"
(apply chain-commands ["fred\n\n" nil "blogs\n"])))
(is (script= "fred" (chain-commands "fred")))
(is (script= "fred && \\\nblogs" (chain-commands "fred" "blogs")))
(is (script= "fred && \\\nblogs" (chain-commands "fred\n\n" "blogs\n")))
(is (script= "fred && \\\nblogs"
(chain-commands "fred\n\n" nil "blogs\n"))))
(deftest chain-script-test
(is (script= "fred" (chained-script ("fred"))))
(is (script= "fred && \\\nblogs" (chained-script ("fred") ("blogs")))))
(deftest checked-commands-test
(is (script=
(str "echo 'test...';\n{\necho fred && \\\necho tom\n } || "
"{ echo '#> test : FAIL'; exit 1;} >&2 "
"\necho '#> test : SUCCESS'")
(checked-commands "test" "echo fred" "echo tom")))
(is (= "test...\ntom\n#> test : SUCCESS\n"
(bash-out (checked-commands "test" "echo tom"))))
(is (= "test...\nfred\ntom\n#> test : SUCCESS\n"
(bash-out (checked-commands "test" "echo fred" "echo tom"))))
(is (= "test...\n"
(bash-out
(checked-commands "test" "test 1 = 2") 1 "#> test : FAIL\n"))))
(deftest checked-script-test
(is (script-no-ws=
(checked-commands "msg" (script "ls") (script "ls"))
(checked-script "msg" ("ls") ("ls"))))
(is (script=
(str "echo 'test...';\n{\necho fred && \\\necho tom\n } || "
"{ echo '#> test : FAIL'; exit 1;} >&2 "
"\necho '#> test : SUCCESS'")
(checked-script "test" (println fred) (println tom))))
(is (= "test...\ntom\n#> test : SUCCESS\n"
(bash-out (checked-script "test" (println tom)))))
(is (= "test...\nfred\ntom\n#> test : SUCCESS\n"
(bash-out (checked-script "test" (println fred) (println tom)))))
(is (= "test...\n"
(bash-out
(checked-script "test" ("test" 1 = 2)) 1 "#> test : FAIL\n"))))
(deftest group-test
(is (script= "{\nls\n}"
(script (group ("ls")))))
(is (script= "{\nls\nls\n}"
(script (group ("ls") ("ls"))))))
(deftest pipe-test
(is (script= "ls"
(script (pipe ("ls")))))
(is (script= "ls | \\\nls"
(script (pipe ("ls") ("ls")))))
(is (= "2"
(string/trim (bash-out
(script (pipe (println "one two") ("wc" -w))))))))
(deftest empty?-test
(is (script= "if [ -z ${a} ]; then echo true;fi"
(script (if (empty? @a) (println true))))))
(deftest unquote-splicing-test
(is (script= "a b c" (script ~@["a" "b" "c"])))
(is (script= "x" (script x ~@[])))
(is (script= "x" (script ("x" ~@[]))))
(is (script= "x" (script ("x" ~@(list)))))
(let [x ["a" "b" "c"]]
(is (script= "a b c" (script ~@x))))
(let [x []]
(is (script= "x" (script x ~@x))))
(let [x nil]
(is (script= "" (script ~@x)))
(is (script= "a" (script (str "a" ~@x)))))
(let [x []]
(is (script= "" (script ~@x))))
(let [fx (fn [] ["a" "b" "c"])]
(is (script= "a b c" (script ~@(fx))))
(is (script= "abc" (script (str ~@(fx))))))
(let [xfn (script/script-fn [& args])]
(script/defimpl xfn :default [& args]
("xfn" ~@args))
(let [x nil]
(is (script= "xfn" (script (xfn ~@x)))))
(let [x [:a 1]]
(is (script= "xfn a 1" (script (xfn ~@x)))))))
(logutils/with-threshold [:error]
(script/defscript x [a])
(script/defimpl x :default [a] a))
| null | https://raw.githubusercontent.com/pallet/stevedore/735530a4d21a119d6bc29e6982eaa9c4840b1d54/test/pallet/stevedore/bash_test.clj | clojure | We define a macro rather than a fixture so we can run individual tests | (ns pallet.stevedore.bash-test
(:require
[clojure.string :as string]
[clojure.test :refer [is testing]]
[clojure.tools.logging :as logging]
[pallet.common.filesystem :as filesystem]
[pallet.common.logging.logutils :as logutils]
[pallet.common.shell :as shell]
[pallet.common.string :refer [quoted]]
[pallet.script :as script]
[pallet.stevedore :refer :all]
[pallet.stevedore.bash :refer :all]
[pallet.stevedore.common]
[pallet.stevedore.test-common]))
(defmacro current-line [] (-> &form meta :line))
(defmacro bash-out
"Check output of bash. Implemented as a macro so that errors appear on the
correct line."
([str] `(bash-out ~str 0 ""))
([str exit err-msg]
`(let [r# (shell/bash ~str)]
(when-not (= ~exit (:exit r#))
(logging/errorf
"Unexpected exit status:\n:cmd %s\n:out %s\n:err %s"
~str (:out r#) (:err r#)))
(logging/tracef "bash-out %s %s" ~str r#)
(is (= ~err-msg (:err r#)))
(is (= ~exit (:exit r#)))
(:out r#))))
(defn strip-ws
"strip extraneous whitespace so tests don't fail because of differences in
whitespace" [s]
(-> s
(.replaceAll "[ ]+" " ")
.trim))
(defn strip-line-ws
"strip extraneous whitespace so tests don't fail because of differences in
whitespace"
[#^String s]
(-> s
(.replace "\n" " ")
(.replaceAll "[ ]+" " ")
.trim))
(defmacro deftest [name & body]
`(clojure.test/deftest ~name
(with-script-language :pallet.stevedore.bash/bash
~@body)))
(deftest number-literal
(is (= "42" (script 42)))
(is (= "0.5" (script 1/2))))
(deftest no-comment-on-empty
(is (= "" (script ""))))
(deftest simple-call-test
(is (script= "a b" (script ("a" b))))
(is (= "a b" (with-source-line-comments nil (script ("a" b)))))
(is (= (str " # bash_test.clj:" (current-line) "\na b") (script ("a" b)))
"has source comment on first symbol only (not on args)"))
(deftest call-multi-arg-test
(is (script= "a b c" (script ("a" b c)))))
(deftest test-arithmetic
(is (script= "(x * y)" (script (* x y)))))
(deftest test-return
(is (script= "return 42" (script (return 42)))))
(deftest test-script-call
(let [name "name1"]
(is (script= "grep \"^name1\" /etc/passwd"
(script ("grep" ~(str "\"^" name "\"") "/etc/passwd"))))))
(deftest test-clj
(let [foo 42
bar [1 2 3]]
( is (= " 42 " ( script ( clj foo ) ) ) )
(is (= "42" (script ~foo)))
(is (script= "foo 1 2 3" (script (apply foo ~bar))))))
(deftest test-str
(is (script= "foobar"
(script (str foo bar)))))
(deftest test-quoted
(is (script= "\"foobar\""
(script (quoted (str foo bar))))))
(deftest test-fn
(is (thrown? java.lang.AssertionError
(strip-ws (script (defn [x y]
("foo" a) ("bar" b)))))
"anonymous")
(is (script=
"foo() {\nx=$1\ny=$2\nfoo a\nbar b\n}"
(strip-ws (script (defn foo [x y] ("foo" a) ("bar" b)))))
"without flags")
(is (script=
(str "foo() {\nDEFINE_string \"host\" \"default\" \"Doc\" \"h\"\n"
"FLAGS \"$@\" || exit 1\n"
"eval set -- \"${FLAGS_ARGV}\"\nfoo a\nbar b\n}")
(strip-ws (script (defn foo [[:string "host" "h" "Doc" "default"]]
("foo" a) ("bar" b)))))
"with flags only")
(is (script=
(str "foo() {\nDEFINE_string \"host\" \"default\" \"Doc\" \"h\"\n"
"FLAGS \"$@\" || exit 1\n"
"eval set -- \"${FLAGS_ARGV}\"\nx=$1\ny=$2\nfoo a\nbar b\n}")
(strip-ws (script (defn foo [x y
[:string "host" "h" "Doc" "default"]]
("foo" a) ("bar" b)))))
"with flags and arguments")
(is (script=
(str "foo() {\nFLAGS_HELP=\"This is doc\"\nDEFINE_string \"host\" "
"\"default\" \"Doc\" \"h\"\nFLAGS \"$@\" || exit 1\neval set -- "
"\"${FLAGS_ARGV}\"\nx=$1\ny=$2\nfoo a\nbar b\n}")
(strip-ws (script (defn foo
"This is doc"
[x y
[:string "host" "h" "Doc" "default"]]
("foo" a) ("bar" b)))))
"with docstring and arguments"))
(deftest test-aget
(is (script= "${foo[2]}" (script (aget foo 2)))))
(deftest test-aset
(is (script= "foo[2]=1" (script (aset foo 2 1)))))
(deftest test-set!
(is (script= "foo=1" (script (set! foo 1))))
(is (thrown? clojure.lang.ExceptionInfo (script (set! foo-bar 1)))))
(deftest var-test
(is (script= "foo=1" (script (var foo 1))))
(is (thrown? clojure.lang.ExceptionInfo (script (var foo-bar 1)))))
(deftest alias-test
(is (script= "alias foo='ls -l'" (script (alias foo ("ls" -l))))))
(deftest test-array
(is (script= "(1 2 \"3\" foo)" (script [1 "2" "\"3\"" :foo]))))
(deftest test-if
(is (script= "if [ \"foo\" == \"bar\" ]; then echo fred;fi"
(script (if (= foo bar) (println fred)))))
(is (script=
"if [ \"foo\" == \"bar\" ] && [ \"foo\" != \"baz\" ]; then echo fred;fi"
(script (if (&& (== foo bar) (!= foo baz)) (println fred)))))
(is (= "fred\n"
(bash-out
(script (if (&& (== foo foo) (!= foo baz)) (println "fred"))))))
(is (script=
"if foo; then\nx=3\nfoo x\nelse\ny=4\nbar y\nfi"
(script (if foo (do (var x 3) ("foo" x)) (do (var y 4) ("bar" y))))))
(is (= "not foo\n"
(bash-out (script (if (== foo bar)
(do (println "foo"))
(do (println "not foo")))))))
(is (script= "if [ -e file1 ]; then echo foo;fi"
(script (if (file-exists? "file1") (println "foo")))))
(is (script= "if ! { [ -e file1 ]; }; then echo foo;fi"
(script (if (not (file-exists? "file1")) (println "foo")))))
(is (= "foo\n"
(bash-out
(script (if (not (file-exists? "file1")) (println "foo"))))))
(is (script= "if ! {[ -e file1 ]; }; then echo foo;fi"
(let [condition (script (file-exists? "file1"))]
(script (if (not ~condition) (println "foo"))))))
(is (= "foo\n"
(bash-out (let [condition (script (file-exists? "file1"))]
(script (if (not ~condition) (println "foo")))))))
(is (script=
(str "if ! {[ \"a\" == \"1\" ] && file1; }; then echo foo;fi")
(let [condition (script (and (= a 1) "file1"))]
(script (if (not ~condition) (println "foo"))))))
(is (script= "if ! { grep aa file1; }; then echo foo;fi"
(script (if (not ("grep" "aa" "file1")) (println "foo")))))
(is (script= "if ! { [ -e file1 ]; } || [ \"a\" == \"b\" ]; then echo foo;fi"
(script (if (|| (not (file-exists? "file1")) (== "a" "b"))
(println "foo")))))
(testing "if block as string with newline is treated as compound"
(is (script= "if [ -e f ]; then\nls\nls\nfi"
(script (if (file-exists? "f") "ls\nls")))))
(testing "an expression"
(is (script= "if ! { [ -e md5 ]; } || ls file; then echo 1;fi"
(script (if (|| (not (file-exists? "md5"))
("ls" "file"))
(println 1)))))))
(deftest if-nested-test
(is (script=
(str "if [ \"foo\" == \"bar\" ]; then\nif [ \"foo\" != \"baz\" ]; "
"then echo fred;fi\nfi")
(script (if (== foo bar)
(if (!= foo baz)
(println fred))))))
(is (= "" (bash-out (script (if (== foo bar)
(if (!= foo baz)
(println fred))))))))
(deftest test-if-not
(is (script=
"if ! ( [ -e bar ] ); then echo fred;fi"
(script (if-not (file-exists? bar) (println fred)))))
(is (script=
"if ! ( [ -e bar ] && [ \"foo\" == \"bar\" ] ); then echo fred;fi"
(script (if-not (&& (file-exists? bar) (== foo bar)) (println fred)))))
(is (script=
(str "if ! ( [ \"foo\" == \"bar\" ] && [ \"foo\" == \"baz\" ] ); "
"then echo fred;fi")
(script (if-not (&& (== foo bar) (== foo baz)) (println fred)))))
(is (= "fred\n"
(bash-out (script (if-not (&& (== foo foo) (== foo baz))
(println "fred")))))))
(deftest test-when
(is (script= "if [ \"foo\" == \"bar\" ]; then\necho fred\nfi"
(script (when (= foo bar) (println fred)))))
(is (script= "if foo; then\nx=3\nfoo x\nfi"
(script (when foo (var x 3) ("foo" x))))))
(deftest test-when-not
(is (script= "if ! ( [ \"foo\" == \"bar\" ] ); then\necho fred\nfi"
(script (when-not (= foo bar) (println fred)))))
(is (script= "if ! ( foo ); then\nx=3\nfoo x\nfi"
(script (when-not foo (var x 3) ("foo" x))))))
(deftest test-case
(is (script= "case ${X} in\n1)\nsomething;;\n\"2\")\nsomething else;;\nesac"
(script (case @X
1 ("something")
~(quoted "2") ("something" else))))))
(deftest test-doseq
(is (script= "for X in 1 2 3; do\nsomething ${X}\ndone"
(script (doseq [X [1 2 3]] ("something" @X)))))
(is (script= "for X in $(ls); do\nsomething ${X}\ndone"
(script (doseq [X @("ls")] ("something" @X))))))
(deftest test-map
(is (script= "([packages]=(columnchart))"
(strip-ws (script {:packages ["columnchart"]}))))
(is (script= "{ hash_set x q d; hash_set x p c; }\necho ${x[p]}"
(script (do (var x {:p "c" :q "d"})
(println (aget x :p))))))
(is (= "c\nd\n"
(bash-out (script
~pallet.stevedore.bash/hashlib
(var x {:p "c" "/a/b/c-e" "d"})
(println (get x :p))
(println (get x "/a/b/c-e"))))))
(testing "assoc!"
(is (= "c\n1\n2\n"
(bash-out (script
~pallet.stevedore.bash/hashlib
(var x {:p "c" :q "q"})
(assoc! x :q 1)
(assoc! x :r 2)
(println (get x :p))
(println (get x :q))
(println (get x :r)))))))
(testing "merge!"
(is (= "c\n1\n2\n"
(bash-out (script
~pallet.stevedore.bash/hashlib
(var x {:p "c" :q "q"})
(merge! x {:q 1 :r 2})
(println (get x :p))
(println (get x :q))
(println (get x :r))))))))
(deftest test-do
(is (script= "let x=3\nlet y=4\nlet z=(x + y)"
(strip-ws
(script
(let x 3)
(let y 4)
(let z (+ x y))))))
(is (= "7\n"
(bash-out
(script
(let x 3)
(let y 4)
(let z (+ x y))
(println @z))))))
(deftest deref-test
(is (script= "${TMPDIR-/tmp}" (script (deref TMPDIR :default "/tmp"))))
(is (script= "${TMPDIR:-/tmp}" (script (deref TMPDIR :default-value "/tmp"))))
(is (script= "${TMPDIR=/tmp}" (script (deref TMPDIR :default-assign "/tmp"))))
(is (script= "${TMPDIR:=/tmp}" (script (deref TMPDIR
:default-assign-value "/tmp"))))
(is (script= "${TMPDIR-${TMP}}" (script (deref TMPDIR :default @TMP))))
(is (script= "${TMPDIR-/tmp}" (script @TMPDIR-/tmp)))
(is (script= "$(ls)" (script @("ls"))))
(is (bash-out (checked-commands "ls"))))
(deftest test-combine-forms
(let [stuff `(do
(local ~'x 3)
(local ~'y 4))]
(is (script= "foo() {\nx=$1\nlocal x=3\nlocal y=4\n}"
(script (defn foo [x] ~stuff))))))
(deftest defvar-test
(is (script= "x=1"
(script (defvar x 1)))))
(deftest println-test
(is (script= "echo hello"
(script (println "hello"))))
(is (script= "echo hello there"
(script (println "hello there")))))
(deftest do-script-test
(is (script= "fred" (do-script "fred")))
(is (script= "fred\nblogs" (do-script "fred" "blogs")))
(is (script= "fred\nblogs" (do-script "fred\n\n" "blogs\n")))
(is (script= "fred\nblogs" (do-script "fred\n\n" nil "blogs\n"))))
(deftest chain-commands-test
(is (script= "fred" (apply chain-commands ["fred"])))
(is (script= "fred && \\\nblogs" (apply chain-commands ["fred" "blogs"])))
(is (script= "fred && \\\nblogs"
(apply chain-commands ["fred\n\n" "blogs\n"])))
(is (script= "fred && \\\nblogs"
(apply chain-commands ["fred\n\n" nil "blogs\n"])))
(is (script= "fred" (chain-commands "fred")))
(is (script= "fred && \\\nblogs" (chain-commands "fred" "blogs")))
(is (script= "fred && \\\nblogs" (chain-commands "fred\n\n" "blogs\n")))
(is (script= "fred && \\\nblogs"
(chain-commands "fred\n\n" nil "blogs\n"))))
(deftest chain-script-test
(is (script= "fred" (chained-script ("fred"))))
(is (script= "fred && \\\nblogs" (chained-script ("fred") ("blogs")))))
(deftest checked-commands-test
(is (script=
(str "echo 'test...';\n{\necho fred && \\\necho tom\n } || "
"{ echo '#> test : FAIL'; exit 1;} >&2 "
"\necho '#> test : SUCCESS'")
(checked-commands "test" "echo fred" "echo tom")))
(is (= "test...\ntom\n#> test : SUCCESS\n"
(bash-out (checked-commands "test" "echo tom"))))
(is (= "test...\nfred\ntom\n#> test : SUCCESS\n"
(bash-out (checked-commands "test" "echo fred" "echo tom"))))
(is (= "test...\n"
(bash-out
(checked-commands "test" "test 1 = 2") 1 "#> test : FAIL\n"))))
(deftest checked-script-test
(is (script-no-ws=
(checked-commands "msg" (script "ls") (script "ls"))
(checked-script "msg" ("ls") ("ls"))))
(is (script=
(str "echo 'test...';\n{\necho fred && \\\necho tom\n } || "
"{ echo '#> test : FAIL'; exit 1;} >&2 "
"\necho '#> test : SUCCESS'")
(checked-script "test" (println fred) (println tom))))
(is (= "test...\ntom\n#> test : SUCCESS\n"
(bash-out (checked-script "test" (println tom)))))
(is (= "test...\nfred\ntom\n#> test : SUCCESS\n"
(bash-out (checked-script "test" (println fred) (println tom)))))
(is (= "test...\n"
(bash-out
(checked-script "test" ("test" 1 = 2)) 1 "#> test : FAIL\n"))))
(deftest group-test
(is (script= "{\nls\n}"
(script (group ("ls")))))
(is (script= "{\nls\nls\n}"
(script (group ("ls") ("ls"))))))
(deftest pipe-test
(is (script= "ls"
(script (pipe ("ls")))))
(is (script= "ls | \\\nls"
(script (pipe ("ls") ("ls")))))
(is (= "2"
(string/trim (bash-out
(script (pipe (println "one two") ("wc" -w))))))))
(deftest empty?-test
(is (script= "if [ -z ${a} ]; then echo true;fi"
(script (if (empty? @a) (println true))))))
(deftest unquote-splicing-test
(is (script= "a b c" (script ~@["a" "b" "c"])))
(is (script= "x" (script x ~@[])))
(is (script= "x" (script ("x" ~@[]))))
(is (script= "x" (script ("x" ~@(list)))))
(let [x ["a" "b" "c"]]
(is (script= "a b c" (script ~@x))))
(let [x []]
(is (script= "x" (script x ~@x))))
(let [x nil]
(is (script= "" (script ~@x)))
(is (script= "a" (script (str "a" ~@x)))))
(let [x []]
(is (script= "" (script ~@x))))
(let [fx (fn [] ["a" "b" "c"])]
(is (script= "a b c" (script ~@(fx))))
(is (script= "abc" (script (str ~@(fx))))))
(let [xfn (script/script-fn [& args])]
(script/defimpl xfn :default [& args]
("xfn" ~@args))
(let [x nil]
(is (script= "xfn" (script (xfn ~@x)))))
(let [x [:a 1]]
(is (script= "xfn a 1" (script (xfn ~@x)))))))
(logutils/with-threshold [:error]
(script/defscript x [a])
(script/defimpl x :default [a] a))
|
945542cfd5a5a2df6fe0f26d4ce998e4f8b055948097e10be0a6b763ec9d6a0a | ocamllabs/vscode-ocaml-platform | ast_editor_state.mli | open Import
(** This module's interface is private to [Ast_editor.t]. It is in a separate
module to avoid circular dependencies with [Extension_instance.t] *)
type t
type ast_mode =
| Original_ast
| Preprocessed_ast
val make : unit -> t
val find_original_doc_by_pp_uri : t -> Uri.t -> string option
val find_webview_by_doc : t -> Uri.t -> WebView.t option
val associate_origin_and_pp : t -> origin_uri:Uri.t -> pp_doc_uri:Uri.t -> unit
val get_current_ast_mode : t -> ast_mode
val set_current_ast_mode : t -> ast_mode -> unit
val get_hover_disposable : t -> Disposable.t option
val set_hover_disposable : t -> Disposable.t option -> unit
val entry_exists : t -> origin_doc:Uri.t -> pp_doc:Uri.t -> bool
val on_origin_update_content : t -> Uri.t -> unit
val pp_status : t -> Uri.t -> [ `Absent_or_pped | `Original ]
val remove_doc_entries : t -> Uri.t -> unit
val set_webview : t -> Uri.t -> WebView.t -> unit
val remove_dirty_original_doc : t -> pp_uri:Uri.t -> unit
val remove_webview : t -> Uri.t -> unit
| null | https://raw.githubusercontent.com/ocamllabs/vscode-ocaml-platform/97818d4dd0e29cbd2589e668616896c4755e7d34/src/ast_editor_state.mli | ocaml | * This module's interface is private to [Ast_editor.t]. It is in a separate
module to avoid circular dependencies with [Extension_instance.t] | open Import
type t
type ast_mode =
| Original_ast
| Preprocessed_ast
val make : unit -> t
val find_original_doc_by_pp_uri : t -> Uri.t -> string option
val find_webview_by_doc : t -> Uri.t -> WebView.t option
val associate_origin_and_pp : t -> origin_uri:Uri.t -> pp_doc_uri:Uri.t -> unit
val get_current_ast_mode : t -> ast_mode
val set_current_ast_mode : t -> ast_mode -> unit
val get_hover_disposable : t -> Disposable.t option
val set_hover_disposable : t -> Disposable.t option -> unit
val entry_exists : t -> origin_doc:Uri.t -> pp_doc:Uri.t -> bool
val on_origin_update_content : t -> Uri.t -> unit
val pp_status : t -> Uri.t -> [ `Absent_or_pped | `Original ]
val remove_doc_entries : t -> Uri.t -> unit
val set_webview : t -> Uri.t -> WebView.t -> unit
val remove_dirty_original_doc : t -> pp_uri:Uri.t -> unit
val remove_webview : t -> Uri.t -> unit
|
5e0dc4a29e7b843d3dde5ce25b882e9ab974766e167d239c2e6e36b04075a1b5 | ruisb/LambdaPi | Printer.hs | module SimplyTyped.Printer where
import Interpreter.Types
import SimplyTyped.Types
import Prelude hiding (print)
import Text.PrettyPrint.HughesPJ hiding (parens)
import qualified Text.PrettyPrint.HughesPJ as PP
tPrint :: Int -> Type -> Doc
tPrint p (TFree (Global s)) = text s
tPrint p (Fun ty ty') = parensIf (p > 0) (sep [tPrint 0 ty <> text " ->", nest 2 (tPrint 0 ty')])
iPrint :: Int -> Int -> ITerm -> Doc
iPrint p ii (Ann c ty) = parensIf (p > 1) (cPrint 2 ii c <> text " :: " <> tPrint 0 ty)
iPrint p ii (Bound k vn) = text ('^':vn) --(vars !! (ii - k - 1))
iPrint p ii (Free (Global s))= text s
iPrint p ii (i :@: c) = parensIf (p > 2) (sep [iPrint 2 ii i, nest 2 (cPrint 3 ii c)])
iPrint p ii x = text ("[" ++ show x ++ "]")
cPrint :: Int -> Int -> CTerm -> Doc
cPrint p ii (Inf i) = iPrint p ii i
cPrint p ii (Lam vn c) = parensIf (p > 0) (text "\\ " <> text vn{--CHANGED(vars !! ii)--} <> text " -> " <> cPrint 0 (ii + 1) c)
-- vars :: [String]
vars = [ c : n | n < - " " : map show [ 1 .. ] , c < - [ ' x','y','z ' ] + + [ ' a' .. 'w ' ] ]
parensIf :: Bool -> Doc -> Doc
parensIf True = PP.parens
parensIf False = id
print = render . cPrint 0 0
printType = render . tPrint 0
| null | https://raw.githubusercontent.com/ruisb/LambdaPi/e8aea47b7098407f6ec3abb8ad65ac0c70729bf8/SimplyTyped/Printer.hs | haskell | (vars !! (ii - k - 1))
-CHANGED(vars !! ii)-
vars :: [String] | module SimplyTyped.Printer where
import Interpreter.Types
import SimplyTyped.Types
import Prelude hiding (print)
import Text.PrettyPrint.HughesPJ hiding (parens)
import qualified Text.PrettyPrint.HughesPJ as PP
tPrint :: Int -> Type -> Doc
tPrint p (TFree (Global s)) = text s
tPrint p (Fun ty ty') = parensIf (p > 0) (sep [tPrint 0 ty <> text " ->", nest 2 (tPrint 0 ty')])
iPrint :: Int -> Int -> ITerm -> Doc
iPrint p ii (Ann c ty) = parensIf (p > 1) (cPrint 2 ii c <> text " :: " <> tPrint 0 ty)
iPrint p ii (Free (Global s))= text s
iPrint p ii (i :@: c) = parensIf (p > 2) (sep [iPrint 2 ii i, nest 2 (cPrint 3 ii c)])
iPrint p ii x = text ("[" ++ show x ++ "]")
cPrint :: Int -> Int -> CTerm -> Doc
cPrint p ii (Inf i) = iPrint p ii i
vars = [ c : n | n < - " " : map show [ 1 .. ] , c < - [ ' x','y','z ' ] + + [ ' a' .. 'w ' ] ]
parensIf :: Bool -> Doc -> Doc
parensIf True = PP.parens
parensIf False = id
print = render . cPrint 0 0
printType = render . tPrint 0
|
e26f658427e256c28ada59a61e64fbaccf8ce4977f05316c13ee7bd29d1ee95d | yallop/fomega | support.ml | open Format
module Error = struct
exception Exit of int
type info = FI of string * int * int * int * int | UNKNOWN
type 'a withinfo = {i: info; v: 'a}
let dummyinfo = UNKNOWN
let createInfo f ls cs le ce = FI(f, ls, cs, le, ce)
let merge i1 i2 = match i1, i2 with
| FI (f, ls, cs, _, _), FI (_, _, _, le, ce) -> FI (f, ls, cs, le, ce)
| UNKNOWN, _ | _, UNKNOWN -> UNKNOWN
let errf f =
print_flush();
print_newline();
open_vbox 0;
open_hvbox 0;
f();
print_cut();
close_box();
print_newline();
raise (Exit 1)
let printInfo =
(* In the text of the book, file positions in error messages are replaced
with the string "Error:" *)
function
FI(f,ls,cs,le,ce) ->
if (String.length f <> 0) then begin
print_string f;
print_string ":";
end;
print_int ls; print_string ".";
print_int cs; print_string "-";
if le <> ls then
(print_int le;prerr_string ".");
print_int ce; print_string ":"
| UNKNOWN ->
print_string "<Unknown file and line>: "
let errfAt fi f = errf(fun()-> printInfo fi; print_space(); f())
let err s = errf (fun()-> print_string "Error: "; print_string s)
let error fi s = errfAt fi (fun()-> print_string s)
let warning s =
print_string "Warning: "; print_string s;
print_newline()
let warningAt fi s =
printInfo fi; print_string " Warning: ";
print_string s; print_newline()
end
(* ---------------------------------------------------------------------- *)
module Pervasive = struct
type info = Error.info
let pr = Format.print_string
end (* module pervasive *)
| null | https://raw.githubusercontent.com/yallop/fomega/f0ba8efb4e40202203bc77652d2e768fdab9239c/src/support.ml | ocaml | In the text of the book, file positions in error messages are replaced
with the string "Error:"
----------------------------------------------------------------------
module pervasive | open Format
module Error = struct
exception Exit of int
type info = FI of string * int * int * int * int | UNKNOWN
type 'a withinfo = {i: info; v: 'a}
let dummyinfo = UNKNOWN
let createInfo f ls cs le ce = FI(f, ls, cs, le, ce)
let merge i1 i2 = match i1, i2 with
| FI (f, ls, cs, _, _), FI (_, _, _, le, ce) -> FI (f, ls, cs, le, ce)
| UNKNOWN, _ | _, UNKNOWN -> UNKNOWN
let errf f =
print_flush();
print_newline();
open_vbox 0;
open_hvbox 0;
f();
print_cut();
close_box();
print_newline();
raise (Exit 1)
let printInfo =
function
FI(f,ls,cs,le,ce) ->
if (String.length f <> 0) then begin
print_string f;
print_string ":";
end;
print_int ls; print_string ".";
print_int cs; print_string "-";
if le <> ls then
(print_int le;prerr_string ".");
print_int ce; print_string ":"
| UNKNOWN ->
print_string "<Unknown file and line>: "
let errfAt fi f = errf(fun()-> printInfo fi; print_space(); f())
let err s = errf (fun()-> print_string "Error: "; print_string s)
let error fi s = errfAt fi (fun()-> print_string s)
let warning s =
print_string "Warning: "; print_string s;
print_newline()
let warningAt fi s =
printInfo fi; print_string " Warning: ";
print_string s; print_newline()
end
module Pervasive = struct
type info = Error.info
let pr = Format.print_string
|
99b4e0cabafba5fee5035078a471353e243d6881468ffe68f198ac1b9bb1e9bd | racket/racket7 | defined-table.rkt | #lang racket/base
(require racket/generic)
(define-generics cost
(money-cost cost)
(time-cost cost)
(happiness-cost cost)
#:defined-table cost-types)
(struct food (name price calories)
#:methods gen:cost
[(define (money-cost x)
(food-price x))
(define (happiness-cost x)
;; Guilt from eating too many calories
(/ (- (max (food-calories x) 1000) 1000) 200))])
(struct laundry (num-loads)
#:methods gen:cost
[(define (money-cost x)
(* (laundry-num-loads x) 3))
(define (time-cost x)
;; It's really annoying to start laundry, but subsequent loads are okay
(+ 5 (laundry-num-loads x)))])
(define methods
(hash 'money-cost money-cost
'time-cost time-cost
'happiness-cost happiness-cost))
(define (total-cost stuff)
(for/sum ([thing stuff])
(define supported-costs (cost-types thing))
(for/sum ([cost-type (hash-keys methods)])
(if (hash-ref supported-costs cost-type)
((hash-ref methods cost-type) thing)
0))))
(module+ test
(require rackunit rackunit/text-ui racket/port)
(define (check-basics table)
(check-true (hash? table))
(for ([method (hash-keys table)])
(check-true (symbol? method))
(check-true (boolean? (hash-ref table method)))))
(define pizza (food 'pizza 8 1200))
(define stuff (list pizza (laundry 1)))
(parameterize {[current-output-port (open-output-nowhere)]}
(run-tests
(test-suite
"defined-table"
(check-basics (cost-types pizza))
(check-true (hash-ref (cost-types pizza) 'money-cost))
(check-false (hash-ref (cost-types pizza) 'time-cost))
(check-true (hash-ref (cost-types pizza) 'happiness-cost))
(check-basics (cost-types (car stuff)))
(check-basics (cost-types (cadr stuff)))
(check-equal? (total-cost stuff) 18)))
(void)))
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/pkgs/racket-test/tests/generic/defined-table.rkt | racket | Guilt from eating too many calories
It's really annoying to start laundry, but subsequent loads are okay | #lang racket/base
(require racket/generic)
(define-generics cost
(money-cost cost)
(time-cost cost)
(happiness-cost cost)
#:defined-table cost-types)
(struct food (name price calories)
#:methods gen:cost
[(define (money-cost x)
(food-price x))
(define (happiness-cost x)
(/ (- (max (food-calories x) 1000) 1000) 200))])
(struct laundry (num-loads)
#:methods gen:cost
[(define (money-cost x)
(* (laundry-num-loads x) 3))
(define (time-cost x)
(+ 5 (laundry-num-loads x)))])
(define methods
(hash 'money-cost money-cost
'time-cost time-cost
'happiness-cost happiness-cost))
(define (total-cost stuff)
(for/sum ([thing stuff])
(define supported-costs (cost-types thing))
(for/sum ([cost-type (hash-keys methods)])
(if (hash-ref supported-costs cost-type)
((hash-ref methods cost-type) thing)
0))))
(module+ test
(require rackunit rackunit/text-ui racket/port)
(define (check-basics table)
(check-true (hash? table))
(for ([method (hash-keys table)])
(check-true (symbol? method))
(check-true (boolean? (hash-ref table method)))))
(define pizza (food 'pizza 8 1200))
(define stuff (list pizza (laundry 1)))
(parameterize {[current-output-port (open-output-nowhere)]}
(run-tests
(test-suite
"defined-table"
(check-basics (cost-types pizza))
(check-true (hash-ref (cost-types pizza) 'money-cost))
(check-false (hash-ref (cost-types pizza) 'time-cost))
(check-true (hash-ref (cost-types pizza) 'happiness-cost))
(check-basics (cost-types (car stuff)))
(check-basics (cost-types (cadr stuff)))
(check-equal? (total-cost stuff) 18)))
(void)))
|
27fc83e4c971de4a227ea07c4e4b704341db4b0965e172308e5e59a9421c28d9 | huangjs/cl | result.lisp | -*- Mode : Lisp ; Package : Maxima ; Syntax : Common - Lisp ; Base : 10 -*- ; ; ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; The data in this file contains enhancements. ;;;;;
;;; ;;;;;
;;; Copyright (c) 1984,1987 by William Schelter, University of Texas ;;;;;
;;; All rights reserved ;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; (c) Copyright 1982 Massachusetts Institute of Technology ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package :maxima)
(macsyma-module result)
(declare-top (special varlist genvar $ratfac $keepfloat modulus *alpha xv))
(load-macsyma-macros ratmac)
(defmfun $poly_discriminant (poly var)
(let* ((varlist (list var))
($ratfac nil)
(genvar ())
(rform (rform poly))
(rvar (car (last genvar)))
(n (pdegree (setq poly (car rform)) rvar)))
(cond ((= n 1) 1)
((or (= n 0) (not (atom (cdr rform))))
(merror "The first argument to 'poly_discriminant' must be a polynomial in ~:M" var))
(t (pdis (presign
(ash (* n (1- n)) -1)
(pquotient (resultant poly (pderivative poly rvar))
(p-lc poly))))))))
(defmfun $resultant (a b mainvar)
(prog (varlist formflag $ratfac res ans genvar $keepfloat)
(setq varlist (list mainvar) $ratfac t ans 1)
(and ($ratp a)(setq formflag t)(setq a ($ratdisrep a)))
(and ($ratp b)(setq formflag t)(setq b ($ratdisrep b)))
(newvar a)
(newvar b)
(setq a (lmake2 (cadr (ratrep* a)) nil))
(setq b (lmake2 (cadr (ratrep* b)) nil))
(setq mainvar (caadr (ratrep* mainvar)))
(do ((l1 a (cdr l1))) ((null l1))
(do ((l2 b (cdr l2))) ((null l2))
(setq res (result1 (caar l1) (caar l2) mainvar))
(setq ans (ptimes ans (pexpt
(cond ((zerop (caddr res)) (car res))
(t (ptimeschk (car res)
(pexpt (makprod (cadr res) nil)
(caddr res)))))
(* (cdar l1) (cdar l2)))))))
(return (cond (formflag (pdis* ans)) (t (pdis ans))))))
(defun result1 (p1 p2 var)
(cond ((or (pcoefp p1) (pointergp var (car p1)))
(list 1 p1 (pdegree p2 var)))
((or (pcoefp p2) (pointergp var (car p2)))
(list 1 p2 (pdegree p1 var)))
((null (cdddr p1))
(cond ((null (cdddr p2)) (list 0 0 1))
(t (list (pexpt (caddr p1) (cadr p2))
(pcsubsty 0 var p2)
(cadr p1)))))
((null (cdddr p2))
(list (pexpt (caddr p2) (cadr p1))
(pcsubsty 0 var p1)
(cadr p2)))
((> (setq var (gcd (pgcdexpon p1) (pgcdexpon p2))) 1)
(list 1 (resultant (pexpon*// p1 var nil)
(pexpon*// p2 var nil)) var))
(t (list 1 (resultant p1 p2) 1))))
(defmvar $resultant '$subres "Designates which resultant algorithm")
(defvar *resultlist '($subres $mod $red))
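;; RESULTANT below dispatches on $RESULTANT: $SUBRES selects the subresultant
;; p.r.s., $MOD the modular algorithm (currently compiled out via #+broken),
;; and $RED the reduced p.r.s.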
(defmfun resultant (p1 p2) ;assumes same main var
(if (> (p-le p2) (p-le p1))
(presign (* (p-le p1) (p-le p2)) (resultant p2 p1))
(case $resultant
($subres (subresult p1 p2))
#+broken ($mod (modresult p1 p2))
($red (redresult p1 p2))
(t (merror "No such resultant algorithm")))))
(defun presign (n p)
(if (oddp n) (pminus p) p))
;; computes resultant using subresultant p.r.s. TOMS Sept. 1978
(defun subresult (p q)
(loop for g = 1 then (p-lc p)
for h = 1 then (pquotient (pexpt g d) h^1-d)
for degq = (pdegree q (p-var p))
for d = (- (p-le p) degq)
for h^1-d = (if (equal h 1) 1 (pexpt h (1- d)))
if (zerop degq) return (if (pzerop q) q (pquotient (pexpt q d) h^1-d))
do (psetq p q
q (presign (1+ d) (pquotient (prem p q)
(ptimes g (ptimes h h^1-d)))))))
;; PACKAGE FOR CALCULATING MULTIVARIATE POLYNOMIAL RESULTANTS
;; USING MODIFIED REDUCED P.R.S.
(defun redresult (u v)
(prog (a r sigma c)
(setq a 1)
(setq sigma 0)
(setq c 1)
a (if (pzerop (setq r (prem u v))) (return (pzero)))
(setq c (ptimeschk c (pexpt (p-lc v)
(* (- (p-le u) (p-le v))
(- (p-le v) (pdegree r (p-var u))
1)))))
(setq sigma (+ sigma (* (p-le u) (p-le v))))
(if (zerop (pdegree r (p-var u)))
(return
(presign sigma
(pquotient (pexpt (pquotientchk r a) (p-le v)) c))))
(psetq u v
v (pquotientchk r a)
a (pexpt (p-lc v) (+ (p-le u) 1 (- (p-le v)))))
(go a)))
;; PACKAGE FOR CALCULATING MULTIVARIATE POLYNOMIAL RESULTANTS
;; USING MODULAR AND EVALUATION HOMOMORPHISMS.
;; modresultant fails on the following example
;;RESULTANT(((-4)*Z)^4+(Y+8*Z)^4+(X-5*Z)^4-1,
;; ((-4)*Z)^4-(X-5*Z)^3*((-4)*Z)^3+(Y+8*Z)^3*((-4)*Z)^2
;; +(-2)*(Y+8*Z)^4+((-4)*Z)^4+1,Z)
#+broken
(progn
(defun modresult (a b)
(modresult1 a b (sort (union* (listovars a) (listovars b))
(function pointergp))))
(defun modresult1 (x y varl)
(cond ((null modulus) (pres x y (car varl) (cdr varl)))
(t (cpres x y (car varl) (cdr varl))) ))
(defun pres (a b xr1 varl)
(prog (m n f a* b* c* p q c modulus hmodulus)
(setq m (cadr a))
(setq n (cadr b))
(setq f (coefbound m n (maxnorm (cdr a)) (maxnorm (cdr b)) ))
(setq q 1)
(setq c 0)
(setq p *alpha)
(go step3)
step2 (setq p (newprime p))
step3 (setqmodulus p)
(setq a* (pmod a))
(setq b* (pmod b))
(cond ((or (reject a* m xr1) (reject b* n xr1)) (go step2)))
(setq c* (cpres a* b* xr1 varl))
(setqmodulus nil)
(setq c (lagrange3 c c* p q))
(setq q (* p q))
(cond ((> q f) (return c))
(t (go step2)) ) ))
(defun reject (a m xv)
(not (eqn (pdegree a xv) m)))
(defun coefbound (m n d e)
(* 2 (expt (1+ m) (ash n -1))
(expt (1+ n) (ash m -1))
(cond ((oddp n) (1+ ($isqrt (1+ m))))
(t 1))
(cond ((oddp m) (1+ ($isqrt (1+ n))))
(t 1))
;; (FACTORIAL (PLUS M N)) USED TO REPLACE PREV. 4 LINES. KNU II P. 375
(expt d n)
(expt e m) ))
(defun main2 (a var exp tot)
(cond ((null a) (cons exp tot))
(t (main2 (cddr a) var
(max (setq var (pdegree (cadr a) var)) exp)
(max (+ (car a) var) tot))) ))
(defun cpres (a b xr1 varl)             ;XR1 IS MAIN VAR WHICH
                                        ;RESULTANT ELIMINATES
  (cond ((null varl) (cpres1 (cdr a) (cdr b)))
        (t (prog (m2 n2 k c d a* b* c* bp
                  (m1 (cadr a))
                  (n1 (cadr b))
                  xv)                   ;XV IS INTERPOLATED VAR
             (declare (fixnum m1 n1 k))
             step2
             (setq xv (car varl))
             (setq varl (cdr varl))
             (setq m2 (main2 (cdr a) xv 0 0)) ;M2= <DEG OF A IN XV . TOTAL DEG>
(setq n2 (main2 (cdr b) xv 0 0))
(cond ((zerop (+ (car m2) (car n2)))
(cond ((null varl) (return (cpres1 (cdr a) (cdr b))))
(t (go step2)) ) ))
(setq k (1+ (min (+ (* m1 (car n2)) (* n1 (car m2)))
(+ (* m1 (cdr n2)) (* n1 (cdr m2))
(- (* m1 n1))) )))
(setq c 0)
(setq d 1)
(setq m2 (car m2) n2 (car n2))
(setq bp (- 1))
step3
(cond ((equal (setq bp (1+ bp)) modulus)
(merror "Resultant primes too small."))
((zerop m2) (setq a* a))
(t (setq a* (pcsubst a bp xv))
(cond ((reject a* m1 xr1)(go step3)) )) )
(cond ((zerop n2) (setq b* b))
(t (setq b* (pcsubst b bp xv))
(cond ((reject b* n1 xr1) (go step3))) ))
(setq c* (cpres a* b* xr1 varl))
(setq c (lagrange33 c c* d bp))
(setq d (ptimeschk d (list xv 1 1 0 (cminus bp))))
(cond ((> (cadr d) k) (return c))
(t (go step3))))))))
;; *** NOTE THAT MATRIX PRODUCED IS ALWAYS SYMMETRIC
;; *** ABOUT THE MINOR DIAGONAL.
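;; $BEZOUT returns the Bezout matrix of P and Q with respect to VAR; its
;; determinant agrees with RESULTANT(P,Q,VAR) up to sign and a power of the
;; leading coefficient.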
(defmfun $bezout (p q var)
(let ((varlist (list var)) genvar)
(newvar p)
(newvar q)
(setq p (cadr (ratrep* p))
q (cadr (ratrep* q)))
(setq p (cond ((> (cadr q) (cadr p)) (bezout q p))
(t (bezout p q))))
(cons '($matrix)
(mapcar #'(lambda (l) (cons '(mlist) (mapcar 'pdis l)))
p))))
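;; VMAKE flattens the first N coefficients of POLY into a list, highest
;; exponent first, inserting 0 for powers that are missing from the terms.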
(defun vmake (poly n *l)
(do ((i (1- n) (1- i))) ((minusp i))
(cond ((or (null poly) (< (car poly) i))
(setq *l (cons 0 *l)))
(t (setq *l (cons (cadr poly) *l))
(setq poly (cddr poly)))))
(nreverse *l))
(defun bezout (p q)
(let* ((n (1+ (p-le p)))
(n2 (- n (p-le q)))
(a (vmake (p-terms p) n nil))
(b (vmake (p-terms q) n nil))
(ar (reverse (nthcdr n2 a)))
(br (reverse (nthcdr n2 b)))
(l (nzeros n nil)))
(rplacd (nthcdr (1- (p-le p)) a) nil)
(rplacd (nthcdr (1- (p-le p)) b) nil)
(nconc
(mapcar
#'(lambda (ar br)
(setq l (mapcar #'(lambda (a b l)
(ppluschk l (pdifference
(ptimes br a) (ptimes ar b))))
a b (cons 0 l))))
ar br)
(and (pzerop (car b))
(do ((b (vmake (cdr q) (cadr p) nil) (rot* b))
(m nil (cons b m)))
((not (pzerop (car b))) (cons b m))))) ))
(defun rot* (b)
(setq b (copy-list b))
(prog2
(nconc b b)
(cdr b)
(rplacd b nil)))
(defun ppluschk (p q)
(cond ((pzerop p) q)
(t (pplus p q))))
| null | https://raw.githubusercontent.com/huangjs/cl/96158b3f82f82a6b7d53ef04b3b29c5c8de2dbf7/lib/maxima/src/result.lisp | lisp | Package : Maxima ; Syntax : Common - Lisp ; Base : 10 -*- ; ; ; ;
The data in this file contains enhancments. ;;;;;
;;;;;
; ; ; ;
All rights reserved ;;;;;
; ;
assumes same main var
PACKAGE FOR CALCULATING MULTIVARIATE POLYNOMIAL RESULTANTS
USING MODIFIED REDUCED P.R.S.
PACKAGE FOR CALCULATING MULTIVARIATE POLYNOMIAL RESULTANTS
USING MODULAR AND EVALUATION HOMOMORPHISMS.
modresultant fails on the following example
RESULTANT(((-4)*Z)^4+(Y+8*Z)^4+(X-5*Z)^4-1,
((-4)*Z)^4-(X-5*Z)^3*((-4)*Z)^3+(Y+8*Z)^3*((-4)*Z)^2
XR1 IS MAIN VAR WHICH
*** NOTE THAT MATRIX PRODUCED IS ALWAYS SYMETRIC
*** ABOUT THE MINOR DIAGONAL. |
(in-package :maxima)
(macsyma-module result)
(declare-top (special varlist genvar $ratfac $keepfloat modulus *alpha xv))
(load-macsyma-macros ratmac)
(defmfun $poly_discriminant (poly var)
(let* ((varlist (list var))
($ratfac nil)
(genvar ())
(rform (rform poly))
(rvar (car (last genvar)))
(n (pdegree (setq poly (car rform)) rvar)))
(cond ((= n 1) 1)
((or (= n 0) (not (atom (cdr rform))))
(merror "The first argument to 'poly_discriminant' must be a polynomial in ~:M" var))
(t (pdis (presign
(ash (* n (1- n)) -1)
(pquotient (resultant poly (pderivative poly rvar))
(p-lc poly))))))))
(defmfun $resultant (a b mainvar)
(prog (varlist formflag $ratfac res ans genvar $keepfloat)
(setq varlist (list mainvar) $ratfac t ans 1)
(and ($ratp a)(setq formflag t)(setq a ($ratdisrep a)))
(and ($ratp b)(setq formflag t)(setq b ($ratdisrep b)))
(newvar a)
(newvar b)
(setq a (lmake2 (cadr (ratrep* a)) nil))
(setq b (lmake2 (cadr (ratrep* b)) nil))
(setq mainvar (caadr (ratrep* mainvar)))
(do ((l1 a (cdr l1))) ((null l1))
(do ((l2 b (cdr l2))) ((null l2))
(setq res (result1 (caar l1) (caar l2) mainvar))
(setq ans (ptimes ans (pexpt
(cond ((zerop (caddr res)) (car res))
(t (ptimeschk (car res)
(pexpt (makprod (cadr res) nil)
(caddr res)))))
(* (cdar l1) (cdar l2)))))))
(return (cond (formflag (pdis* ans)) (t (pdis ans))))))
(defun result1 (p1 p2 var)
(cond ((or (pcoefp p1) (pointergp var (car p1)))
(list 1 p1 (pdegree p2 var)))
((or (pcoefp p2) (pointergp var (car p2)))
(list 1 p2 (pdegree p1 var)))
((null (cdddr p1))
(cond ((null (cdddr p2)) (list 0 0 1))
(t (list (pexpt (caddr p1) (cadr p2))
(pcsubsty 0 var p2)
(cadr p1)))))
((null (cdddr p2))
(list (pexpt (caddr p2) (cadr p1))
(pcsubsty 0 var p1)
(cadr p2)))
((> (setq var (gcd (pgcdexpon p1) (pgcdexpon p2))) 1)
(list 1 (resultant (pexpon*// p1 var nil)
(pexpon*// p2 var nil)) var))
(t (list 1 (resultant p1 p2) 1))))
(defmvar $resultant '$subres "Designates which resultant algorithm")
(defvar *resultlist '($subres $mod $red))
(if (> (p-le p2) (p-le p1))
(presign (* (p-le p1) (p-le p2)) (resultant p2 p1))
(case $resultant
($subres (subresult p1 p2))
#+broken ($mod (modresult p1 p2))
($red (redresult p1 p2))
(t (merror "No such resultant algorithm")))))
(defun presign (n p)
(if (oddp n) (pminus p) p))
;; computes resultant using subresultant p.r.s. TOMS Sept. 1978
(defun subresult (p q)
(loop for g = 1 then (p-lc p)
for h = 1 then (pquotient (pexpt g d) h^1-d)
for degq = (pdegree q (p-var p))
for d = (- (p-le p) degq)
for h^1-d = (if (equal h 1) 1 (pexpt h (1- d)))
if (zerop degq) return (if (pzerop q) q (pquotient (pexpt q d) h^1-d))
do (psetq p q
q (presign (1+ d) (pquotient (prem p q)
(ptimes g (ptimes h h^1-d)))))))
(defun redresult (u v)
(prog (a r sigma c)
(setq a 1)
(setq sigma 0)
(setq c 1)
a (if (pzerop (setq r (prem u v))) (return (pzero)))
(setq c (ptimeschk c (pexpt (p-lc v)
(* (- (p-le u) (p-le v))
(- (p-le v) (pdegree r (p-var u))
1)))))
(setq sigma (+ sigma (* (p-le u) (p-le v))))
(if (zerop (pdegree r (p-var u)))
(return
(presign sigma
(pquotient (pexpt (pquotientchk r a) (p-le v)) c))))
(psetq u v
v (pquotientchk r a)
a (pexpt (p-lc v) (+ (p-le u) 1 (- (p-le v)))))
(go a)))
;; +(-2)*(Y+8*Z)^4+((-4)*Z)^4+1,Z)
#+broken
(progn
(defun modresult (a b)
(modresult1 a b (sort (union* (listovars a) (listovars b))
(function pointergp))))
(defun modresult1 (x y varl)
(cond ((null modulus) (pres x y (car varl) (cdr varl)))
(t (cpres x y (car varl) (cdr varl))) ))
(defun pres (a b xr1 varl)
(prog (m n f a* b* c* p q c modulus hmodulus)
(setq m (cadr a))
(setq n (cadr b))
(setq f (coefbound m n (maxnorm (cdr a)) (maxnorm (cdr b)) ))
(setq q 1)
(setq c 0)
(setq p *alpha)
(go step3)
step2 (setq p (newprime p))
step3 (setqmodulus p)
(setq a* (pmod a))
(setq b* (pmod b))
(cond ((or (reject a* m xr1) (reject b* n xr1)) (go step2)))
(setq c* (cpres a* b* xr1 varl))
(setqmodulus nil)
(setq c (lagrange3 c c* p q))
(setq q (* p q))
(cond ((> q f) (return c))
(t (go step2)) ) ))
(defun reject (a m xv)
(not (eqn (pdegree a xv) m)))
(defun coefbound (m n d e)
(* 2 (expt (1+ m) (ash n -1))
(expt (1+ n) (ash m -1))
(cond ((oddp n) (1+ ($isqrt (1+ m))))
(t 1))
(cond ((oddp m) (1+ ($isqrt (1+ n))))
(t 1))
;; (FACTORIAL (PLUS M N)) USED TO REPLACE PREV. 4 LINES. KNU II P. 375
(expt d n)
(expt e m) ))
(defun main2 (a var exp tot)
(cond ((null a) (cons exp tot))
(t (main2 (cddr a) var
(max (setq var (pdegree (cadr a) var)) exp)
(max (+ (car a) var) tot))) ))
(defun cpres (a b xr1 varl)
  (cond ((null varl) (cpres1 (cdr a) (cdr b)))
        (t (prog (m2 n2 k c d a* b* c* bp
                  (m1 (cadr a))
                  (n1 (cadr b))
                  xv)
(declare (fixnum m1 n1 k))
step2
(setq xv (car varl))
(setq varl (cdr varl))
(setq m2 (main2 (cdr a) xv 0 0))
(setq n2 (main2 (cdr b) xv 0 0))
(cond ((zerop (+ (car m2) (car n2)))
(cond ((null varl) (return (cpres1 (cdr a) (cdr b))))
(t (go step2)) ) ))
(setq k (1+ (min (+ (* m1 (car n2)) (* n1 (car m2)))
(+ (* m1 (cdr n2)) (* n1 (cdr m2))
(- (* m1 n1))) )))
(setq c 0)
(setq d 1)
(setq m2 (car m2) n2 (car n2))
(setq bp (- 1))
step3
(cond ((equal (setq bp (1+ bp)) modulus)
(merror "Resultant primes too small."))
((zerop m2) (setq a* a))
(t (setq a* (pcsubst a bp xv))
(cond ((reject a* m1 xr1)(go step3)) )) )
(cond ((zerop n2) (setq b* b))
(t (setq b* (pcsubst b bp xv))
(cond ((reject b* n1 xr1) (go step3))) ))
(setq c* (cpres a* b* xr1 varl))
(setq c (lagrange33 c c* d bp))
(setq d (ptimeschk d (list xv 1 1 0 (cminus bp))))
(cond ((> (cadr d) k) (return c))
(t (go step3))))))))
(defmfun $bezout (p q var)
(let ((varlist (list var)) genvar)
(newvar p)
(newvar q)
(setq p (cadr (ratrep* p))
q (cadr (ratrep* q)))
(setq p (cond ((> (cadr q) (cadr p)) (bezout q p))
(t (bezout p q))))
(cons '($matrix)
(mapcar #'(lambda (l) (cons '(mlist) (mapcar 'pdis l)))
p))))
(defun vmake (poly n *l)
(do ((i (1- n) (1- i))) ((minusp i))
(cond ((or (null poly) (< (car poly) i))
(setq *l (cons 0 *l)))
(t (setq *l (cons (cadr poly) *l))
(setq poly (cddr poly)))))
(nreverse *l))
(defun bezout (p q)
(let* ((n (1+ (p-le p)))
(n2 (- n (p-le q)))
(a (vmake (p-terms p) n nil))
(b (vmake (p-terms q) n nil))
(ar (reverse (nthcdr n2 a)))
(br (reverse (nthcdr n2 b)))
(l (nzeros n nil)))
(rplacd (nthcdr (1- (p-le p)) a) nil)
(rplacd (nthcdr (1- (p-le p)) b) nil)
(nconc
(mapcar
#'(lambda (ar br)
(setq l (mapcar #'(lambda (a b l)
(ppluschk l (pdifference
(ptimes br a) (ptimes ar b))))
a b (cons 0 l))))
ar br)
(and (pzerop (car b))
(do ((b (vmake (cdr q) (cadr p) nil) (rot* b))
(m nil (cons b m)))
((not (pzerop (car b))) (cons b m))))) ))
(defun rot* (b)
(setq b (copy-list b))
(prog2
(nconc b b)
(cdr b)
(rplacd b nil)))
(defun ppluschk (p q)
(cond ((pzerop p) q)
(t (pplus p q))))
|
a5965360b1fd7ef29970fe7baf7add3394d5d402b88bfee8e12d577b9b8b39d0 | jacekschae/learn-reagent-course-files | init.cljs | (ns giggin.fb.init
(:require ["firebase/app" :as firebase]
["firebase/database"]
["firebase/auth"]))
(defn firebase-init
[]
(firebase/initializeApp
{:apiKey "your-api-key"
:authDomain "your-auth-domain"
:databaseURL "your-databse-url"
:projectId "your-project-id"}))
| null | https://raw.githubusercontent.com/jacekschae/learn-reagent-course-files/bad40303c8e0a8526318a69fdf9c2d786b4240cb/increments/21-firebase-database/src/giggin/fb/init.cljs | clojure | (ns giggin.fb.init
(:require ["firebase/app" :as firebase]
["firebase/database"]
["firebase/auth"]))
(defn firebase-init
[]
(firebase/initializeApp
{:apiKey "your-api-key"
:authDomain "your-auth-domain"
:databaseURL "your-databse-url"
:projectId "your-project-id"}))
|
|
97472afeac45127f258bd8fcd18762e5bb2de27fe0247e3c887402901f9891da | craigfe/sink | cmd.ml | let rec mkdir_p path =
try Unix.mkdir path 0o777 with
| Unix.Unix_error (EEXIST, _, _) -> ()
| Unix.Unix_error (ENOENT, _, _) ->
let parent = Filename.dirname path in
mkdir_p parent;
Unix.mkdir path 0o777
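(* [print_to_file path printer] opens [path] for writing, applies [printer] to
   a formatter on that channel, emits a final newline and closes the file. *)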
let print_to_file path printer =
let channel = open_out path in
let formatter = Format.formatter_of_out_channel channel in
printer formatter;
Format.pp_print_newline formatter ();
close_out channel
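(* [sequence_commands cmds] runs each shell command in order with
   [Unix.system], stopping at the first failure and returning it as an error
   result. *)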
let sequence_commands cmds =
List.fold_left
(fun ret cmd ->
match ret with
| Error e -> Error e
| Ok () -> (
match Unix.system cmd with
| WEXITED 0 -> Ok ()
| WEXITED n ->
Result.errorf "Command \"%s\" failed with return code %d" cmd n
| WSIGNALED _ | WSTOPPED _ ->
Result.errorf "Command \"%s\" was interrupted" cmd ))
(Ok ()) cmds
| null | https://raw.githubusercontent.com/craigfe/sink/c5431edfa1b06f1a09845a481c4afcb3e92f0667/src/sink-unix/cmd.ml | ocaml | let rec mkdir_p path =
try Unix.mkdir path 0o777 with
| Unix.Unix_error (EEXIST, _, _) -> ()
| Unix.Unix_error (ENOENT, _, _) ->
let parent = Filename.dirname path in
mkdir_p parent;
Unix.mkdir path 0o777
let print_to_file path printer =
let channel = open_out path in
let formatter = Format.formatter_of_out_channel channel in
printer formatter;
Format.pp_print_newline formatter ();
close_out channel
let sequence_commands cmds =
List.fold_left
(fun ret cmd ->
match ret with
| Error e -> Error e
| Ok () -> (
match Unix.system cmd with
| WEXITED 0 -> Ok ()
| WEXITED n ->
Result.errorf "Command \"%s\" failed with return code %d" cmd n
| WSIGNALED _ | WSTOPPED _ ->
Result.errorf "Command \"%s\" was interrupted" cmd ))
(Ok ()) cmds
|
|
d2d4d0577a5f180155c676d44335c553b19c0101f48756d532006fe61b3f521c | tonyvanriet/clj-slack-client | conversations.clj | (ns clj-slack-client.conversations
(:require [clj-slack-client.web :as web])
(:refer-clojure :exclude [list]))
(def api-module "conversations")
(defn- call
([method-name token]
(web/call-and-get-response (str api-module "." method-name) {:token token}))
([method-name token channel]
(web/call-and-get-response (str api-module "." method-name)
{:token token :channel channel}))
([method-name token channel options]
(web/call-and-get-response (str api-module "." method-name)
(merge {:token token :channel channel}
options))))
(defn archive
"Archives a conversation. Callable only by user."
[token channel]
(call "archive" token channel))
(defn close
"Closes a direct message. Callable by user and bot tokens."
[token channel]
(call "close" token channel))
(defn create
"Initiates a public or private channel-based conversation.
Callable only by user token."
([token channel-name]
(create token channel-name false))
([token channel-name is-private?]
(web/call-and-get-response "create"
{:token token :name channel-name
:is_private is-private?})))
(defn history
"Fetches a conversation's history of messages and events.
Callable by user and bot tokens."
([token channel]
(history token channel {}))
([token channel options]
(call "history" token channel options)))
(defn info
"Retrieve information about a conversation. Callable by user
and bot tokens."
([token channel]
(info token channel false))
([token channel include-locale]
(call "info" token channel include-locale)))
(defn invite
"Invites users to a channel. Callable by user only. Users
are comma separated."
[token channel users]
(call "invite" token channel {:users users}))
(defn join
"Joins an existing conversation. Callably by user only."
[token channel]
(call "join" token channel))
(defn kick
"Removes a user from a conversation. Callable by user only."
[token channel user]
(call "kick" token channel {:user user}))
(defn leave
"Leaves a conversation. Only callable by user."
[token channel]
(call "leave" token channel))
(defn list
"Lists all channels in a Slack team. Accessible by bot and user."
([token]
(call "list" token))
([token options]
(web/call-and-get-response "conversations.list" {:token token})))
(defn members
"Retrieve the members of a conversation. Accessible by bots and users."
([token channel]
(call "members" token channel))
([token channel options]
(call "members" token channel options)))
(defn open
"Opens or resumes a direct or multi-person message. Callable by bot
and user tokens."
[token options]
(web/call-and-get-response "conversations.open"
(assoc options :token token)))
(defn rename
"Renames a conversation. Callable only by user."
[token channel new-name]
(call "rename" token channel {:name new-name}))
(defn replies
"Retrieve a thread of messages posted to a conversation. Accessible
by bot and user tokens."
([token channel time-stamp]
(call "replies" token channel {:ts time-stamp}))
([token channel time-stamp options]
(call "replies" token channel
(assoc options :ts time-stamp))))
(defn set-purpose
"Sets the purpose for a conversation. Supported by bot and user tokens."
[token channel purpose]
(call "setPurpose" token channel {:purpose purpose}))
(defn set-topic
"Sets the topic for a conversation. Supported by bot and user tokens."
[token channel topic]
(call "setPurpose" token channel {:topic topic}))
(defn unarchive
"Reverses conversation archival. Supported by users only."
[token channel]
(call "unarchive" token channel))
| null | https://raw.githubusercontent.com/tonyvanriet/clj-slack-client/6783f003ab93adae057890421622eb5e61ab033d/src/clj_slack_client/conversations.clj | clojure | (ns clj-slack-client.conversations
(:require [clj-slack-client.web :as web])
(:refer-clojure :exclude [list]))
(def api-module "conversations")
(defn- call
([method-name token]
(web/call-and-get-response (str api-module "." method-name) {:token token}))
([method-name token channel]
(web/call-and-get-response (str api-module "." method-name)
{:token token :channel channel}))
([method-name token channel options]
(web/call-and-get-response (str api-module "." method-name)
(merge {:token token :channel channel}
options))))
(defn archive
"Archives a conversation. Callable only by user."
[token channel]
(call "archive" token channel))
(defn close
"Closes a direct message. Callable by user and bot tokens."
[token channel]
(call "close" token channel))
(defn create
"Initiates a public or private channel-based conversation.
Callable only by user token."
([token channel-name]
(create token channel-name false))
([token channel-name is-private?]
(web/call-and-get-response "create"
{:token token :name channel-name
:is_private is-private?})))
(defn history
"Fetches a conversation's history of messages and events.
Callable by user and bot tokens."
([token channel]
(history token channel {}))
([token channel options]
(call "history" token channel options)))
(defn info
"Retrieve information about a conversation. Callable by user
and bot tokens."
([token channel]
(token channel false))
([token channel include-locale]
(call "info" token channel include-locale)))
(defn invite
"Invites users to a channel. Callable by user only. Users
are comma separated."
[token channel users]
(call "invite" token channel {:users users}))
(defn join
"Joins an existing conversation. Callably by user only."
[token channel]
(call "join" token channel))
(defn kick
"Removes a user from a conversation. Callable by user only."
[token channel user]
(call "kick" token channel {:user user}))
(defn leave
"Leaves a conversation. Only callable by user."
[token channel]
(call "leave" token channel))
(defn list
"Lists all channels in a Slack team. Accessible by bot and user."
([token]
(call "list" token))
([token options]
(web/call-and-get-response "conversations.list" {:token token})))
(defn members
"Retrieve the members of a conversation. Accessible by bots and users."
([token channel]
(call "members" token channel))
([token channel options]
(call "members" token channel options)))
(defn open
"Opens or resumes a direct or multi-person message. Callable by bot
and user tokens."
[token options]
(web/call-and-get-response "conversations.open"
(assoc options :token token)))
(defn rename
"Renames a conversation. Callable only by user."
[token channel new-name]
(call "rename" token channel {:name new-name}))
(defn replies
"Retrieve a thread of messages posted to a conversation. Accessible
by bot and user tokens."
([token channel time-stamp]
(call "replies" token channel {:ts time-stamp}))
([token channel time-stamp options]
(call "replies" token channel
(assoc options :ts time-stamp))))
(defn set-purpose
"Sets the purpose for a conversation. Supported by bot and user tokens."
[token channel purpose]
(call "setPurpose" token channel {:purpose purpose}))
(defn set-topic
"Sets the topic for a conversation. Supported by bot and user tokens."
[token channel topic]
(call "setPurpose" token channel {:topic topic}))
(defn unarchive
"Reverses conversation archival. Supported by users only."
[token channel]
(call "unarchive" token channel))
|
|
69da57d17e4ff9e2d12a2946c21cb4328bf57e72ea12cc3b7f56b081019fded5 | sangkilc/ofuzz | envmanager.ml | (* ofuzz - ocaml fuzzing platform *)
(** environment manager
@author Sang Kil Cha <sangkil.cha\@gmail.com>
@since 2014-03-19
*)
(*
Copyright (c) 2014, Sang Kil Cha
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*)
open Ofuzzversion
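(* [shell cmd] runs [cmd] through a pipe and returns the first line it prints
   on standard output. *)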
let shell cmd =
let ch = Unix.open_process_in cmd in
let output = input_line ch in
close_in ch;
output
let os = ostype ()
let arch = shell "uname -m"
let kernel = shell "uname -r"
let cpu =
match os with
| "Linux" ->
shell "grep 'model name' /proc/cpuinfo -m 1 \
| awk '{$1=$2=$3=\"\"; print $0}' \
| sed -e 's/^ *//' -e 's/ *$//'"
| "Darwin" ->
shell "sysctl -n machdep.cpu.brand_string"
| other ->
"unknown OS"
let version = string ()
| null | https://raw.githubusercontent.com/sangkilc/ofuzz/ba53cc90cc06512eb90459a7159772d75ebe954f/src/envmanager.ml | ocaml | ofuzz - ocaml fuzzing platform |
* environment manager
@author Sang Kil Cha <sangkil.cha\@gmail.com>
@since 2014-03-19
*)
Copyright (c) 2014, Sang Kil Cha
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
*)
open Ofuzzversion
let shell cmd =
let ch = Unix.open_process_in cmd in
let output = input_line ch in
close_in ch;
output
let os = ostype ()
let arch = shell "uname -m"
let kernel = shell "uname -r"
let cpu =
match os with
| "Linux" ->
shell "grep 'model name' /proc/cpuinfo -m 1 \
| awk '{$1=$2=$3=\"\"; print $0}' \
| sed -e 's/^ *//' -e 's/ *$//'"
| "Darwin" ->
shell "sysctl -n machdep.cpu.brand_string"
| other ->
"unknown OS"
let version = string ()
|
b34467dfcad6beabcf23c84cb591b936370a9716d9b1d17c4a0a4ace8ccb4693 | ijvcms/chuanqi_dev | map_20224.erl | -module(map_20224).
-export([
range/0,
data/0
]).
range() -> {124, 84}.
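%% data/0 holds the tile grid as a tuple of 84 row tuples, each 124 cells wide,
%% matching the {Width, Height} pair returned by range/0.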
data() ->
{
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,1,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,2,2,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,2,2,2,2,1,1,1,0,0,0,0,0,2,2,2,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,0,0,0,0,0,2,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,2,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,2,2,2,1,1,2,1,0,0,0,0,2,2,2,2,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,2,2,2,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,0,0,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,2,1,1,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,2,2,2,0,0,2,2,1,0,0,0,0,2,2,2,2,1,1,1,1,1,0,0,0,0,2,1,0,0,2,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,1,1,1,1,1,1,0,0,0,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,2,1,1,0,2,2,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,2,0,2,2,2,2,1,1,1,1,1,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,2,2,0,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,0,1,1,1,2,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,0,0,0,0,0,2,2,2,2,1,1,1,1,2,0,0,0,0,0,1,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,2,2,2,2,2,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,2,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,2,2,2,1,1,1,1,1,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,1,1,1,0,0,0,0,2,2,2,2,1,2,2,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,0,0,0,0,2,2,2,2,2,1,1,1,0,0,0,0,0,2,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,0,0,0,0,0,2,2,2,1,1,1,1,1,1,2,2,0,0,0,2,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,2,0,0,0,1,0,0,0,0,0,1,0,0,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,2,0,0,0,2,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,2,1,1,1,1,0,0,0,2,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,2,1,1,1,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,2,1,1,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,2,1,1,1,1,1,2,2,0,0,0,0,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,2,1,1,1,1,1,2,2,2,2,2,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0,2,1,1,1,1,1,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,2,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,2,2,2,2,0,2,2,2,2,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,2,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,2,0,0,2,2,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,2,2,2,2,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,2,1,1,1,1,1,0,0,0,0,0,0,2,2,2,2,1,1,1,2,2,2,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,2,0,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,1,1,1,2,2,2,2,2,0,0,2,2,2,2,1,1,1,1,1,1,2,2,2,2,2,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,2,2,2,1,1,1,1,1,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,2,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,0,0,0,1,1,1,0,0,0,0,0,0,0,0,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,1,1,1,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,1,0,1,2,2,2,2,1,1,1,1,1,1,0,0,0,0,1,2,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,2,1,1,1,2,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,2,2,2,2,1,1,2,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,2,2,2,2,1,1,1,2,2,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,1,1,0,2,2,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,2,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,0,0,0,0,2,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,2,1,1,1,2,2,2,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,2,2,0,2,1,1,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,2,1,1,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,2,2,1,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,2,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,2,2,2,1,1,1,1,0,0,0,0,0,2,2,2,2,2,0,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,2,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,1,1,1,0,0,0,0,0,0,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,1,2,0,0,0,0,2,2,2,2,2,1,1,1,1,2,2,2,0,0,0,1,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,2,2,2,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,0,2,2,0,1,1,1,1,1,1,1,1,2,2,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,2,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,2,2,0,0,1,1,2,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,1,1,1,2,2,2,2,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,0,0,0,1,0,2,2,2,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,1,1,1,0,0,0,0,0,2,2,2,2,1,2,1,0,0,0,0,0,2,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2,1,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,2,2,0,0,0,2,1,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,1,1,1,1,1,1,1,1,2,1,1,1,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,2,1,1,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,0,0,0,0,0,2,2,2,2,2,2,2,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,0,0,0,0,0,0,1,2,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,2,2,2,2,1,1,1,1,1,2,2,2,2,2,2,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,2,2,2,1,1,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}
}.
| null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/map_data/map_20224.erl | erlang | -module(map_20224).
-export([
range/0,
data/0
]).
range() -> {124, 84}.
data() ->
{
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,1,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,2,2,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,2,2,2,2,1,1,1,0,0,0,0,0,2,2,2,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,0,0,0,0,0,2,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,2,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,2,2,2,1,1,2,1,0,0,0,0,2,2,2,2,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,2,2,2,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,0,0,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,2,1,1,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,2,2,2,0,0,2,2,1,0,0,0,0,2,2,2,2,1,1,1,1,1,0,0,0,0,2,1,0,0,2,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,1,1,1,1,1,1,0,0,0,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,2,1,1,0,2,2,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,2,0,2,2,2,2,1,1,1,1,1,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,2,2,0,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,0,1,1,1,2,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,0,0,0,0,0,2,2,2,2,1,1,1,1,2,0,0,0,0,0,1,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,2,2,2,2,2,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,2,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,2,2,2,1,1,1,1,1,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,1,1,1,0,0,0,0,2,2,2,2,1,2,2,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,0,0,0,0,2,2,2,2,2,1,1,1,0,0,0,0,0,2,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,0,0,0,0,0,2,2,2,1,1,1,1,1,1,2,2,0,0,0,2,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,2,0,0,0,1,0,0,0,0,0,1,0,0,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,2,0,0,0,2,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,2,1,1,1,1,0,0,0,2,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,2,1,1,1,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,2,1,1,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,2,1,1,1,1,1,2,2,0,0,0,0,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,2,1,1,1,1,1,2,2,2,2,2,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0,2,1,1,1,1,1,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,2,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,2,2,2,2,0,2,2,2,2,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,2,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,2,0,0,2,2,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,2,2,2,2,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,2,1,1,1,1,1,0,0,0,0,0,0,2,2,2,2,1,1,1,2,2,2,2,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,2,0,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,1,1,1,2,2,2,2,2,0,0,2,2,2,2,1,1,1,1,1,1,2,2,2,2,2,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,2,2,2,1,1,1,1,1,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,2,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,0,0,0,1,1,1,0,0,0,0,0,0,0,0,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,1,1,1,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,1,0,1,2,2,2,2,1,1,1,1,1,1,0,0,0,0,1,2,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,2,2,1,1,1,2,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,2,2,2,2,1,1,2,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,2,2,2,2,1,1,1,2,2,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,1,1,0,2,2,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,2,2,2,2,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,2,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,0,0,0,0,2,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,2,1,1,1,2,2,2,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,2,2,0,2,1,1,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,2,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,2,1,1,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,2,2,1,1,1,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,2,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,2,2,2,1,1,1,1,0,0,0,0,0,2,2,2,2,2,0,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,2,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,1,1,1,0,0,0,0,0,0,2,2,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,1,2,0,0,0,0,2,2,2,2,2,1,1,1,1,2,2,2,0,0,0,1,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,2,2,2,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,0,2,2,0,1,1,1,1,1,1,1,1,2,2,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,2,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,1,1,1,2,2,0,0,1,1,2,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,1,1,1,2,2,2,2,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,0,0,0,1,0,2,2,2,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,1,1,1,0,0,0,0,0,2,2,2,2,1,2,1,0,0,0,0,0,2,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2,1,2,2,2,2,2,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,2,2,0,0,0,2,1,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,1,1,1,1,1,1,1,1,2,1,1,1,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,2,1,1,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,1,1,0,0,0,0,0,2,2,2,2,2,2,2,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,0,0,0,0,0,0,1,2,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,2,2,2,2,1,1,1,1,1,2,2,2,2,2,2,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,2,2,2,1,1,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}
}.
|
|
1a53d053bb7349ce696427bb54a904673e789b7e5a54f12408d0a4c4e753ef82 | tcsprojects/pgsolver | solverregistry.ml | open Paritygame;;
open Tcsset;;
type global_solver_factory = string array -> global_solver
let solvermap = ref TreeMap.empty_def;;
let register_solver_factory solver_func identifier abbreviation description =
if TreeMap.mem identifier !solvermap
then failwith ("Solver `" ^ identifier ^ "' already registered!\n")
else solvermap := TreeMap.add identifier (solver_func, abbreviation, description) !solvermap;;
let register_solver solver_func = register_solver_factory (fun _ -> solver_func);;
let mem_solver identifier = TreeMap.mem identifier !solvermap;;
let find_solver identifier = TreeMap.find identifier !solvermap;;
let enum_solvers it = TreeMap.iter (fun i (f, a, d) -> it f i a d) !solvermap;;
let fold_solvers fo b = TreeMap.fold (fun i (f, a, d) x -> fo f i a d x) !solvermap b;;
type partial_solver_factory = string array -> partial_solver
let partialsolvermap = ref TreeMap.empty_def;;
let register_partial_solver_factory solver_func identifier abbreviation description =
if TreeMap.mem identifier !partialsolvermap
then failwith ("Partial Solver `" ^ identifier ^ "' already registered!\n")
else partialsolvermap := TreeMap.add identifier (solver_func, abbreviation, description) !partialsolvermap;;
let register_partial_solver solver_func = register_partial_solver_factory (fun _ -> solver_func);;
let mem_partial_solver identifier = TreeMap.mem identifier !partialsolvermap;;
let find_partial_solver identifier = TreeMap.find identifier !partialsolvermap;;
let enum_partial_solvers it = TreeMap.iter (fun i (f, a, d) -> it f i a d) !partialsolvermap;;
let fold_partial_solvers fo b = TreeMap.fold (fun i (f, a, d) x -> fo f i a d x) !partialsolvermap b;; | null | https://raw.githubusercontent.com/tcsprojects/pgsolver/b0c31a8b367c405baed961385ad645d52f648325/src/paritygame/solverregistry.ml | ocaml | open Paritygame;;
open Tcsset;;
type global_solver_factory = string array -> global_solver
let solvermap = ref TreeMap.empty_def;;
let register_solver_factory solver_func identifier abbreviation description =
if TreeMap.mem identifier !solvermap
then failwith ("Solver `" ^ identifier ^ "' already registered!\n")
else solvermap := TreeMap.add identifier (solver_func, abbreviation, description) !solvermap;;
let register_solver solver_func = register_solver_factory (fun _ -> solver_func);;
let mem_solver identifier = TreeMap.mem identifier !solvermap;;
let find_solver identifier = TreeMap.find identifier !solvermap;;
let enum_solvers it = TreeMap.iter (fun i (f, a, d) -> it f i a d) !solvermap;;
let fold_solvers fo b = TreeMap.fold (fun i (f, a, d) x -> fo f i a d x) !solvermap b;;
type partial_solver_factory = string array -> partial_solver
let partialsolvermap = ref TreeMap.empty_def;;
let register_partial_solver_factory solver_func identifier abbreviation description =
if TreeMap.mem identifier !partialsolvermap
then failwith ("Partial Solver `" ^ identifier ^ "' already registered!\n")
else partialsolvermap := TreeMap.add identifier (solver_func, abbreviation, description) !partialsolvermap;;
let register_partial_solver solver_func = register_partial_solver_factory (fun _ -> solver_func);;
let mem_partial_solver identifier = TreeMap.mem identifier !partialsolvermap;;
let find_partial_solver identifier = TreeMap.find identifier !partialsolvermap;;
let enum_partial_solvers it = TreeMap.iter (fun i (f, a, d) -> it f i a d) !partialsolvermap;;
let fold_partial_solvers fo b = TreeMap.fold (fun i (f, a, d) x -> fo f i a d x) !partialsolvermap b;; |
|
e6357b1c2ad8d52fdb0570fdb5946234d28b3d9d4fa647e7fc485795c20b3b23 | casperschipper/ocaml-cisp | cisp5.ml | open Cisp
open Midi
let euclidTrigger = Euclid.euclidTrigger
let encode = Euclid.encode
let map = Seq.map
(* extract properties from input *)
let midiReader =
let ( let* ) x f = Reader.bind f x in
let* trigger = MidiState.boolFromNote in
Reader.return trigger
let sequenced pat =
let init = pat in
let f state =
match state () with
| Seq.Cons((p,n),tl) ->
if n <= 1 then
Some (Some p,tl)
else
Some (None,fun () -> Seq.Cons((p,n-1),tl))
| Seq.Nil -> None
in
Seq.unfold f init
let notesa =
let pat1 = [(60,1);(64,2);(67,1);(72,2);(60,1)] |> seq |> hold (sometimes 1 2 10) in
pat1 |> sequenced |> map (fun opt ->
match opt with
| None -> SilenceEvent
| Some p -> makeNoteOfInts p 100 (0.1 |> seci) 1)
let notesb =
let pat1 = [(67,2);(65,2);(67,1);(60,1);(60,1)] |> seq |> hold (pulse (seq [1;2]) (st 1) (st 2)) in
pat1 |> sequenced |> map (fun opt ->
match opt with
| None -> SilenceEvent
| Some p -> makeNoteOfInts p 100 (0.1 |> seci) 2)
let notesc =
let pat1 = [(48,1);(48,3);(55,2);(60,1);(58,1);(60,1);(55,1)] |> seq in
pat1 |> sequenced |> map (fun opt ->
match opt with
| None -> SilenceEvent
| Some p -> makeNoteOfInts p 100 (0.1 |> seci) 3)
let notesd =
let pitches = st (walki 84 (ch [|(-12);(12);(-7);(7)|]) |> take 10) |> concat
|> Seq.map (fun x -> (walki x (ch [|(-7);5;(-7);(5);(5);(-7);(-7)|]) |> take 4)) |> concat in
let times = seq [2;2;2;1;1;1;2;2;2;2;2;2;2;2;3;2;3;1;1] in
let z = zip pitches times in
let pat1 = z in
pat1 |> sequenced |> map (fun opt ->
match opt with
| None -> SilenceEvent
| Some p -> makeNoteOfInts p 100 (0.1 |> seci) 4)
let ofTrigger trig nts =
weavePattern trig (map Option.some nts) (st None)
let makeBundles (trigSq : bool Seq.t ) =
let seqList = [ notesa ;notesb ; notesc; notesd ] |> List.map (fun ns -> ofTrigger trigSq ns) in
let addOptToBundle opt bundle =
match opt with
| Some evt -> addToBundle bundle evt
| None -> bundle
in
seqList |> list_fold_heads_with silenceBundle addOptToBundle
let midiFun input =
input
|> MidiState.makeSeq
|> map (Reader.run midiReader)
|> makeBundles
|> serializeBundles
|> map toRaw
let () = Midi.playMidi midiFun Process.sample_rate
| null | https://raw.githubusercontent.com/casperschipper/ocaml-cisp/571ffb8e508c5427d01e407ba5e91ff2a4604f40/examples/cisp_backup/cycle_2021/cisp5.ml | ocaml | extract properties from input | open Cisp
open Midi
let euclidTrigger = Euclid.euclidTrigger
let encode = Euclid.encode
let map = Seq.map
let midiReader =
let ( let* ) x f = Reader.bind f x in
let* trigger = MidiState.boolFromNote in
Reader.return trigger
let sequenced pat =
let init = pat in
let f state =
match state () with
| Seq.Cons((p,n),tl) ->
if n <= 1 then
Some (Some p,tl)
else
Some (None,fun () -> Seq.Cons((p,n-1),tl))
| Seq.Nil -> None
in
Seq.unfold f init
let notesa =
let pat1 = [(60,1);(64,2);(67,1);(72,2);(60,1)] |> seq |> hold (sometimes 1 2 10) in
pat1 |> sequenced |> map (fun opt ->
match opt with
| None -> SilenceEvent
| Some p -> makeNoteOfInts p 100 (0.1 |> seci) 1)
let notesb =
let pat1 = [(67,2);(65,2);(67,1);(60,1);(60,1)] |> seq |> hold (pulse (seq [1;2]) (st 1) (st 2)) in
pat1 |> sequenced |> map (fun opt ->
match opt with
| None -> SilenceEvent
| Some p -> makeNoteOfInts p 100 (0.1 |> seci) 2)
let notesc =
let pat1 = [(48,1);(48,3);(55,2);(60,1);(58,1);(60,1);(55,1)] |> seq in
pat1 |> sequenced |> map (fun opt ->
match opt with
| None -> SilenceEvent
| Some p -> makeNoteOfInts p 100 (0.1 |> seci) 3)
let notesd =
let pitches = st (walki 84 (ch [|(-12);(12);(-7);(7)|]) |> take 10) |> concat
|> Seq.map (fun x -> (walki x (ch [|(-7);5;(-7);(5);(5);(-7);(-7)|]) |> take 4)) |> concat in
let times = seq [2;2;2;1;1;1;2;2;2;2;2;2;2;2;3;2;3;1;1] in
let z = zip pitches times in
let pat1 = z in
pat1 |> sequenced |> map (fun opt ->
match opt with
| None -> SilenceEvent
| Some p -> makeNoteOfInts p 100 (0.1 |> seci) 4)
let ofTrigger trig nts =
weavePattern trig (map Option.some nts) (st None)
let makeBundles (trigSq : bool Seq.t ) =
let seqList = [ notesa ;notesb ; notesc; notesd ] |> List.map (fun ns -> ofTrigger trigSq ns) in
let addOptToBundle opt bundle =
match opt with
| Some evt -> addToBundle bundle evt
| None -> bundle
in
seqList |> list_fold_heads_with silenceBundle addOptToBundle
let midiFun input =
input
|> MidiState.makeSeq
|> map (Reader.run midiReader)
|> makeBundles
|> serializeBundles
|> map toRaw
let () = Midi.playMidi midiFun Process.sample_rate
|
3dde43f9f42f7795f80f0f48ce156bbd6af023aefb756e35d22f5b8a08419297 | ha-mo-we/Racer | copy-support.lisp | -*- Mode : Lisp ; Syntax : Ansi - Common - Lisp ; Package : RACER ; Base : 10 -*-
;;; Copyright (c) 1998-2014,
;;; Volker Haarslev, Ralf Moeller, Michael Wessel.
;;; All rights reserved.
;;; Racer is distributed under the following BSD 3-clause license
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions are
;;; met:
;;; Redistributions of source code must retain the above copyright notice,
;;; this list of conditions and the following disclaimer.
;;; Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;; Neither the name Racer nor the names of its contributors may be used
;;; to endorse or promote products derived from this software without
;;; specific prior written permission.
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
;;; CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
;;; BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
;;; FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
;;; VOLKER HAARSLEV, RALF MOELLER, NOR MICHAEL WESSEL BE LIABLE FOR ANY
;;; DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES, LOSS OF USE, DATA, OR PROFITS, OR BUSINESS
;;; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
;;; IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
;;; OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
;;; ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package :racer)
(defun copy-hash-table (hash-table &optional (added-size 0) (transform-value-fn nil))
(if (null hash-table)
nil
(if (hash-table-p hash-table)
(let* ((custom-hash-function-p
#+:allegro
(eq (hash-table-hash-function hash-table) 'concept-hash)
#+:sbcl
(eq (sb-impl::hash-table-hash-fun hash-table) 'concept-hash)
#-(or :allegro :sbcl) nil)
(clone (racer-make-hash-table :test (hash-table-test hash-table)
:size (+ added-size (hash-table-count hash-table))
:rehash-size (hash-table-rehash-size hash-table)
:rehash-threshold (hash-table-rehash-threshold hash-table)
:structure-p custom-hash-function-p)))
(if transform-value-fn
(loop for key being the hash-key of hash-table using (hash-value value) do
(setf (gethash key clone) (funcall transform-value-fn value)))
(loop for key being the hash-key of hash-table using (hash-value value) do
(setf (gethash key clone) value)))
(incf-statistics *number-of-copied-hash-tables*)
(incf-statistics *number-of-copied-hash-table-entriess* (hash-table-count hash-table))
(incf-statistics *size-of-copied-hash-tables* (hash-table-size hash-table))
clone)
(error "~S is not of type hash-table" hash-table))))
(defun merge-hash-tables (hash-table-1
hash-table-2
&optional
(added-size nil)
(transform-value-fn nil))
#+:debug
(assert (and (hash-table-p hash-table-1) (hash-table-p hash-table-2)))
#+(and :debug (or :allegro))
(assert (and (eq (hash-table-hash-function hash-table-1)
(hash-table-hash-function hash-table-2))))
(let ((merged-table (copy-hash-table hash-table-1
(or added-size (hash-table-count hash-table-2))
transform-value-fn)))
(loop for key being the hash-key of hash-table-2 using (hash-value value)
for old-entry = (gethash key merged-table)
do
(if old-entry
(if transform-value-fn
(setf (gethash key merged-table) (funcall transform-value-fn value))
(setf (gethash key merged-table) (append value old-entry)))
(setf (gethash key merged-table) value)))
(incf-statistics *number-of-merged-hash-tables* 2)
(incf-statistics *number-of-merged-hash-table-entriess*
(+ (hash-table-count hash-table-1) (hash-table-count hash-table-2)))
(incf-statistics *size-of-merged-hash-tables*
(+ (hash-table-size hash-table-1) (hash-table-size hash-table-2)))
merged-table))
| null | https://raw.githubusercontent.com/ha-mo-we/Racer/d690841d10015c7a75b1ded393fcf0a33092c4de/source/copy-support.lisp | lisp | Syntax : Ansi - Common - Lisp ; Package : RACER ; Base : 10 -*-
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES, LOSS OF USE, DATA, OR PROFITS, OR BUSINESS
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
Copyright (c) 1998-2014,
Volker Haarslev, Ralf Moeller, Michael Wessel.
Racer is distributed under the following BSD 3-clause license
Neither the name Racer nor the names of its contributors may be used
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
VOLKER HAARSLEV, RALF MOELLER, NOR MICHAEL WESSEL BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
(in-package :racer)
(defun copy-hash-table (hash-table &optional (added-size 0) (transform-value-fn nil))
(if (null hash-table)
nil
(if (hash-table-p hash-table)
(let* ((custom-hash-function-p
#+:allegro
(eq (hash-table-hash-function hash-table) 'concept-hash)
#+:sbcl
(eq (sb-impl::hash-table-hash-fun hash-table) 'concept-hash)
#-(or :allegro :sbcl) nil)
(clone (racer-make-hash-table :test (hash-table-test hash-table)
:size (+ added-size (hash-table-count hash-table))
:rehash-size (hash-table-rehash-size hash-table)
:rehash-threshold (hash-table-rehash-threshold hash-table)
:structure-p custom-hash-function-p)))
(if transform-value-fn
(loop for key being the hash-key of hash-table using (hash-value value) do
(setf (gethash key clone) (funcall transform-value-fn value)))
(loop for key being the hash-key of hash-table using (hash-value value) do
(setf (gethash key clone) value)))
(incf-statistics *number-of-copied-hash-tables*)
(incf-statistics *number-of-copied-hash-table-entriess* (hash-table-count hash-table))
(incf-statistics *size-of-copied-hash-tables* (hash-table-size hash-table))
clone)
(error "~S is not of type hash-table" hash-table))))
(defun merge-hash-tables (hash-table-1
hash-table-2
&optional
(added-size nil)
(transform-value-fn nil))
#+:debug
(assert (and (hash-table-p hash-table-1) (hash-table-p hash-table-2)))
#+(and :debug (or :allegro))
(assert (and (eq (hash-table-hash-function hash-table-1)
(hash-table-hash-function hash-table-2))))
(let ((merged-table (copy-hash-table hash-table-1
(or added-size (hash-table-count hash-table-2))
transform-value-fn)))
(loop for key being the hash-key of hash-table-2 using (hash-value value)
for old-entry = (gethash key merged-table)
do
(if old-entry
(if transform-value-fn
(setf (gethash key merged-table) (funcall transform-value-fn value))
(setf (gethash key merged-table) (append value old-entry)))
(setf (gethash key merged-table) value)))
(incf-statistics *number-of-merged-hash-tables* 2)
(incf-statistics *number-of-merged-hash-table-entriess*
(+ (hash-table-count hash-table-1) (hash-table-count hash-table-2)))
(incf-statistics *size-of-merged-hash-tables*
(+ (hash-table-size hash-table-1) (hash-table-size hash-table-2)))
merged-table))
|
4a224122f7cb38b3cab0cffa9ef2adee77f2a1ac1d933005c27ec1c7eb0724ab | julienXX/clj-slack | chat.clj | (ns clj-slack.chat
(:refer-clojure :exclude [update])
(:require [clj-slack.core :refer [slack-request slack-post-request stringify-keys]]
[clojure.data.json :refer [write-str]]))
(defn- serialize-option [option-key options]
(let [option-value (option-key options)]
(if (and option-value (not (string? option-value)))
(assoc options option-key (write-str option-value))
options)))
(defn delete
"Deletes a message."
[connection timestamp channel-id]
(slack-request connection "chat.delete" {"ts" timestamp "channel" channel-id}))
(defn post-message
"Sends a message to a channel.
Optional arguments are:
- username: name of bot
- as_user: pass true (as a string) to post the message as the authed user, instead of as a bot
- parse: change how messages are treated
- link_names: find and link channel names and usernames
- blocks: structured message blocks
- attachments: structured message attachments. (NB: Slack recommends using Blocks
instead of legacy attachments:
#secondary-attachments)
- unfurl_links: pass true to enable unfurling of primarily text-based content
- unfurl_media: pass false to disable unfurling of media content
- icon_url: URL to an image to use as the icon for this message
- icon_emoji: emoji to use as the icon for this message. Overrides icon_url"
([connection channel-id text]
(post-message connection channel-id text {}))
([connection channel-id text optionals]
(->> optionals
(serialize-option :blocks)
(serialize-option :attachments)
stringify-keys
(merge {"channel" channel-id
"text" text})
(slack-post-request connection "chat.postMessage"))))
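;; Usage sketch (hypothetical values): clj-slack expects a connection map with
;; :api-url and :token; the channel id and token below are placeholders.
;;
;;   (def conn {:api-url "https://slack.com/api" :token "xoxb-your-token"})
;;   (post-message conn "C12345678" "Hello"
;;                 {"username" "announcer" "unfurl_links" "true"})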
(defn update
"Sends a message to a channel."
[connection timestamp channel-id text]
(slack-request connection "chat.update" {"ts" timestamp "channel" channel-id "text" text}))
| null | https://raw.githubusercontent.com/julienXX/clj-slack/ff5649161646f11dd8d52d1315c0e74dc723eeb7/src/clj_slack/chat.clj | clojure | (ns clj-slack.chat
(:refer-clojure :exclude [update])
(:require [clj-slack.core :refer [slack-request slack-post-request stringify-keys]]
[clojure.data.json :refer [write-str]]))
(defn- serialize-option [option-key options]
(let [option-value (option-key options)]
(if (and option-value (not (string? option-value)))
(assoc options option-key (write-str option-value))
options)))
(defn delete
"Deletes a message."
[connection timestamp channel-id]
(slack-request connection "chat.delete" {"ts" timestamp "channel" channel-id}))
(defn post-message
"Sends a message to a channel.
Optional arguments are:
- username: name of bot
- as_user: pass true (as a string) to post the message as the authed user, instead of as a bot
- parse: change how messages are treated
- link_names: find and link channel names and usernames
- blocks: structured message blocks
- attachments: structured message attachments. (NB: Slack recommends using Blocks
instead of legacy attachments:
#secondary-attachments)
- unfurl_links: pass true to enable unfurling of primarily text-based content
- unfurl_media: pass false to disable unfurling of media content
- icon_url: URL to an image to use as the icon for this message
- icon_emoji: emoji to use as the icon for this message. Overrides icon_url"
([connection channel-id text]
(post-message connection channel-id text {}))
([connection channel-id text optionals]
(->> optionals
(serialize-option :blocks)
(serialize-option :attachments)
stringify-keys
(merge {"channel" channel-id
"text" text})
(slack-post-request connection "chat.postMessage"))))
(defn update
"Sends a message to a channel."
[connection timestamp channel-id text]
(slack-request connection "chat.update" {"ts" timestamp "channel" channel-id "text" text}))
|
|
722fb09696c399afe2abe827fcfae32d34a0b185ba8b18d9fc5c3e0827791123 | ocsigen/js_of_ocaml | test3.ml | (* TEST
*)
open Effect
open Effect.Deep
type _ t += E : unit t
exception X
let () =
Printf.printf "%d\n%!" @@
match_with (fun () ->
Printf.printf "in handler. raising X\n%!";
raise X) ()
{ retc = (fun v -> v);
exnc = (function
| X -> 10
| e -> raise e);
effc = (fun (type a) (e : a t) ->
match e with
| E -> Some (fun k -> 11)
| e -> None) }
| null | https://raw.githubusercontent.com/ocsigen/js_of_ocaml/3a615d693b213140ea8e5c43d5bbe99569bc898d/compiler/tests-ocaml/lib-effects/test3.ml | ocaml | TEST
|
open Effect
open Effect.Deep
type _ t += E : unit t
exception X
let () =
Printf.printf "%d\n%!" @@
match_with (fun () ->
Printf.printf "in handler. raising X\n%!";
raise X) ()
{ retc = (fun v -> v);
exnc = (function
| X -> 10
| e -> raise e);
effc = (fun (type a) (e : a t) ->
match e with
| E -> Some (fun k -> 11)
| e -> None) }
|
c08421d884f1d7abf2cb73d628b56d2d0a4869d14610c1fa4dd1e43e3677765c | clash-lang/clash-compiler | Leak.hs | {-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
module Clash.GHCi.Leak
( LeakIndicators
, getLeakIndicators
, checkLeakIndicators
) where
import Control.Monad
import GHC
import Outputable
import HscTypes
import UniqDFM
import System.Mem
import System.Mem.Weak
-- Checking for space leaks in GHCi. See #15111, and the
-- -fghci-leak-check flag.
data LeakIndicators = LeakIndicators [LeakModIndicators]
data LeakModIndicators = LeakModIndicators
{ leakMod :: Weak HomeModInfo
, leakIface :: Weak ModIface
, leakDetails :: Weak ModDetails
, leakLinkable :: Maybe (Weak Linkable)
}
-- | Grab weak references to some of the data structures representing
-- the currently loaded modules.
getLeakIndicators :: HscEnv -> IO LeakIndicators
getLeakIndicators HscEnv{..} =
fmap LeakIndicators $
forM (eltsUDFM hsc_HPT) $ \hmi@HomeModInfo{..} -> do
leakMod <- mkWeakPtr hmi Nothing
leakIface <- mkWeakPtr hm_iface Nothing
leakDetails <- mkWeakPtr hm_details Nothing
leakLinkable <- mapM (`mkWeakPtr` Nothing) hm_linkable
return $ LeakModIndicators{..}
-- | Look at the LeakIndicators collected by an earlier call to
-- `getLeakIndicators`, and print messages if any of them are still
-- alive.
checkLeakIndicators :: DynFlags -> LeakIndicators -> IO ()
checkLeakIndicators dflags (LeakIndicators leakmods) = do
performGC
forM_ leakmods $ \LeakModIndicators{..} -> do
deRefWeak leakMod >>= \case
Nothing -> return ()
Just hmi ->
report ("HomeModInfo for " ++
showSDoc dflags (ppr (mi_module (hm_iface hmi)))) (Just hmi)
deRefWeak leakIface >>= report "ModIface"
deRefWeak leakDetails >>= report "ModDetails"
forM_ leakLinkable $ \l -> deRefWeak l >>= report "Linkable"
where
report :: String -> Maybe a -> IO ()
report _ Nothing = return ()
report msg (Just _) =
putStrLn ("-fghci-leak-check: " ++ msg ++ " is still alive!")
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/8e461a910f2f37c900705a0847a9b533bce4d2ea/clash-ghc/src-bin-861/Clash/GHCi/Leak.hs | haskell | -fghci-leak-check flag.
| Grab weak references to some of the data structures representing
the currently loaded modules.
`getLeakIndicators`, and print messages if any of them are still
alive. | {-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
module Clash.GHCi.Leak
( LeakIndicators
, getLeakIndicators
, checkLeakIndicators
) where
import Control.Monad
import GHC
import Outputable
import HscTypes
import UniqDFM
import System.Mem
import System.Mem.Weak
-- Checking for space leaks in GHCi. See #15111, and the
data LeakIndicators = LeakIndicators [LeakModIndicators]
data LeakModIndicators = LeakModIndicators
{ leakMod :: Weak HomeModInfo
, leakIface :: Weak ModIface
, leakDetails :: Weak ModDetails
, leakLinkable :: Maybe (Weak Linkable)
}
getLeakIndicators :: HscEnv -> IO LeakIndicators
getLeakIndicators HscEnv{..} =
fmap LeakIndicators $
forM (eltsUDFM hsc_HPT) $ \hmi@HomeModInfo{..} -> do
leakMod <- mkWeakPtr hmi Nothing
leakIface <- mkWeakPtr hm_iface Nothing
leakDetails <- mkWeakPtr hm_details Nothing
leakLinkable <- mapM (`mkWeakPtr` Nothing) hm_linkable
return $ LeakModIndicators{..}
-- | Look at the LeakIndicators collected by an earlier call to
checkLeakIndicators :: DynFlags -> LeakIndicators -> IO ()
checkLeakIndicators dflags (LeakIndicators leakmods) = do
performGC
forM_ leakmods $ \LeakModIndicators{..} -> do
deRefWeak leakMod >>= \case
Nothing -> return ()
Just hmi ->
report ("HomeModInfo for " ++
showSDoc dflags (ppr (mi_module (hm_iface hmi)))) (Just hmi)
deRefWeak leakIface >>= report "ModIface"
deRefWeak leakDetails >>= report "ModDetails"
forM_ leakLinkable $ \l -> deRefWeak l >>= report "Linkable"
where
report :: String -> Maybe a -> IO ()
report _ Nothing = return ()
report msg (Just _) =
putStrLn ("-fghci-leak-check: " ++ msg ++ " is still alive!")
|
b0122b7c1dd8cbca11b926e0a249c4bc237f341119fecc38318e8bea4d6103b2 | roburio/dns-secondary | unikernel.ml | (* (c) 2017, 2018, all rights reserved *)
module Main (R : Mirage_random.S) (P : Mirage_clock.PCLOCK) (M : Mirage_clock.MCLOCK) (T : Mirage_time.S) (S : Tcpip.Stack.V4V6) = struct
module D = Dns_server_mirage.Make(P)(M)(T)(S)
let start _rng _pclock _mclock _ s =
let keys = List.fold_left (fun acc str ->
match Dns.Dnskey.name_key_of_string str with
| Error (`Msg msg) -> Logs.err (fun m -> m "key parse error %s" msg) ; exit 64
| Ok (name, key) -> (name, key) :: acc)
[] (Key_gen.keys ())
in
let t =
Dns_server.Secondary.create ~rng:R.generate
~tsig_verify:Dns_tsig.verify ~tsig_sign:Dns_tsig.sign keys
in
D.secondary s t ;
S.listen s
end
| null | https://raw.githubusercontent.com/roburio/dns-secondary/dc6158ba988117c32a37ee6a0cbcabb075508b02/unikernel.ml | ocaml | ( c ) 2017 , 2018 , all rights reserved
module Main (R : Mirage_random.S) (P : Mirage_clock.PCLOCK) (M : Mirage_clock.MCLOCK) (T : Mirage_time.S) (S : Tcpip.Stack.V4V6) = struct
module D = Dns_server_mirage.Make(P)(M)(T)(S)
let start _rng _pclock _mclock _ s =
let keys = List.fold_left (fun acc str ->
match Dns.Dnskey.name_key_of_string str with
| Error (`Msg msg) -> Logs.err (fun m -> m "key parse error %s" msg) ; exit 64
| Ok (name, key) -> (name, key) :: acc)
[] (Key_gen.keys ())
in
let t =
Dns_server.Secondary.create ~rng:R.generate
~tsig_verify:Dns_tsig.verify ~tsig_sign:Dns_tsig.sign keys
in
D.secondary s t ;
S.listen s
end
|
|
c54bbb55202fae7500b154e4978c41284ba5cfe088da255a5ee12d3ba0f05687 | alanz/ghc-exactprint | TH_spliceD2.hs | # LANGUAGE TemplateHaskell #
module TH_spliceD2 where
import qualified TH_spliceD2_Lib
$( [d| data T = T TH_spliceD2_Lib.T |] )
| null | https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc86/TH_spliceD2.hs | haskell | # LANGUAGE TemplateHaskell #
module TH_spliceD2 where
import qualified TH_spliceD2_Lib
$( [d| data T = T TH_spliceD2_Lib.T |] )
|
|
7ffe29d2b79a078727adbbc30f18e10d99e5039a4d68b495fd9c0a5d61826230 | stackbuilders/dotenv-hs | ParseSpec.hs | {-# OPTIONS_GHC -fno-warn-orphans #-}
module Configuration.Dotenv.ParseSpec (main, spec) where
import Configuration.Dotenv.Parse (configParser)
import Configuration.Dotenv.ParsedVariable (ParsedVariable(..),
VarValue(..),
VarFragment(..))
import Data.Void (Void)
import Test.Hspec (it, context, describe, Spec, hspec)
import Test.Hspec.Megaparsec (shouldParse, shouldFailOn, shouldSucceedOn)
import Text.Megaparsec (ParseErrorBundle, parse)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "parse" $ do
it "parses unquoted values" $
parseConfig "FOO=bar"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
it "parses values with spaces around equal signs" $ do
parseConfig "FOO =bar"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
parseConfig "FOO= bar"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
parseConfig "FOO =\t bar"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
it "parses double-quoted values" $
parseConfig "FOO=\"bar\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "bar"])]
it "parses single-quoted values" $
parseConfig "FOO='bar'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "bar"])]
it "parses escaped double quotes" $
parseConfig "FOO=\"escaped\\\"bar\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "escaped\"bar"])]
it "supports CRLF line breaks" $
parseConfig "FOO=bar\r\nbaz=fbb"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"]),
ParsedVariable "baz" (Unquoted [VarLiteral "fbb"])]
it "parses empty values" $
parseConfig "FOO="
`shouldParse` [ParsedVariable "FOO" (Unquoted [])]
it "parses unquoted interpolated values" $ do
parseConfig "FOO=$HOME"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarInterpolation "HOME"])]
parseConfig "FOO=abc_$HOME"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "abc_",
VarInterpolation "HOME"])
]
parseConfig "FOO=${HOME}"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarInterpolation "HOME"])]
parseConfig "FOO=abc_${HOME}"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "abc_",
VarInterpolation "HOME"])
]
it "parses double-quoted interpolated values" $ do
parseConfig "FOO=\"$HOME\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarInterpolation "HOME"])]
parseConfig "FOO=\"abc_$HOME\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "abc_",
VarInterpolation "HOME"])
]
parseConfig "FOO=\"${HOME}\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarInterpolation "HOME"])]
parseConfig "FOO=\"abc_${HOME}\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "abc_",
VarInterpolation "HOME"])
]
it "parses single-quoted interpolated values as literals" $ do
parseConfig "FOO='$HOME'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "$HOME"])]
parseConfig "FOO='abc_$HOME'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "abc_$HOME"])]
parseConfig "FOO='${HOME}'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "${HOME}"])]
parseConfig "FOO='abc_${HOME}'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "abc_${HOME}"])]
it "does not parse if line format is incorrect" $ do
parseConfig `shouldFailOn` "lol$wut"
parseConfig `shouldFailOn` "KEY=\nVALUE"
parseConfig `shouldFailOn` "KEY\n=VALUE"
it "expands newlines in quoted strings" $
parseConfig "FOO=\"bar\nbaz\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "bar\nbaz"])]
it "does not parse variables with hyphens in the name" $
parseConfig `shouldFailOn` "FOO-BAR=foobar"
it "parses variables with \"_\" in the name" $
parseConfig "FOO_BAR=foobar"
`shouldParse` [ParsedVariable "FOO_BAR" (Unquoted [VarLiteral "foobar"])]
it "parses variables with digits after the first character" $
parseConfig "FOO_BAR_12=foobar"
`shouldParse` [ParsedVariable "FOO_BAR_12" (Unquoted [VarLiteral "foobar"])]
it "allows vertical spaces after a quoted variable" $
parseConfig "foo='bar' "
`shouldParse` [ParsedVariable "foo" (SingleQuoted [VarLiteral "bar"])]
it "does not parse variable names beginning with a digit" $
parseConfig `shouldFailOn` "45FOO_BAR=foobar"
it "strips unquoted values" $
parseConfig "foo=bar "
`shouldParse` [ParsedVariable "foo" (Unquoted [VarLiteral "bar"])]
it "ignores empty lines" $
parseConfig "\n \t \nfoo=bar\n \nfizz=buzz"
`shouldParse` [ParsedVariable "foo" (Unquoted [VarLiteral "bar"]),
ParsedVariable "fizz" (Unquoted [VarLiteral "buzz"])]
it "ignores inline comments after unquoted arguments" $
parseConfig "FOO=bar # this is foo"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
it "ignores inline comments after quoted arguments" $
parseConfig "FOO=\"bar\" # this is foo"
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "bar"])]
it "allows \"#\" in quoted values" $
parseConfig "foo=\"bar#baz\" # comment"
`shouldParse` [ParsedVariable "foo" (DoubleQuoted [VarLiteral "bar#baz"])]
it "ignores comment lines" $
parseConfig "\n\t \n\n # HERE GOES FOO \nfoo=bar"
`shouldParse` [ParsedVariable "foo" (Unquoted [VarLiteral "bar"])]
it "doesn't allow more configuration options after a quoted value" $
parseConfig `shouldFailOn` "foo='bar'baz='buz'"
context "$(command) interpolation" $ do
it "parses a simple command" $ do
parseConfig "FOO=$(command)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [CommandInterpolation "command" []])]
it "parses a command anywhere in the value" $ do
parseConfig "FOO=asdf_$(command)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "asdf_", CommandInterpolation "command" []])]
it "parses a command with arguments" $ do
parseConfig "FOO=$(foo-bar arg1 arg2)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [CommandInterpolation "foo-bar" ["arg1", "arg2"]])]
it "parses a command with quoted arguments" $ do
parseConfig "FOO=$(bin/foo \"arg 1\" arg2)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [CommandInterpolation "bin/foo" ["arg 1", "arg2"]])]
it "parses a command with arguments separated by newlines" $ do
parseConfig "FOO=$(foo.sh \"arg 1\"\narg2\n)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [CommandInterpolation "foo.sh" ["arg 1", "arg2"]])]
it "parses empty content (when the file is empty)" $
parseConfig `shouldSucceedOn` ""
parseConfig :: String -> Either (ParseErrorBundle String Void) [ParsedVariable]
parseConfig = parse configParser ""
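-- Worked example combining two of the behaviours specified above (unquoted
-- interpolation plus an ignored inline comment); the expected result is a
-- sketch, not an additional test case:
--
-- > parseConfig "FOO=abc_${HOME} # comment"
-- would be expected to yield
-- > Right [ParsedVariable "FOO" (Unquoted [VarLiteral "abc_", VarInterpolation "HOME"])]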
| null | https://raw.githubusercontent.com/stackbuilders/dotenv-hs/cca92ab12dd6ed6bfa78f1e069b83b81286fead3/spec/Configuration/Dotenv/ParseSpec.hs | haskell | # OPTIONS_GHC -fno - warn - orphans #
module Configuration.Dotenv.ParseSpec (main, spec) where
import Configuration.Dotenv.Parse (configParser)
import Configuration.Dotenv.ParsedVariable (ParsedVariable(..),
VarValue(..),
VarFragment(..))
import Data.Void (Void)
import Test.Hspec (it, context, describe, Spec, hspec)
import Test.Hspec.Megaparsec (shouldParse, shouldFailOn, shouldSucceedOn)
import Text.Megaparsec (ParseErrorBundle, parse)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "parse" $ do
it "parses unquoted values" $
parseConfig "FOO=bar"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
it "parses values with spaces around equal signs" $ do
parseConfig "FOO =bar"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
parseConfig "FOO= bar"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
parseConfig "FOO =\t bar"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
it "parses double-quoted values" $
parseConfig "FOO=\"bar\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "bar"])]
it "parses single-quoted values" $
parseConfig "FOO='bar'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "bar"])]
it "parses escaped double quotes" $
parseConfig "FOO=\"escaped\\\"bar\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "escaped\"bar"])]
it "supports CRLF line breaks" $
parseConfig "FOO=bar\r\nbaz=fbb"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"]),
ParsedVariable "baz" (Unquoted [VarLiteral "fbb"])]
it "parses empty values" $
parseConfig "FOO="
`shouldParse` [ParsedVariable "FOO" (Unquoted [])]
it "parses unquoted interpolated values" $ do
parseConfig "FOO=$HOME"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarInterpolation "HOME"])]
parseConfig "FOO=abc_$HOME"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "abc_",
VarInterpolation "HOME"])
]
parseConfig "FOO=${HOME}"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarInterpolation "HOME"])]
parseConfig "FOO=abc_${HOME}"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "abc_",
VarInterpolation "HOME"])
]
it "parses double-quoted interpolated values" $ do
parseConfig "FOO=\"$HOME\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarInterpolation "HOME"])]
parseConfig "FOO=\"abc_$HOME\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "abc_",
VarInterpolation "HOME"])
]
parseConfig "FOO=\"${HOME}\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarInterpolation "HOME"])]
parseConfig "FOO=\"abc_${HOME}\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "abc_",
VarInterpolation "HOME"])
]
it "parses single-quoted interpolated values as literals" $ do
parseConfig "FOO='$HOME'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "$HOME"])]
parseConfig "FOO='abc_$HOME'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "abc_$HOME"])]
parseConfig "FOO='${HOME}'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "${HOME}"])]
parseConfig "FOO='abc_${HOME}'"
`shouldParse` [ParsedVariable "FOO" (SingleQuoted [VarLiteral "abc_${HOME}"])]
it "does not parse if line format is incorrect" $ do
parseConfig `shouldFailOn` "lol$wut"
parseConfig `shouldFailOn` "KEY=\nVALUE"
parseConfig `shouldFailOn` "KEY\n=VALUE"
it "expands newlines in quoted strings" $
parseConfig "FOO=\"bar\nbaz\""
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "bar\nbaz"])]
it "does not parse variables with hyphens in the name" $
parseConfig `shouldFailOn` "FOO-BAR=foobar"
it "parses variables with \"_\" in the name" $
parseConfig "FOO_BAR=foobar"
`shouldParse` [ParsedVariable "FOO_BAR" (Unquoted [VarLiteral "foobar"])]
it "parses variables with digits after the first character" $
parseConfig "FOO_BAR_12=foobar"
`shouldParse` [ParsedVariable "FOO_BAR_12" (Unquoted [VarLiteral "foobar"])]
it "allows vertical spaces after a quoted variable" $
parseConfig "foo='bar' "
`shouldParse` [ParsedVariable "foo" (SingleQuoted [VarLiteral "bar"])]
it "does not parse variable names beginning with a digit" $
parseConfig `shouldFailOn` "45FOO_BAR=foobar"
it "strips unquoted values" $
parseConfig "foo=bar "
`shouldParse` [ParsedVariable "foo" (Unquoted [VarLiteral "bar"])]
it "ignores empty lines" $
parseConfig "\n \t \nfoo=bar\n \nfizz=buzz"
`shouldParse` [ParsedVariable "foo" (Unquoted [VarLiteral "bar"]),
ParsedVariable "fizz" (Unquoted [VarLiteral "buzz"])]
it "ignores inline comments after unquoted arguments" $
parseConfig "FOO=bar # this is foo"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
it "ignores inline comments after quoted arguments" $
parseConfig "FOO=\"bar\" # this is foo"
`shouldParse` [ParsedVariable "FOO" (DoubleQuoted [VarLiteral "bar"])]
it "allows \"#\" in quoted values" $
parseConfig "foo=\"bar#baz\" # comment"
`shouldParse` [ParsedVariable "foo" (DoubleQuoted [VarLiteral "bar#baz"])]
it "ignores comment lines" $
parseConfig "\n\t \n\n # HERE GOES FOO \nfoo=bar"
`shouldParse` [ParsedVariable "foo" (Unquoted [VarLiteral "bar"])]
it "doesn't allow more configuration options after a quoted value" $
parseConfig `shouldFailOn` "foo='bar'baz='buz'"
context "$(command) interpolation" $ do
it "parses a simple command" $ do
parseConfig "FOO=$(command)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [CommandInterpolation "command" []])]
it "parses a command anywhere in the value" $ do
parseConfig "FOO=asdf_$(command)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [VarLiteral "asdf_", CommandInterpolation "command" []])]
it "parses a command with arguments" $ do
parseConfig "FOO=$(foo-bar arg1 arg2)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [CommandInterpolation "foo-bar" ["arg1", "arg2"]])]
it "parses a command with quoted arguments" $ do
parseConfig "FOO=$(bin/foo \"arg 1\" arg2)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [CommandInterpolation "bin/foo" ["arg 1", "arg2"]])]
it "parses a command with arguments separated by newlines" $ do
parseConfig "FOO=$(foo.sh \"arg 1\"\narg2\n)"
`shouldParse` [ParsedVariable "FOO" (Unquoted [CommandInterpolation "foo.sh" ["arg 1", "arg2"]])]
it "parses empty content (when the file is empty)" $
parseConfig `shouldSucceedOn` ""
parseConfig :: String -> Either (ParseErrorBundle String Void) [ParsedVariable]
parseConfig = parse configParser ""
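-- Illustrative behaviour of the helper above (added note), consistent with the
-- specs in this file:
--   parseConfig "FOO=bar"  ==  Right [ParsedVariable "FOO" (Unquoted [VarLiteral "bar"])]
--   parseConfig "lol$wut"  yields a Left parse error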
|
|
71f9a3bd07d9997d2926cd2e8caf1584eb7005524b26588d3ae5e73a1a978bf0 | ajhc/ajhc | IORef.hs |
import Data.IORef
fact :: Int -> IO Int
fact n = do
ref <- newIORef 1
let f 1 = return ()
f n = modifyIORef ref (n*) >> f (n - 1)
f n
readIORef ref
main = do
r <- fact 5
putStrLn (replicate r 'x')
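-- Added note: 'fact' multiplies the IORef contents by n, n-1, ..., 2, so
-- 'fact 5' evaluates to 120 and 'main' prints a line of 120 'x' characters.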
| null | https://raw.githubusercontent.com/ajhc/ajhc/8ef784a6a3b5998cfcd95d0142d627da9576f264/regress/tests/3_io/IORef.hs | haskell |
import Data.IORef
fact :: Int -> IO Int
fact n = do
ref <- newIORef 1
let f 1 = return ()
f n = modifyIORef ref (n*) >> f (n - 1)
f n
readIORef ref
main = do
r <- fact 5
putStrLn (replicate r 'x')
|
|
36d1aaa32068e49a368d935e8820c26ba9ba20ef5e9862003a3eae9188e44f38 | effectfully/prefolds | Stepper.hs | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
{-# LANGUAGE RankNTypes, ExistentialQuantification, NoImplicitPrelude #-}
module Stepper where
import Lib
import Data.Strict.Tuple
import Data.Strict.Drive
class (Functor (t m), Functor (s m), Monad m) => TransPairing t s m where
interpretT :: (m a -> m b -> m c) -> t m b -> s m a -> m c
data Stepper t m b = forall acc. Stepper (acc -> m b) (acc -> DriveT (t m) acc) (DriveT m acc)
instance Monad m => Functor (Stepper t m) where
fmap h (Stepper g f a) = Stepper (h <.> g) f a
{-# INLINEABLE fmap #-}
interpretDriveT :: TransPairing t s m
=> (DriveT m a -> DriveT m b -> m c) -> DriveT (t m) b -> DriveT (s m) a -> m c
interpretDriveT f (DriveT b) (DriveT a) = interpretT (\a' b' -> f (DriveT a') (DriveT b')) b a
{-# INLINEABLE interpretDriveT #-}
instance TransPairing t s m => TransPairing (Stepper t) (Stepper s) m where
interpretT h (Stepper g2 f2 a2) (Stepper g1 f1 a1) = go a1 a2 where
go a1 a2 = driveTM (\(Pair a1' a2') -> h (g1 a1') (g2 a2'))
(\(Pair a1' a2') -> interpretDriveT go (f2 a2') (f1 a1'))
(Pair <$> a1 <&> a2)
{-# INLINEABLE interpretT #-}
newtype Fun a m b = Fun { getFun :: a -> m b }
newtype Tup a m b = Tup { getTup :: m (Pair a b) }
instance Functor f => Functor (Fun a f) where
fmap g (Fun f) = Fun $ g <.> f
{-# INLINEABLE fmap #-}
instance Functor m => Functor (Tup a m) where
fmap g (Tup p) = Tup $ (\(Pair x y) -> Pair x $ g y) <$> p
{-# INLINEABLE fmap #-}
instance Monad m => TransPairing (Fun a) (Tup a) m where
interpretT g (Fun f) (Tup p) = p >>= \(Pair x y) -> g (return y) (f x)
{-# INLINEABLE interpretT #-}
-- DriveT (Fun a m) acc
-- ~ Fun a m (Drive acc)
-- ~ a -> m (Drive acc)
type Fold a = Stepper (Fun a)
-- DriveT (Tup a m) acc
-- ~ Tup a m (Drive acc)
-- ~ m (Pair a (Drive acc))
type Unfold a = Stepper (Tup a)
test :: Monad m => Fold a m b -> Unfold a m c -> m b
test = interpretT (*>)
| null | https://raw.githubusercontent.com/effectfully/prefolds/c0a31f6a7e3d6e8a6f8ae28b152e8a03c75b48f3/src/Experiment/Stepper.hs | haskell | # LANGUAGE RankNTypes, ExistentialQuantification, NoImplicitPrelude #
~ m (Pair a (Drive acc)) | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
module Stepper where
import Lib
import Data.Strict.Tuple
import Data.Strict.Drive
class (Functor (t m), Functor (s m), Monad m) => TransPairing t s m where
interpretT :: (m a -> m b -> m c) -> t m b -> s m a -> m c
data Stepper t m b = forall acc. Stepper (acc -> m b) (acc -> DriveT (t m) acc) (DriveT m acc)
instance Monad m => Functor (Stepper t m) where
fmap h (Stepper g f a) = Stepper (h <.> g) f a
{-# INLINEABLE fmap #-}
interpretDriveT :: TransPairing t s m
=> (DriveT m a -> DriveT m b -> m c) -> DriveT (t m) b -> DriveT (s m) a -> m c
interpretDriveT f (DriveT b) (DriveT a) = interpretT (\a' b' -> f (DriveT a') (DriveT b')) b a
{-# INLINEABLE interpretDriveT #-}
instance TransPairing t s m => TransPairing (Stepper t) (Stepper s) m where
interpretT h (Stepper g2 f2 a2) (Stepper g1 f1 a1) = go a1 a2 where
go a1 a2 = driveTM (\(Pair a1' a2') -> h (g1 a1') (g2 a2'))
(\(Pair a1' a2') -> interpretDriveT go (f2 a2') (f1 a1'))
(Pair <$> a1 <&> a2)
{-# INLINEABLE interpretT #-}
newtype Fun a m b = Fun { getFun :: a -> m b }
newtype Tup a m b = Tup { getTup :: m (Pair a b) }
instance Functor f => Functor (Fun a f) where
fmap g (Fun f) = Fun $ g <.> f
{-# INLINEABLE fmap #-}
instance Functor m => Functor (Tup a m) where
fmap g (Tup p) = Tup $ (\(Pair x y) -> Pair x $ g y) <$> p
{-# INLINEABLE fmap #-}
instance Monad m => TransPairing (Fun a) (Tup a) m where
interpretT g (Fun f) (Tup p) = p >>= \(Pair x y) -> g (return y) (f x)
{-# INLINEABLE interpretT #-}
-- DriveT (Fun a m) acc
-- ~ Fun a m (Drive acc)
-- ~ a -> m (Drive acc)
type Fold a = Stepper (Fun a)
-- DriveT (Tup a m) acc
-- ~ Tup a m (Drive acc)
type Unfold a = Stepper (Tup a)
test :: Monad m => Fold a m b -> Unfold a m c -> m b
test = interpretT (*>)
|
50c25439d3a979b9ead8da8435c12e855cbeee8417d8c6dd77b332f4b376ad56 | zotonic/zotonic | filter_url_abs.erl | @author < >
2020
%% @doc 'url_abs' filter, generates an url with hostname/protocol.
Copyright 2020
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(filter_url_abs).
-export([url_abs/2]).
url_abs(undefined, _Context) ->
undefined;
url_abs(<<>>, Context) ->
z_context:abs_url(<<>>, Context);
url_abs(<<$/, _/binary>> = Url, Context) ->
z_context:abs_url(Url, Context);
url_abs(<<"http:", _/binary>> = Url, _Context) ->
Url;
url_abs(<<"https:", _/binary>> = Url, _Context) ->
Url;
url_abs(Name, Context) ->
case filter_url:url(Name, Context) of
undefined -> undefined;
Url -> z_context:abs_url(Url, Context)
end.
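%% Usage sketch (added, illustrative): as a template filter this is applied as
%% {{ "/page/home" | url_abs }}, producing an absolute URL for the current site;
%% values that already carry a scheme (http:/https:) are returned unchanged.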
| null | https://raw.githubusercontent.com/zotonic/zotonic/852f627c28adf6e5212e8ad5383d4af3a2f25e3f/apps/zotonic_mod_base/src/filters/filter_url_abs.erl | erlang | @doc 'url_abs' filter, generates an url with hostname/protocol.
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | @author < >
2020
Copyright 2020
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(filter_url_abs).
-export([url_abs/2]).
url_abs(undefined, _Context) ->
undefined;
url_abs(<<>>, Context) ->
z_context:abs_url(<<>>, Context);
url_abs(<<$/, _/binary>> = Url, Context) ->
z_context:abs_url(Url, Context);
url_abs(<<"http:", _/binary>> = Url, _Context) ->
Url;
url_abs(<<"https:", _/binary>> = Url, _Context) ->
Url;
url_abs(Name, Context) ->
case filter_url:url(Name, Context) of
undefined -> undefined;
Url -> z_context:abs_url(Url, Context)
end.
|
3fb93858db3da5a7a1d0a1305f27017c27bd809179aeca9b794fd0998ee4413c | facebook/pyre-check | revealLocalsTest.ml |
(*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
open OUnit2
open IntegrationTest
let test_reveal_locals context =
let assert_type_errors = assert_type_errors ~context in
let assert_default_type_errors = assert_default_type_errors ~context in
assert_type_errors
{|
def f(a: int, b: int) -> int:
c = a + b
reveal_locals()
return c
|}
[{|Revealed locals [-2]: Revealed local types are:
c: `int`
a: `int`
b: `int`|}];
assert_type_errors
{|
from typing import Optional
c: Optional[int] = None
def f(a: int, b: int) -> int:
global c
c = a + b
reveal_locals()
return c
|}
[
{|Revealed locals [-2]: Revealed local types are:
c: `Optional[int]` (inferred: `int`)
a: `int`
b: `int`|};
];
assert_type_errors
{|
def foo(x: str) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
x: `str`|}];
assert_type_errors
{|
import typing
def foo(x: typing.Union[int, str]) -> None:
x = 1
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
x: `typing.Union[int, str]` (inferred: `typing_extensions.Literal[1]`)|};
];
assert_default_type_errors
{|
def foo(x) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
x: `typing.Any`|}];
assert_type_errors
{|
from builtins import int_to_str
def foo(x: int) -> None:
y = int_to_str(x)
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
y: `str`
x: `int`|}];
assert_type_errors
{|
def foo() -> int:
bar, baz = list(range(2))
reveal_locals()
return bar
|}
[{|Revealed locals [-2]: Revealed local types are:
bar: `int`
baz: `int`|}];
assert_type_errors
{|
import typing
def foo(s: typing.Sequence[float]) -> list[float]:
l = list(s)
bar, baz = l
reveal_locals()
return l
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `float`
baz: `float`
l: `typing.List[float]`
s: `typing.Sequence[float]`|};
];
assert_type_errors
{|
def foo() -> dict[str, int]:
d = dict(a = 1, b = 2)
bar = d['a']
reveal_locals()
return d
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `int`
d: `typing.Dict[str, int]`|};
];
assert_type_errors
{|
import typing
def foo(map: typing.Mapping[str, int]) -> dict[str, int]:
d = dict(map)
bar = d['a']
reveal_locals()
return d
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `int`
d: `typing.Dict[str, int]`
map: `typing.Mapping[str, int]`|};
];
assert_type_errors
{|
import typing
def foo(t: typing.Iterable[typing.Tuple[str, int]]) -> dict[str, int]:
d = dict(t)
bar = d['a']
reveal_locals()
return d
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `int`
d: `typing.Dict[str, int]`
t: `typing.Iterable[typing.Tuple[str, int]]`|};
];
assert_type_errors
{|
import typing
def foo(bar: typing.Union[int, str]) -> None:
if type(bar) is int:
reveal_locals()
else:
reveal_locals()
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `int`|};
{|Revealed locals [-2]: Revealed local types are:
bar: `str`|};
{|Revealed locals [-2]: Revealed local types are:
bar: `typing.Union[int, str]`|};
];
assert_type_errors
{|
x = 1.0
def foo() -> None:
global x
x = 1
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
x: `float` (inferred: `typing_extensions.Literal[1]`)|};
];
assert_type_errors
{|
import typing
class Foo:
attribute: typing.Optional[int] = 1
def foo() -> None:
y = Foo.attribute
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
y: `typing.Optional[int]`|}];
assert_type_errors
{|
import typing
class Foo:
attribute: typing.Optional[int] = 1
def foo() -> None:
Foo.attribute = 1
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
test.Foo: `typing.Type[Foo]`|}];
assert_type_errors
{|
class A:
def foo(self) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
self: `A`|}];
assert_type_errors
{|
class A:
def foo(self) -> None:
def bar() -> None:
pass
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `typing.Callable[[], None]`
self: `A`|};
];
assert_type_errors
{|
from typing import TypeVar, Generic
T = TypeVar("T")
class A(Generic[T]):
def foo(self) -> None:
def bar() -> None:
pass
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `typing.Callable[[], None]`
self: `A[Variable[T]]`|};
];
assert_type_errors
{|
class A:
@classmethod
def foo(cls) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
cls: `typing.Type[A]`|}];
assert_type_errors
{|
def foo() -> None:
def baz() -> None:
pass
def bar(x: int) -> int:
return x
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `typing.Callable[[Named(x, int)], int]`
baz: `typing.Callable[[], None]`|};
];
assert_type_errors
{|
def foo( *args: str, **kwargs: int) -> None:
def f() -> None:
x = args[0]
y = kwargs['key']
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
f: `typing.Callable[[], None]`
x: `str`
y: `int`
args: `typing.Tuple[str, ...]`
kwargs: `typing.Dict[str, int]`|};
];
assert_type_errors
{|
import builtins
class MyInt:
pass
int = MyInt
def f(x:int, y:builtins.int) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
x: `MyInt`
y: `int`|}];
assert_type_errors
{|
from typing import Any
class type:
def __init__(self, __name: str, __bases: tuple[type, ...], __dict: dict[str, Any], **kwds: Any) -> None:
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
__bases: `typing.Tuple[type, ...]`
__dict: `typing.Dict[str, typing.Any]`
__name: `str`
kwds: `typing.Dict[str, typing.Any]`
self: `type`|};
];
()
let () = "revealLocals" >::: ["reveal_locals" >:: test_reveal_locals] |> Test.run
| null | https://raw.githubusercontent.com/facebook/pyre-check/60fd4b92a6573a4d182baa6debfe38c3807a3614/source/analysis/test/integration/revealLocalsTest.ml | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open OUnit2
open IntegrationTest
let test_reveal_locals context =
let assert_type_errors = assert_type_errors ~context in
let assert_default_type_errors = assert_default_type_errors ~context in
assert_type_errors
{|
def f(a: int, b: int) -> int:
c = a + b
reveal_locals()
return c
|}
[{|Revealed locals [-2]: Revealed local types are:
c: `int`
a: `int`
b: `int`|}];
assert_type_errors
{|
from typing import Optional
c: Optional[int] = None
def f(a: int, b: int) -> int:
global c
c = a + b
reveal_locals()
return c
|}
[
{|Revealed locals [-2]: Revealed local types are:
c: `Optional[int]` (inferred: `int`)
a: `int`
b: `int`|};
];
assert_type_errors
{|
def foo(x: str) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
x: `str`|}];
assert_type_errors
{|
import typing
def foo(x: typing.Union[int, str]) -> None:
x = 1
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
x: `typing.Union[int, str]` (inferred: `typing_extensions.Literal[1]`)|};
];
assert_default_type_errors
{|
def foo(x) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
x: `typing.Any`|}];
assert_type_errors
{|
from builtins import int_to_str
def foo(x: int) -> None:
y = int_to_str(x)
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
y: `str`
x: `int`|}];
assert_type_errors
{|
def foo() -> int:
bar, baz = list(range(2))
reveal_locals()
return bar
|}
[{|Revealed locals [-2]: Revealed local types are:
bar: `int`
baz: `int`|}];
assert_type_errors
{|
import typing
def foo(s: typing.Sequence[float]) -> list[float]:
l = list(s)
bar, baz = l
reveal_locals()
return l
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `float`
baz: `float`
l: `typing.List[float]`
s: `typing.Sequence[float]`|};
];
assert_type_errors
{|
def foo() -> dict[str, int]:
d = dict(a = 1, b = 2)
bar = d['a']
reveal_locals()
return d
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `int`
d: `typing.Dict[str, int]`|};
];
assert_type_errors
{|
import typing
def foo(map: typing.Mapping[str, int]) -> dict[str, int]:
d = dict(map)
bar = d['a']
reveal_locals()
return d
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `int`
d: `typing.Dict[str, int]`
map: `typing.Mapping[str, int]`|};
];
assert_type_errors
{|
import typing
def foo(t: typing.Iterable[typing.Tuple[str, int]]) -> dict[str, int]:
d = dict(t)
bar = d['a']
reveal_locals()
return d
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `int`
d: `typing.Dict[str, int]`
t: `typing.Iterable[typing.Tuple[str, int]]`|};
];
assert_type_errors
{|
import typing
def foo(bar: typing.Union[int, str]) -> None:
if type(bar) is int:
reveal_locals()
else:
reveal_locals()
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `int`|};
{|Revealed locals [-2]: Revealed local types are:
bar: `str`|};
{|Revealed locals [-2]: Revealed local types are:
bar: `typing.Union[int, str]`|};
];
assert_type_errors
{|
x = 1.0
def foo() -> None:
global x
x = 1
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
x: `float` (inferred: `typing_extensions.Literal[1]`)|};
];
assert_type_errors
{|
import typing
class Foo:
attribute: typing.Optional[int] = 1
def foo() -> None:
y = Foo.attribute
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
y: `typing.Optional[int]`|}];
assert_type_errors
{|
import typing
class Foo:
attribute: typing.Optional[int] = 1
def foo() -> None:
Foo.attribute = 1
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
test.Foo: `typing.Type[Foo]`|}];
assert_type_errors
{|
class A:
def foo(self) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
self: `A`|}];
assert_type_errors
{|
class A:
def foo(self) -> None:
def bar() -> None:
pass
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `typing.Callable[[], None]`
self: `A`|};
];
assert_type_errors
{|
from typing import TypeVar, Generic
T = TypeVar("T")
class A(Generic[T]):
def foo(self) -> None:
def bar() -> None:
pass
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `typing.Callable[[], None]`
self: `A[Variable[T]]`|};
];
assert_type_errors
{|
class A:
@classmethod
def foo(cls) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
cls: `typing.Type[A]`|}];
assert_type_errors
{|
def foo() -> None:
def baz() -> None:
pass
def bar(x: int) -> int:
return x
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
bar: `typing.Callable[[Named(x, int)], int]`
baz: `typing.Callable[[], None]`|};
];
assert_type_errors
{|
def foo( *args: str, **kwargs: int) -> None:
def f() -> None:
x = args[0]
y = kwargs['key']
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
f: `typing.Callable[[], None]`
x: `str`
y: `int`
args: `typing.Tuple[str, ...]`
kwargs: `typing.Dict[str, int]`|};
];
assert_type_errors
{|
import builtins
class MyInt:
pass
int = MyInt
def f(x:int, y:builtins.int) -> None:
reveal_locals()
|}
[{|Revealed locals [-2]: Revealed local types are:
x: `MyInt`
y: `int`|}];
assert_type_errors
{|
from typing import Any
class type:
def __init__(self, __name: str, __bases: tuple[type, ...], __dict: dict[str, Any], **kwds: Any) -> None:
reveal_locals()
|}
[
{|Revealed locals [-2]: Revealed local types are:
__bases: `typing.Tuple[type, ...]`
__dict: `typing.Dict[str, typing.Any]`
__name: `str`
kwds: `typing.Dict[str, typing.Any]`
self: `type`|};
];
()
let () = "revealLocals" >::: ["reveal_locals" >:: test_reveal_locals] |> Test.run
|
|
d722123085322e6c99f34806f541b8ed91e3052cb5e8c270c244c05adeb54715 | avsm/eeww | example.ml | (* construct atomic variables *)
let atomic_1, atomic_2 = (Kcas.ref 0, Kcas.ref 3) in
(* construct kcas operation *)
let kcas = [ Kcas.mk_cas atomic_1 0 1; Kcas.mk_cas atomic_2 3 4 ] in
(* apply constructed kcas *)
ignore (Kcas.kCAS kcas);
(* atomic_1 = 1, atomic_2 = 4 *)
assert (Kcas.get atomic_1 = 1);
assert (Kcas.get atomic_2 = 4)
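(* Added note: Kcas.kCAS attempts the listed CAS operations as one atomic
   transaction; the updates are applied only if every expected value matches,
   which is why both asserts above hold after the call. *)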
| null | https://raw.githubusercontent.com/avsm/eeww/a316137bc7550870c9fd0c6a907d87e9d9810ae4/lib/kcas/test/example.ml | ocaml | construct atomic variables | let atomic_1, atomic_2 = (Kcas.ref 0, Kcas.ref 3) in
construct kcas operation
let kcas = [ Kcas.mk_cas atomic_1 0 1; Kcas.mk_cas atomic_2 3 4 ] in
apply constructed kcas
ignore (Kcas.kCAS kcas);
atomic_1 = 1 , atomic_2 = 4
assert (Kcas.get atomic_1 = 1);
assert (Kcas.get atomic_2 = 4)
|
e1e41e0807e1bab87e3d9b06f85b13283ce8bf307d73b21d031f8aeb14a813b3 | ndmitchell/cmdargs | Type.hs |
module System.Console.CmdArgs.Explicit.Type where
import Control.Arrow
import Control.Monad
import Data.Char
import Data.List
import Data.Maybe
import Data.Semigroup hiding (Arg)
import Prelude
-- | A name, either the name of a flag (@--/foo/@) or the name of a mode.
type Name = String
-- | A help message that goes with either a flag or a mode.
type Help = String
-- | The type of a flag, i.e. @--foo=/TYPE/@.
type FlagHelp = String
---------------------------------------------------------------------
-- UTILITY
| Parse a boolean , accepts as True : true yes on enabled 1 .
parseBool :: String -> Maybe Bool
parseBool s | ls `elem` true = Just True
| ls `elem` false = Just False
| otherwise = Nothing
where
ls = map toLower s
true = ["true","yes","on","enabled","1"]
false = ["false","no","off","disabled","0"]
---------------------------------------------------------------------
GROUPS
-- | A group of items (modes or flags). The items are treated as a list, but the
-- group structure is used when displaying the help message.
data Group a = Group
{groupUnnamed :: [a] -- ^ Normal items.
,groupHidden :: [a] -- ^ Items that are hidden (not displayed in the help message).
,groupNamed :: [(Help, [a])] -- ^ Items that have been grouped, along with a description of each group.
} deriving Show
instance Functor Group where
fmap f (Group a b c) = Group (map f a) (map f b) (map (second $ map f) c)
instance Semigroup (Group a) where
Group x1 x2 x3 <> Group y1 y2 y3 = Group (x1++y1) (x2++y2) (x3++y3)
instance Monoid (Group a) where
mempty = Group [] [] []
mappend = (<>)
-- | Convert a group into a list.
fromGroup :: Group a -> [a]
fromGroup (Group x y z) = x ++ y ++ concatMap snd z
-- | Convert a list into a group, placing all fields in 'groupUnnamed'.
toGroup :: [a] -> Group a
toGroup x = Group x [] []
---------------------------------------------------------------------
TYPES
-- | A mode. Do not use the 'Mode' constructor directly, instead
-- use 'mode' to construct the 'Mode' and then record updates.
Each mode has three main features :
--
-- * A list of submodes ('modeGroupModes')
--
-- * A list of flags ('modeGroupFlags')
--
* Optionally an unnamed argument ( ' ' )
--
-- To produce the help information for a mode, either use 'helpText' or 'show'.
data Mode a = Mode
{modeGroupModes :: Group (Mode a) -- ^ The available sub-modes
,modeNames :: [Name] -- ^ The names assigned to this mode (for the root mode, this name is used as the program name)
,modeValue :: a -- ^ Value to start with
,modeCheck :: a -> Either String a -- ^ Check the value reprsented by a mode is correct, after applying all flags
,modeReform :: a -> Maybe [String] -- ^ Given a value, try to generate the input arguments.
,modeExpandAt :: Bool -- ^ Expand @\@@ arguments with 'expandArgsAt', defaults to 'True', only applied if using an 'IO' processing function.
-- Only the root 'Mode's value will be used.
,modeHelp :: Help -- ^ Help text
,modeHelpSuffix :: [String] -- ^ A longer help suffix displayed after a mode
^ The unnamed arguments , a series of arguments , followed optionally by one for all remaining slots
,modeGroupFlags :: Group (Flag a) -- ^ Groups of flags
}
-- | Extract the modes from a 'Mode'
modeModes :: Mode a -> [Mode a]
modeModes = fromGroup . modeGroupModes
-- | Extract the flags from a 'Mode'
modeFlags :: Mode a -> [Flag a]
modeFlags = fromGroup . modeGroupFlags
-- | The 'FlagInfo' type has the following meaning:
--
--
> FlagReq FlagOpt FlagOptRare / FlagNone
-- > -xfoo -x=foo -x=foo -x -foo
-- > -x foo -x=foo -x foo -x foo
-- > -x=foo -x=foo -x=foo -x=foo
-- > --xx foo --xx=foo --xx foo --xx foo
-- > --xx=foo --xx=foo --xx=foo --xx=foo
data FlagInfo
= FlagReq -- ^ Required argument
| FlagOpt String -- ^ Optional argument
| FlagOptRare String -- ^ Optional argument that requires an = before the value
| FlagNone -- ^ No argument
deriving (Eq,Ord,Show)
| Extract the value from inside a ' ' or ' FlagOptRare ' , or raises an error .
fromFlagOpt :: FlagInfo -> String
fromFlagOpt (FlagOpt x) = x
fromFlagOpt (FlagOptRare x) = x
-- | A function to take a string, and a value, and either produce an error message
-- (@Left@), or a modified value (@Right@).
type Update a = String -> a -> Either String a
-- | A flag, consisting of a list of flag names and other information.
data Flag a = Flag
{flagNames :: [Name] -- ^ The names for the flag.
,flagInfo :: FlagInfo -- ^ Information about a flag's arguments.
,flagValue :: Update a -- ^ The way of processing a flag.
,flagType :: FlagHelp -- ^ The type of data for the flag argument, i.e. FILE\/DIR\/EXT
,flagHelp :: Help -- ^ The help message associated with this flag.
}
| An unnamed argument . Anything not starting with is considered an argument ,
-- apart from @\"-\"@ which is considered to be the argument @\"-\"@, and any arguments
-- following @\"--\"@. For example:
--
-- > programname arg1 -j - --foo arg3 -- -arg4 --arg5=1 arg6
--
-- Would have the arguments:
--
-- > ["arg1","-","arg3","-arg4","--arg5=1","arg6"]
data Arg a = Arg
{argValue :: Update a -- ^ A way of processing the argument.
,argType :: FlagHelp -- ^ The type of data for the argument, i.e. FILE\/DIR\/EXT
^ Is at least one of these arguments required , the command line will fail if none are set
}
---------------------------------------------------------------------
CHECK
-- | Check that a mode is well formed.
checkMode :: Mode a -> Maybe String
checkMode x = msum
[checkNames "modes" $ concatMap modeNames $ modeModes x
,msum $ map checkMode $ modeModes x
,checkGroup $ modeGroupModes x
,checkGroup $ modeGroupFlags x
,checkNames "flag names" $ concatMap flagNames $ modeFlags x]
where
checkGroup :: Group a -> Maybe String
checkGroup x = msum
[check "Empty group name" $ not $ any (null . fst) $ groupNamed x
,check "Empty group contents" $ not $ any (null . snd) $ groupNamed x]
checkNames :: String -> [Name] -> Maybe String
checkNames msg xs = check "Empty names" (not (any null xs)) `mplus` do
bad <- listToMaybe $ xs \\ nub xs
let dupe = filter (== bad) xs
return $ "Sanity check failed, multiple " ++ msg ++ ": " ++ unwords (map show dupe)
check :: String -> Bool -> Maybe String
check msg True = Nothing
check msg False = Just msg
---------------------------------------------------------------------
-- REMAP
-- | Like functor, but where the the argument isn't just covariant.
class Remap m where
| Convert between two values .
remap :: (a -> b) -- ^ Embed a value
-> (b -> (a, a -> b)) -- ^ Extract the mode and give a way of re-embedding
-> m a -> m b
-- | Restricted version of 'remap' where the values are isomorphic.
remap2 :: Remap m => (a -> b) -> (b -> a) -> m a -> m b
remap2 f g = remap f (\x -> (g x, f))
instance Remap Mode where
remap f g x = x
{modeGroupModes = fmap (remap f g) $ modeGroupModes x
,modeValue = f $ modeValue x
,modeCheck = \v -> let (a,b) = g v in fmap b $ modeCheck x a
,modeReform = modeReform x . fst . g
,modeArgs = (fmap (remap f g) *** fmap (remap f g)) $ modeArgs x
,modeGroupFlags = fmap (remap f g) $ modeGroupFlags x}
instance Remap Flag where
remap f g x = x{flagValue = remapUpdate f g $ flagValue x}
instance Remap Arg where
remap f g x = x{argValue = remapUpdate f g $ argValue x}
-- | Version of 'remap' for the 'Update' type alias.
remapUpdate :: (a -> b) -> (b -> (a, a -> b)) -> Update a -> Update b
remapUpdate f g upd = \s v -> let (a,b) = g v in fmap b $ upd s a
---------------------------------------------------------------------
-- MODE/MODES CREATORS
-- | Create an empty mode specifying only 'modeValue'. All other fields will usually be populated
-- using record updates.
modeEmpty :: a -> Mode a
modeEmpty x = Mode mempty [] x Right (const Nothing) True "" [] ([],Nothing) mempty
-- | Create a mode with a name, an initial value, some help text, a way of processing arguments
-- and a list of flags.
mode :: Name -> a -> Help -> Arg a -> [Flag a] -> Mode a
mode name value help arg flags = (modeEmpty value){modeNames=[name], modeHelp=help, modeArgs=([],Just arg), modeGroupFlags=toGroup flags}
-- | Create a list of modes, with a program name, an initial value, some help text and the child modes.
modes :: String -> a -> Help -> [Mode a] -> Mode a
modes name value help xs = (modeEmpty value){modeNames=[name], modeHelp=help, modeGroupModes=toGroup xs}
---------------------------------------------------------------------
-- FLAG CREATORS
-- | Create a flag taking no argument value, with a list of flag names, an update function
-- and some help text.
flagNone :: [Name] -> (a -> a) -> Help -> Flag a
flagNone names f help = Flag names FlagNone upd "" help
where upd _ x = Right $ f x
-- | Create a flag taking an optional argument value, with an optional value, a list of flag names,
-- an update function, the type of the argument and some help text.
flagOpt :: String -> [Name] -> Update a -> FlagHelp -> Help -> Flag a
flagOpt def names upd typ help = Flag names (FlagOpt def) upd typ help
-- | Create a flag taking a required argument value, with a list of flag names,
-- an update function, the type of the argument and some help text.
flagReq :: [Name] -> Update a -> FlagHelp -> Help -> Flag a
flagReq names upd typ help = Flag names FlagReq upd typ help
-- | Create an argument flag, with an update function and the type of the argument.
flagArg :: Update a -> FlagHelp -> Arg a
flagArg upd typ = Arg upd typ False
-- | Create a boolean flag, with a list of flag names, an update function and some help text.
flagBool :: [Name] -> (Bool -> a -> a) -> Help -> Flag a
flagBool names f help = Flag names (FlagOptRare "") upd "" help
where
upd s x = case if s == "" then Just True else parseBool s of
Just b -> Right $ f b x
Nothing -> Left "expected boolean value (true/false)"
| null | https://raw.githubusercontent.com/ndmitchell/cmdargs/7c206f464fa666b2654ae9da59cdff44e310de66/System/Console/CmdArgs/Explicit/Type.hs | haskell | | A name, either the name of a flag (@--/foo/@) or the name of a mode.
| A help message that goes with either a flag or a mode.
| The type of a flag, i.e. @--foo=/TYPE/@.
-------------------------------------------------------------------
UTILITY
-------------------------------------------------------------------
| A group of items (modes or flags). The items are treated as a list, but the
group structure is used when displaying the help message.
^ Normal items.
^ Items that are hidden (not displayed in the help message).
^ Items that have been grouped, along with a description of each group.
| Convert a group into a list.
| Convert a list into a group, placing all fields in 'groupUnnamed'.
-------------------------------------------------------------------
| A mode. Do not use the 'Mode' constructor directly, instead
use 'mode' to construct the 'Mode' and then record updates.
* A list of submodes ('modeGroupModes')
* A list of flags ('modeGroupFlags')
To produce the help information for a mode, either use 'helpText' or 'show'.
^ The available sub-modes
^ The names assigned to this mode (for the root mode, this name is used as the program name)
^ Value to start with
^ Check the value reprsented by a mode is correct, after applying all flags
^ Given a value, try to generate the input arguments.
^ Expand @\@@ arguments with 'expandArgsAt', defaults to 'True', only applied if using an 'IO' processing function.
Only the root 'Mode's value will be used.
^ Help text
^ A longer help suffix displayed after a mode
^ Groups of flags
| Extract the modes from a 'Mode'
| Extract the flags from a 'Mode'
| The 'FlagInfo' type has the following meaning:
> -xfoo -x=foo -x=foo -x -foo
> -x foo -x=foo -x foo -x foo
> -x=foo -x=foo -x=foo -x=foo
> --xx foo --xx=foo --xx foo --xx foo
> --xx=foo --xx=foo --xx=foo --xx=foo
^ Required argument
^ Optional argument
^ Optional argument that requires an = before the value
^ No argument
| A function to take a string, and a value, and either produce an error message
(@Left@), or a modified value (@Right@).
| A flag, consisting of a list of flag names and other information.
^ The names for the flag.
^ Information about a flag's arguments.
^ The way of processing a flag.
^ The type of data for the flag argument, i.e. FILE\/DIR\/EXT
^ The help message associated with this flag.
apart from @\"-\"@ which is considered to be the argument @\"-\"@, and any arguments
following @\"--\"@. For example:
> programname arg1 -j - --foo arg3 -- -arg4 --arg5=1 arg6
Would have the arguments:
> ["arg1","-","arg3","-arg4","--arg5=1","arg6"]
^ A way of processing the argument.
^ The type of data for the argument, i.e. FILE\/DIR\/EXT
-------------------------------------------------------------------
| Check that a mode is well formed.
-------------------------------------------------------------------
REMAP
| Like functor, but where the the argument isn't just covariant.
^ Embed a value
^ Extract the mode and give a way of re-embedding
| Restricted version of 'remap' where the values are isomorphic.
| Version of 'remap' for the 'Update' type alias.
-------------------------------------------------------------------
MODE/MODES CREATORS
| Create an empty mode specifying only 'modeValue'. All other fields will usually be populated
using record updates.
| Create a mode with a name, an initial value, some help text, a way of processing arguments
and a list of flags.
| Create a list of modes, with a program name, an initial value, some help text and the child modes.
-------------------------------------------------------------------
FLAG CREATORS
| Create a flag taking no argument value, with a list of flag names, an update function
and some help text.
| Create a flag taking an optional argument value, with an optional value, a list of flag names,
an update function, the type of the argument and some help text.
| Create a flag taking a required argument value, with a list of flag names,
an update function, the type of the argument and some help text.
| Create an argument flag, with an update function and the type of the argument.
| Create a boolean flag, with a list of flag names, an update function and some help text. |
module System.Console.CmdArgs.Explicit.Type where
import Control.Arrow
import Control.Monad
import Data.Char
import Data.List
import Data.Maybe
import Data.Semigroup hiding (Arg)
import Prelude
type Name = String
type Help = String
type FlagHelp = String
| Parse a boolean , accepts as True : true yes on enabled 1 .
parseBool :: String -> Maybe Bool
parseBool s | ls `elem` true = Just True
| ls `elem` false = Just False
| otherwise = Nothing
where
ls = map toLower s
true = ["true","yes","on","enabled","1"]
false = ["false","no","off","disabled","0"]
GROUPS
data Group a = Group
} deriving Show
instance Functor Group where
fmap f (Group a b c) = Group (map f a) (map f b) (map (second $ map f) c)
instance Semigroup (Group a) where
Group x1 x2 x3 <> Group y1 y2 y3 = Group (x1++y1) (x2++y2) (x3++y3)
instance Monoid (Group a) where
mempty = Group [] [] []
mappend = (<>)
fromGroup :: Group a -> [a]
fromGroup (Group x y z) = x ++ y ++ concatMap snd z
toGroup :: [a] -> Group a
toGroup x = Group x [] []
TYPES
Each mode has three main features :
* Optionally an unnamed argument ( ' ' )
data Mode a = Mode
^ The unnamed arguments , a series of arguments , followed optionally by one for all remaining slots
}
modeModes :: Mode a -> [Mode a]
modeModes = fromGroup . modeGroupModes
modeFlags :: Mode a -> [Flag a]
modeFlags = fromGroup . modeGroupFlags
> FlagReq FlagOpt FlagOptRare / FlagNone
data FlagInfo
deriving (Eq,Ord,Show)
| Extract the value from inside a ' ' or ' FlagOptRare ' , or raises an error .
fromFlagOpt :: FlagInfo -> String
fromFlagOpt (FlagOpt x) = x
fromFlagOpt (FlagOptRare x) = x
type Update a = String -> a -> Either String a
data Flag a = Flag
}
| An unnamed argument . Anything not starting with is considered an argument ,
data Arg a = Arg
^ Is at least one of these arguments required , the command line will fail if none are set
}
CHECK
checkMode :: Mode a -> Maybe String
checkMode x = msum
[checkNames "modes" $ concatMap modeNames $ modeModes x
,msum $ map checkMode $ modeModes x
,checkGroup $ modeGroupModes x
,checkGroup $ modeGroupFlags x
,checkNames "flag names" $ concatMap flagNames $ modeFlags x]
where
checkGroup :: Group a -> Maybe String
checkGroup x = msum
[check "Empty group name" $ not $ any (null . fst) $ groupNamed x
,check "Empty group contents" $ not $ any (null . snd) $ groupNamed x]
checkNames :: String -> [Name] -> Maybe String
checkNames msg xs = check "Empty names" (not (any null xs)) `mplus` do
bad <- listToMaybe $ xs \\ nub xs
let dupe = filter (== bad) xs
return $ "Sanity check failed, multiple " ++ msg ++ ": " ++ unwords (map show dupe)
check :: String -> Bool -> Maybe String
check msg True = Nothing
check msg False = Just msg
class Remap m where
| Convert between two values .
-> m a -> m b
remap2 :: Remap m => (a -> b) -> (b -> a) -> m a -> m b
remap2 f g = remap f (\x -> (g x, f))
instance Remap Mode where
remap f g x = x
{modeGroupModes = fmap (remap f g) $ modeGroupModes x
,modeValue = f $ modeValue x
,modeCheck = \v -> let (a,b) = g v in fmap b $ modeCheck x a
,modeReform = modeReform x . fst . g
,modeArgs = (fmap (remap f g) *** fmap (remap f g)) $ modeArgs x
,modeGroupFlags = fmap (remap f g) $ modeGroupFlags x}
instance Remap Flag where
remap f g x = x{flagValue = remapUpdate f g $ flagValue x}
instance Remap Arg where
remap f g x = x{argValue = remapUpdate f g $ argValue x}
remapUpdate :: (a -> b) -> (b -> (a, a -> b)) -> Update a -> Update b
remapUpdate f g upd = \s v -> let (a,b) = g v in fmap b $ upd s a
modeEmpty :: a -> Mode a
modeEmpty x = Mode mempty [] x Right (const Nothing) True "" [] ([],Nothing) mempty
mode :: Name -> a -> Help -> Arg a -> [Flag a] -> Mode a
mode name value help arg flags = (modeEmpty value){modeNames=[name], modeHelp=help, modeArgs=([],Just arg), modeGroupFlags=toGroup flags}
modes :: String -> a -> Help -> [Mode a] -> Mode a
modes name value help xs = (modeEmpty value){modeNames=[name], modeHelp=help, modeGroupModes=toGroup xs}
flagNone :: [Name] -> (a -> a) -> Help -> Flag a
flagNone names f help = Flag names FlagNone upd "" help
where upd _ x = Right $ f x
flagOpt :: String -> [Name] -> Update a -> FlagHelp -> Help -> Flag a
flagOpt def names upd typ help = Flag names (FlagOpt def) upd typ help
flagReq :: [Name] -> Update a -> FlagHelp -> Help -> Flag a
flagReq names upd typ help = Flag names FlagReq upd typ help
flagArg :: Update a -> FlagHelp -> Arg a
flagArg upd typ = Arg upd typ False
flagBool :: [Name] -> (Bool -> a -> a) -> Help -> Flag a
flagBool names f help = Flag names (FlagOptRare "") upd "" help
where
upd s x = case if s == "" then Just True else parseBool s of
Just b -> Right $ f b x
Nothing -> Left "expected boolean value (true/false)"
|
7cf5ea0e572eca21de0e06299580af3600798f752190e324859775c0e21e939a | rbkmoney/genlib | genlib_range.erl | -module(genlib_range).
%% @doc Module for working with number sequences (like lists:seq/2,3),
%% but more efficiently (i.e. without generating a list of numbers)
%%
%% Supports both forward- and backward-ranges (increasing and decreasing respectively)
-export([map/2]).
-export([foldl/3]).
-export([to_list/1]).
-type bound() :: integer().
-type step() :: neg_integer() | pos_integer().
-type t() :: {bound(), bound()} | {bound(), bound(), step()}.
-define(IS_RANGE(R),
((is_integer(element(1, R))) andalso
(is_integer(element(2, R))) andalso
(?IS_SIMPLE_RANGE(R) orelse ?IS_RANGE_WITH_STEP(R)))
).
-define(IS_SIMPLE_RANGE(R),
(tuple_size(R) == 2)
).
-define(IS_RANGE_WITH_STEP(R),
(tuple_size(R) == 3 andalso
is_integer(element(3, R)) andalso
element(3, R) /= 0)
).
%% @doc Map over range
-spec map(fun((integer()) -> T), t()) -> [T].
map(Fun0, Range) when is_function(Fun0, 1) ->
Fun1 = fun(Idx, Acc) ->
[Fun0(Idx) | Acc]
end,
lists:reverse(foldl(Fun1, [], Range));
map(_, _) ->
error(badarg).
@doc Fold over range from starting from the first boundary
-spec foldl(fun((integer(), T) -> T), T, t()) -> T.
foldl(Fun, Acc, Range) when is_function(Fun, 2), ?IS_RANGE(Range) ->
{From, To, Step} = to_extended_range(Range),
do_foldl(Fun, Acc, From, To, Step);
foldl(_, _, _) ->
error(badarg).
%% @doc Convert range to list
%% Somewhat similar to lists:seq/2,3, but covers all possible valid variations of arguments
-spec to_list(t()) -> [integer()].
to_list(Range) ->
{From, To, Step} = to_extended_range(Range),
if
From < To, Step < 0 -> [];
From > To, Step > 0 -> [];
true -> lists:seq(From, To, Step)
end.
%%
%% Internals
%%
do_foldl(_Fun, Acc, From, To, Step) when (From > To andalso Step > 0) -> Acc;
do_foldl(_Fun, Acc, From, To, Step) when (From < To andalso Step < 0) -> Acc;
do_foldl(Fun, Acc, From, To, Step) -> do_foldl(Fun, Fun(From, Acc), From + Step, To, Step).
to_extended_range({From, To}) ->
{From, To, 1};
to_extended_range({_From, _To, _Step} = Range) ->
Range.
| null | https://raw.githubusercontent.com/rbkmoney/genlib/2bbc54d4abe0f779d57c8f5911dce64d295b1cd1/src/genlib_range.erl | erlang | @doc Module for working with number sequences (like lists:seq/2,3),
but more efficiently (i.e. without generating a list of numbers)
Supports both forward- and backward-ranges (increasing and decreasing respectively)
@doc Map over range
@doc Convert range to list
Somewhat similar to lists:seq/2,3, but covers all possible valid variations of arguments
Internals
| -module(genlib_range).
-export([map/2]).
-export([foldl/3]).
-export([to_list/1]).
-type bound() :: integer().
-type step() :: neg_integer() | pos_integer().
-type t() :: {bound(), bound()} | {bound(), bound(), step()}.
-define(IS_RANGE(R),
((is_integer(element(1, R))) andalso
(is_integer(element(2, R))) andalso
(?IS_SIMPLE_RANGE(R) orelse ?IS_RANGE_WITH_STEP(R)))
).
-define(IS_SIMPLE_RANGE(R),
(tuple_size(R) == 2)
).
-define(IS_RANGE_WITH_STEP(R),
(tuple_size(R) == 3 andalso
is_integer(element(3, R)) andalso
element(3, R) /= 0)
).
-spec map(fun((integer()) -> T), t()) -> [T].
map(Fun0, Range) when is_function(Fun0, 1) ->
Fun1 = fun(Idx, Acc) ->
[Fun0(Idx) | Acc]
end,
lists:reverse(foldl(Fun1, [], Range));
map(_, _) ->
error(badarg).
@doc Fold over range from starting from the first boundary
-spec foldl(fun((integer(), T) -> T), T, t()) -> T.
foldl(Fun, Acc, Range) when is_function(Fun, 2), ?IS_RANGE(Range) ->
{From, To, Step} = to_extended_range(Range),
do_foldl(Fun, Acc, From, To, Step);
foldl(_, _, _) ->
error(badarg).
-spec to_list(t()) -> [integer()].
to_list(Range) ->
{From, To, Step} = to_extended_range(Range),
if
From < To, Step < 0 -> [];
From > To, Step > 0 -> [];
true -> lists:seq(From, To, Step)
end.
do_foldl(_Fun, Acc, From, To, Step) when (From > To andalso Step > 0) -> Acc;
do_foldl(_Fun, Acc, From, To, Step) when (From < To andalso Step < 0) -> Acc;
do_foldl(Fun, Acc, From, To, Step) -> do_foldl(Fun, Fun(From, Acc), From + Step, To, Step).
to_extended_range({From, To}) ->
{From, To, 1};
to_extended_range({_From, _To, _Step} = Range) ->
Range.
|
4a52019b7b9a0e741b98db5f3cf7a6855819e5d52494c8e8aa178826bc750cd5 | exoscale/clojure-kubernetes-client | authentication_v1.clj | (ns clojure-kubernetes-client.api.authentication-v1
(:require [clojure-kubernetes-client.core :refer [call-api check-required-params with-collection-format *api-context*]]
[clojure.spec.alpha :as s]
[spec-tools.core :as st]
[orchestra.core :refer [defn-spec]]
[clojure-kubernetes-client.specs.v1-deployment-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-error :refer :all]
[clojure-kubernetes-client.specs.v1-horizontal-pod-autoscaler-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-version :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-audit-sink-spec :refer :all]
[clojure-kubernetes-client.specs.v1-subject-access-review-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-mutating-webhook-configuration :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-status :refer :all]
[clojure-kubernetes-client.specs.v1-deployment-list :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-status :refer :all]
[clojure-kubernetes-client.specs.v1-lifecycle :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-status :refer :all]
[clojure-kubernetes-client.specs.v1-server-address-by-client-cidr :refer :all]
[clojure-kubernetes-client.specs.v1-glusterfs-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-volume-mount :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role-ref :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-status :refer :all]
[clojure-kubernetes-client.specs.v1-secret-volume-source :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-tls :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-list :refer :all]
[clojure-kubernetes-client.specs.v1-topology-selector-term :refer :all]
[clojure-kubernetes-client.specs.v1-initializers :refer :all]
[clojure-kubernetes-client.specs.v1-http-get-action :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-status :refer :all]
[clojure-kubernetes-client.specs.v1-node-config-source :refer :all]
[clojure-kubernetes-client.specs.v1-nfs-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-node-affinity :refer :all]
[clojure-kubernetes-client.specs.v1-secret-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject-access-review-status :refer :all]
[clojure-kubernetes-client.specs.v1-container-image :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler-status :refer :all]
[clojure-kubernetes-client.specs.v1-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-json-schema-props :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-node-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-service-account-token-projection :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-status :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-target :refer :all]
[clojure-kubernetes-client.specs.v1-lease :refer :all]
[clojure-kubernetes-client.specs.v1beta1-lease :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-allowed-csi-driver :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment-spec :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-condition :refer :all]
[clojure-kubernetes-client.specs.v2beta2-pods-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-spec :refer :all]
[clojure-kubernetes-client.specs.v1-node-selector-requirement :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject-rules-review-status :refer :all]
[clojure-kubernetes-client.specs.v1-cluster-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-topology-selector-label-requirement :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress :refer :all]
[clojure-kubernetes-client.specs.v1-deployment :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-spec :refer :all]
[clojure-kubernetes-client.specs.v1-deployment-strategy :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1-rolling-update-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-limit-range :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-scale-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cluster-role :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-subject :refer :all]
[clojure-kubernetes-client.specs.v1-pod-template-spec :refer :all]
[clojure-kubernetes-client.specs.v1-non-resource-attributes :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-spec :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment-source :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-priority-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-validation :refer :all]
[clojure-kubernetes-client.specs.v1-local-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-cluster-role-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service-spec :refer :all]
[clojure-kubernetes-client.specs.v1-namespace-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cluster-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1-api-service :refer :all]
[clojure-kubernetes-client.specs.v1-pod-affinity :refer :all]
[clojure-kubernetes-client.specs.v1-node-selector :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-tls :refer :all]
[clojure-kubernetes-client.specs.v1-cluster-role-binding :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-pod-security-policy-spec :refer :all]
[clojure-kubernetes-client.specs.v1-resource-quota-list :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-allowed-host-path :refer :all]
[clojure-kubernetes-client.specs.v1-self-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v1beta1-priority-class :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-pod-security-policy :refer :all]
[clojure-kubernetes-client.specs.v1-service-list :refer :all]
[clojure-kubernetes-client.specs.v1-lease-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-projection :refer :all]
[clojure-kubernetes-client.specs.v1-node-selector-term :refer :all]
[clojure-kubernetes-client.specs.v1-local-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-driver-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-event :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-subresource-scale :refer :all]
[clojure-kubernetes-client.specs.v1-component-status-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-scale-spec :refer :all]
[clojure-kubernetes-client.specs.v1-preconditions :refer :all]
[clojure-kubernetes-client.specs.v1beta1-pod-disruption-budget-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v1-api-group-list :refer :all]
[clojure-kubernetes-client.specs.v1-node-status :refer :all]
[clojure-kubernetes-client.specs.v1-node-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-validating-webhook-configuration-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-status :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-self-subject-rules-review :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-port :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-webhook-client-config :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler-condition :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-allowed-flex-volume :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment-spec :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-audit-sink :refer :all]
[clojure-kubernetes-client.specs.v1beta1-pod-disruption-budget-status :refer :all]
[clojure-kubernetes-client.specs.v1-pod-security-context :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-self-subject-rules-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cluster-role-list :refer :all]
[clojure-kubernetes-client.specs.v1-container-port :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-pod-preset :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-value-status :refer :all]
[clojure-kubernetes-client.specs.v1-probe :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-scale :refer :all]
[clojure-kubernetes-client.specs.v1-api-service-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cron-job-list :refer :all]
[clojure-kubernetes-client.specs.v2beta1-external-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1-glusterfs-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-role :refer :all]
[clojure-kubernetes-client.specs.v1-load-balancer-ingress :refer :all]
[clojure-kubernetes-client.specs.v1-volume-projection :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request-list :refer :all]
[clojure-kubernetes-client.specs.v1-fc-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v2beta1-external-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-egress-rule :refer :all]
[clojure-kubernetes-client.specs.v1-photon-persistent-disk-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-scope-selector :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment :refer :all]
[clojure-kubernetes-client.specs.v1-service-port :refer :all]
[clojure-kubernetes-client.specs.v1-flex-volume-source :refer :all]
[clojure-kubernetes-client.specs.v2beta2-object-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1-tcp-socket-action :refer :all]
[clojure-kubernetes-client.specs.v1-toleration :refer :all]
[clojure-kubernetes-client.specs.v1-endpoints :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-spec :refer :all]
[clojure-kubernetes-client.specs.apiregistration-v1beta1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-runtime-class :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-identifier :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-pod-list :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-node-config-source :refer :all]
[clojure-kubernetes-client.specs.v1-object-field-selector :refer :all]
[clojure-kubernetes-client.specs.v1-watch-event :refer :all]
[clojure-kubernetes-client.specs.v1-client-ip-config :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-audit-sink-list :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-cron-job :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-supplemental-groups-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment-status :refer :all]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-run-as-group-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-node-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-external-documentation :refer :all]
[clojure-kubernetes-client.specs.v1-pod-template-list :refer :all]
[clojure-kubernetes-client.specs.v1-rolling-update-daemon-set :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-scale-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-rule :refer :all]
[clojure-kubernetes-client.specs.v1-deployment-spec :refer :all]
[clojure-kubernetes-client.specs.v1-scoped-resource-selector-requirement :refer :all]
[clojure-kubernetes-client.specs.v1-container-state-running :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service-status :refer :all]
[clojure-kubernetes-client.specs.v1-horizontal-pod-autoscaler :refer :all]
[clojure-kubernetes-client.specs.v1beta1-storage-class :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-priority-class :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler-condition :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-run-as-user-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-secret-key-selector :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-aggregation-rule :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-rollback :refer :all]
[clojure-kubernetes-client.specs.v1-job :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-condition :refer :all]
[clojure-kubernetes-client.specs.v1-event-source :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-scale :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-cluster-role :refer :all]
[clojure-kubernetes-client.specs.v1-api-service-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-status :refer :all]
[clojure-kubernetes-client.specs.v1-endpoint-address :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-node-list :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-id-range :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-rolling-update-deployment :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-node-driver :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-cron-job-status :refer :all]
[clojure-kubernetes-client.specs.v1-non-resource-rule :refer :all]
[clojure-kubernetes-client.specs.v1beta1-rule-with-operations :refer :all]
[clojure-kubernetes-client.specs.v1-scale-spec :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler-status :refer :all]
[clojure-kubernetes-client.specs.admissionregistration-v1beta1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1-secret-reference :refer :all]
[clojure-kubernetes-client.specs.v1-self-subject-rules-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-initializer :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-pod-preset-list :refer :all]
[clojure-kubernetes-client.specs.v1-resource-quota-spec :refer :all]
[clojure-kubernetes-client.specs.v1-secret-env-source :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-http-ingress-rule-value :refer :all]
[clojure-kubernetes-client.specs.v1beta1-rolling-update-stateful-set-strategy :refer :all]
[clojure-kubernetes-client.specs.v1beta2-scale-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-fs-group-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-egress-rule :refer :all]
[clojure-kubernetes-client.specs.v2beta2-external-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-node-config-status :refer :all]
[clojure-kubernetes-client.specs.v1-namespace-spec :refer :all]
[clojure-kubernetes-client.specs.v1-host-alias :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-column-definition :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-rollback-config :refer :all]
[clojure-kubernetes-client.specs.v2beta1-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-downward-api-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-taint :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1-pod-status :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service-list :refer :all]
[clojure-kubernetes-client.specs.runtime-raw-extension :refer :all]
[clojure-kubernetes-client.specs.v1-pod-condition :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-backend :refer :all]
[clojure-kubernetes-client.specs.v1-policy-rule :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-http-ingress-path :refer :all]
[clojure-kubernetes-client.specs.v1beta1-rolling-update-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-validating-webhook-configuration :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-conversion :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-list :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-runtime-class-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-list :refer :all]
[clojure-kubernetes-client.specs.v1-host-path-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-security-context :refer :all]
[clojure-kubernetes-client.specs.v1beta1-runtime-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-list-meta :refer :all]
[clojure-kubernetes-client.specs.v1beta1-resource-attributes :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-token-review :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set-list :refer :all]
[clojure-kubernetes-client.specs.v2beta2-resource-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-empty-dir-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-spec :refer :all]
[clojure-kubernetes-client.specs.v1-subject :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-controller-revision :refer :all]
[clojure-kubernetes-client.specs.v1-container-state-terminated :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request-spec :refer :all]
[clojure-kubernetes-client.specs.v1-aggregation-rule :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-strategy :refer :all]
[clojure-kubernetes-client.specs.v1beta1-job-template-spec :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1-config-map :refer :all]
[clojure-kubernetes-client.specs.v2beta2-resource-metric-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-host-port-range :refer :all]
[clojure-kubernetes-client.specs.v1-subject-rules-review-status :refer :all]
[clojure-kubernetes-client.specs.v2beta2-external-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-event-series :refer :all]
[clojure-kubernetes-client.specs.v1-rbd-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-git-repo-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-attached-volume :refer :all]
[clojure-kubernetes-client.specs.v2beta1-pods-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-condition :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-backend :refer :all]
[clojure-kubernetes-client.specs.v1-se-linux-options :refer :all]
[clojure-kubernetes-client.specs.v1beta2-controller-revision-list :refer :all]
[clojure-kubernetes-client.specs.v1-downward-api-volume-file :refer :all]
[clojure-kubernetes-client.specs.v1-volume-device :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller-list :refer :all]
[clojure-kubernetes-client.specs.v1-event-series :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-status :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-env-from-source :refer :all]
[clojure-kubernetes-client.specs.v1-api-group :refer :all]
[clojure-kubernetes-client.specs.v1-binding :refer :all]
[clojure-kubernetes-client.specs.v1-pod-readiness-gate :refer :all]
[clojure-kubernetes-client.specs.v1-storage-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-cinder-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-policy-rule :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-id-range :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler :refer :all]
[clojure-kubernetes-client.specs.v1-api-service-condition :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-rollback-config :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-se-linux-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1beta1-local-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v1-node-system-info :refer :all]
[clojure-kubernetes-client.specs.v1-typed-local-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-flocker-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-endpoint-subset :refer :all]
[clojure-kubernetes-client.specs.v1-node-spec :refer :all]
[clojure-kubernetes-client.specs.v1-gce-persistent-disk-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1-node-daemon-endpoints :refer :all]
[clojure-kubernetes-client.specs.v1-cross-version-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-downward-api-projection :refer :all]
[clojure-kubernetes-client.specs.v1-status-cause :refer :all]
[clojure-kubernetes-client.specs.v1-scale-io-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-role-ref :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-status :refer :all]
[clojure-kubernetes-client.specs.v1-cluster-role :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role-list :refer :all]
[clojure-kubernetes-client.specs.v1-exec-action :refer :all]
[clojure-kubernetes-client.specs.v1-self-subject-rules-review :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-scale :refer :all]
[clojure-kubernetes-client.specs.v1-env-var :refer :all]
[clojure-kubernetes-client.specs.v1-resource-rule :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-limit-range-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-pod-security-policy-list :refer :all]
[clojure-kubernetes-client.specs.v1-volume-node-affinity :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-spec :refer :all]
[clojure-kubernetes-client.specs.v1-preferred-scheduling-term :refer :all]
[clojure-kubernetes-client.specs.v1-secret :refer :all]
[clojure-kubernetes-client.specs.v1-self-subject-access-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-rolling-update-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-label-selector :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-scale-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-mutating-webhook-configuration-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cron-job-status :refer :all]
[clojure-kubernetes-client.specs.v1-pod-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-status :refer :all]
[clojure-kubernetes-client.specs.v1-node :refer :all]
[clojure-kubernetes-client.specs.v1-container-state-waiting :refer :all]
[clojure-kubernetes-client.specs.v1-controller-revision :refer :all]
[clojure-kubernetes-client.specs.v1beta1-user-info :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-cron-job-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler :refer :all]
[clojure-kubernetes-client.specs.v1-horizontal-pod-autoscaler-status :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-run-as-group-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-list :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-condition :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-job-template-spec :refer :all]
[clojure-kubernetes-client.specs.v1-portworx-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-status-details :refer :all]
[clojure-kubernetes-client.specs.v1-ip-block :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-storage-class-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-driver :refer :all]
[clojure-kubernetes-client.specs.v1-user-info :refer :all]
[clojure-kubernetes-client.specs.v1-volume-error :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request :refer :all]
[clojure-kubernetes-client.specs.v1-local-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v2beta1-pods-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set-list :refer :all]
[clojure-kubernetes-client.specs.v2beta1-cross-version-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1beta1-non-resource-rule :refer :all]
[clojure-kubernetes-client.specs.v1-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1-delete-options :refer :all]
[clojure-kubernetes-client.specs.v1-resource-attributes :refer :all]
[clojure-kubernetes-client.specs.v1-priority-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-job-spec :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-env-source :refer :all]
[clojure-kubernetes-client.specs.v1-job-condition :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-allowed-csi-driver :refer :all]
[clojure-kubernetes-client.specs.v1-component-condition :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-pod-preset-spec :refer :all]
[clojure-kubernetes-client.specs.v1-service :refer :all]
[clojure-kubernetes-client.specs.v1-pod-dns-config :refer :all]
[clojure-kubernetes-client.specs.v1-cinder-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.admissionregistration-v1beta1-webhook-client-config :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-peer :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-eviction :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-group-version-for-discovery :refer :all]
[clojure-kubernetes-client.specs.v2beta1-resource-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-service-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-list :refer :all]
[clojure-kubernetes-client.specs.v1-api-versions :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-list :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment-list :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta2-rolling-update-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-pod-dns-config-option :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-runtime-class-spec :refer :all]
[clojure-kubernetes-client.specs.v1-priority-class :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-http-ingress-path :refer :all]
[clojure-kubernetes-client.specs.v1-resource-field-selector :refer :all]
[clojure-kubernetes-client.specs.v1-container-state :refer :all]
[clojure-kubernetes-client.specs.v2beta1-resource-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1-resource-quota :refer :all]
[clojure-kubernetes-client.specs.v1beta1-non-resource-attributes :refer :all]
[clojure-kubernetes-client.specs.v1-subject-access-review-spec :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-supplemental-groups-strategy-options :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-spec :refer :all]
[clojure-kubernetes-client.specs.v1-scale-io-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-controller-revision-list :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-http-ingress-rule-value :refer :all]
[clojure-kubernetes-client.specs.v1-secret-projection :refer :all]
[clojure-kubernetes-client.specs.v1-session-affinity-config :refer :all]
[clojure-kubernetes-client.specs.v1-sysctl :refer :all]
[clojure-kubernetes-client.specs.v1beta1-lease-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-list :refer :all]
[clojure-kubernetes-client.specs.v1-managed-fields-entry :refer :all]
[clojure-kubernetes-client.specs.v1-resource-requirements :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cluster-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-policy-rule :refer :all]
[clojure-kubernetes-client.specs.v1-owner-reference :refer :all]
[clojure-kubernetes-client.specs.v1-rbd-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-event-list :refer :all]
[clojure-kubernetes-client.specs.v1-key-to-path :refer :all]
[clojure-kubernetes-client.specs.v2beta2-object-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-self-subject-access-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-csi-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-container-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-event-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-subresources :refer :all]
[clojure-kubernetes-client.specs.v1-role-list :refer :all]
[clojure-kubernetes-client.specs.v1-api-resource-list :refer :all]
[clojure-kubernetes-client.specs.v1-azure-disk-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta2-scale :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-spec :refer :all]
[clojure-kubernetes-client.specs.v2beta1-object-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-ip-block :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-handler :refer :all]
[clojure-kubernetes-client.specs.v2beta2-cross-version-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-load-balancer-status :refer :all]
[clojure-kubernetes-client.specs.v1-pod-anti-affinity :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-env-var-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-names :refer :all]
[clojure-kubernetes-client.specs.v1beta2-rolling-update-stateful-set-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-namespace-status :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller-spec :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-pod-security-policy-spec :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-allowed-flex-volume :refer :all]
[clojure-kubernetes-client.specs.v1-quobyte-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-key-selector :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-list :refer :all]
[clojure-kubernetes-client.specs.v1-capabilities :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-list :refer :all]
[clojure-kubernetes-client.specs.v2beta1-metric-spec :refer :all]
[clojure-kubernetes-client.specs.v1-token-review :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-list :refer :all]
[clojure-kubernetes-client.specs.v2beta2-pods-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-scale-status :refer :all]
[clojure-kubernetes-client.specs.v1-component-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-cluster-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1beta1-event :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-rollback :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-run-as-user-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-aws-elastic-block-store-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-controller-revision-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-aggregation-rule :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-list :refer :all]
[clojure-kubernetes-client.specs.v1beta2-scale-spec :refer :all]
[clojure-kubernetes-client.specs.v1-label-selector-requirement :refer :all]
[clojure-kubernetes-client.specs.v1-azure-file-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-storage-os-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-volume :refer :all]
[clojure-kubernetes-client.specs.v1-flex-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-horizontal-pod-autoscaler-list :refer :all]
[clojure-kubernetes-client.specs.v1-weighted-pod-affinity-term :refer :all]
[clojure-kubernetes-client.specs.v1-ceph-fs-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-pod-template :refer :all]
[clojure-kubernetes-client.specs.apiextensions-v1beta1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1-limit-range-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-resource-rule :refer :all]
[clojure-kubernetes-client.specs.v1-iscsi-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-endpoint :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-cluster-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-error :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set :refer :all]
[clojure-kubernetes-client.specs.v1-token-review-status :refer :all]
[clojure-kubernetes-client.specs.v1-service-account-list :refer :all]
[clojure-kubernetes-client.specs.v1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1beta1-priority-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-token-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-rule :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cron-job :refer :all]
[clojure-kubernetes-client.specs.v1beta2-controller-revision :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-spec :refer :all]
[clojure-kubernetes-client.specs.v2beta1-object-metric-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-pod-security-policy :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-pod-security-policy-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-ingress-rule :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-fs-group-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-service-account :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-lease-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-runtime-class :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-spec :refer :all]
[clojure-kubernetes-client.specs.version-info :refer :all]
[clojure-kubernetes-client.specs.v1-pod-affinity-term :refer :all]
[clojure-kubernetes-client.specs.v1beta1-token-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-resource-quota-status :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-cron-job-spec :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-allowed-host-path :refer :all]
[clojure-kubernetes-client.specs.v1-endpoint-port :refer :all]
[clojure-kubernetes-client.specs.v1beta1-token-review-status :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller :refer :all]
[clojure-kubernetes-client.specs.v1-affinity :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-status :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-list :refer :all]
[clojure-kubernetes-client.specs.v1-job-status :refer :all]
[clojure-kubernetes-client.specs.v1-service-spec :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-host-port-range :refer :all]
[clojure-kubernetes-client.specs.v1-ceph-fs-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-job-list :refer :all]
[clojure-kubernetes-client.specs.v1-azure-file-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-spec :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-webhook-throttle-config :refer :all]
[clojure-kubernetes-client.specs.v1-lease-spec :refer :all]
[clojure-kubernetes-client.specs.v1-api-resource :refer :all]
[clojure-kubernetes-client.specs.v1-container :refer :all]
[clojure-kubernetes-client.specs.v1beta1-webhook :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cron-job-spec :refer :all]
[clojure-kubernetes-client.specs.v1-namespace :refer :all]
[clojure-kubernetes-client.specs.v1-csi-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-pod-disruption-budget-spec :refer :all]
[clojure-kubernetes-client.specs.v1-node-address :refer :all]
[clojure-kubernetes-client.specs.v1-limit-range-item :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-iscsi-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-cluster-role-list :refer :all]
[clojure-kubernetes-client.specs.v1-deployment-condition :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-ingress-rule :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-driver-list :refer :all]
[clojure-kubernetes-client.specs.v1-http-header :refer :all]
[clojure-kubernetes-client.specs.v1-rolling-update-stateful-set-strategy :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta2-rolling-update-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-port :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-pod :refer :all]
[clojure-kubernetes-client.specs.v1-vsphere-virtual-disk-volume-source :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-webhook :refer :all]
[clojure-kubernetes-client.specs.v1-projected-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-peer :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role :refer :all]
[clojure-kubernetes-client.specs.v1-storage-class :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role-ref :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject-access-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-pod-disruption-budget :refer :all]
[clojure-kubernetes-client.specs.v1-endpoints-list :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-policy :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-node :refer :all]
[clojure-kubernetes-client.specs.v1-storage-os-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-api-service-spec :refer :all]
[clojure-kubernetes-client.specs.apiextensions-v1beta1-webhook-client-config :refer :all]
[clojure-kubernetes-client.specs.v1beta1-self-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-se-linux-strategy-options :refer :all]
)
(:import (java.io File)))
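;; Generated bindings for the Kubernetes authentication.k8s.io/v1 API group:
;; TokenReview creation and API-resource discovery. Each endpoint comes as a
;; low-level *-with-http-info function plus a convenience wrapper that returns
;; just the (optionally spec-decoded) response body.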
(defn-spec create-token-review-with-http-info any?
"
create a TokenReview"
([body v1-token-review, ] (create-token-review-with-http-info body nil))
([body v1-token-review, {:keys [dryRun fieldManager pretty]} (s/map-of keyword? any?)]
(check-required-params body)
(call-api "/apis/authentication.k8s.io/v1/tokenreviews" :post
{:path-params {}
:header-params {}
:query-params {"dryRun" dryRun "fieldManager" fieldManager "pretty" pretty }
:form-params {}
:body-param body
:content-types []
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
:auth-names ["BearerToken"]})))
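;; create-token-review below extracts :data from the response map returned above
;; and, when :decode-models is set in *api-context*, coerces it with st/decode.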
(defn-spec create-token-review v1-token-review
"
create a TokenReview"
([body v1-token-review, ] (create-token-review body nil))
([body v1-token-review, optional-params any?]
(let [res (:data (create-token-review-with-http-info body optional-params))]
(if (:decode-models *api-context*)
(st/decode v1-token-review res st/string-transformer)
res))))
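;; Hypothetical usage sketch (not part of the generated client). The body map
;; below follows the upstream Kubernetes TokenReview schema and the token value
;; is a placeholder; both are assumptions to adapt to your cluster.
(comment
  ;; Ask the API server whether a bearer token is valid and who it maps to.
  (create-token-review
    {:apiVersion "authentication.k8s.io/v1"
     :kind       "TokenReview"
     :spec       {:token "<bearer-token-to-check>"}}
    {:pretty "true"})
  ;; => a v1-token-review map; its :status should report :authenticated and :user.
  )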
(defn-spec get-api-resources-with-http-info any?
"
get available resources"
[]
(call-api "/apis/authentication.k8s.io/v1/" :get
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
:auth-names ["BearerToken"]}))
(defn-spec get-api-resources v1-api-resource-list
"
get available resources"
[]
(let [res (:data (get-api-resources-with-http-info))]
(if (:decode-models *api-context*)
(st/decode v1-api-resource-list res st/string-transformer)
res)))
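;; Hypothetical usage sketch (not part of the generated client): discover which
;; resources this API group serves. The :resources key is assumed from the
;; Kubernetes APIResourceList schema.
(comment
  (get-api-resources)
  ;; => a v1-api-resource-list map; its :resources vector should include the
  ;;    tokenreviews resource exposed by authentication.k8s.io/v1.
  )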
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/api/authentication_v1.clj | clojure | (ns clojure-kubernetes-client.api.authentication-v1
(:require [clojure-kubernetes-client.core :refer [call-api check-required-params with-collection-format *api-context*]]
[clojure.spec.alpha :as s]
[spec-tools.core :as st]
[orchestra.core :refer [defn-spec]]
[clojure-kubernetes-client.specs.v1-deployment-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-error :refer :all]
[clojure-kubernetes-client.specs.v1-horizontal-pod-autoscaler-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-version :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-audit-sink-spec :refer :all]
[clojure-kubernetes-client.specs.v1-subject-access-review-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-mutating-webhook-configuration :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-status :refer :all]
[clojure-kubernetes-client.specs.v1-deployment-list :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-status :refer :all]
[clojure-kubernetes-client.specs.v1-lifecycle :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-status :refer :all]
[clojure-kubernetes-client.specs.v1-server-address-by-client-cidr :refer :all]
[clojure-kubernetes-client.specs.v1-glusterfs-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-volume-mount :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role-ref :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-status :refer :all]
[clojure-kubernetes-client.specs.v1-secret-volume-source :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-tls :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-list :refer :all]
[clojure-kubernetes-client.specs.v1-topology-selector-term :refer :all]
[clojure-kubernetes-client.specs.v1-initializers :refer :all]
[clojure-kubernetes-client.specs.v1-http-get-action :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-status :refer :all]
[clojure-kubernetes-client.specs.v1-node-config-source :refer :all]
[clojure-kubernetes-client.specs.v1-nfs-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-node-affinity :refer :all]
[clojure-kubernetes-client.specs.v1-secret-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject-access-review-status :refer :all]
[clojure-kubernetes-client.specs.v1-container-image :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler-status :refer :all]
[clojure-kubernetes-client.specs.v1-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-json-schema-props :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-node-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-service-account-token-projection :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-status :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-target :refer :all]
[clojure-kubernetes-client.specs.v1-lease :refer :all]
[clojure-kubernetes-client.specs.v1beta1-lease :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-allowed-csi-driver :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment-spec :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-condition :refer :all]
[clojure-kubernetes-client.specs.v2beta2-pods-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-spec :refer :all]
[clojure-kubernetes-client.specs.v1-node-selector-requirement :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject-rules-review-status :refer :all]
[clojure-kubernetes-client.specs.v1-cluster-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-topology-selector-label-requirement :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress :refer :all]
[clojure-kubernetes-client.specs.v1-deployment :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-spec :refer :all]
[clojure-kubernetes-client.specs.v1-deployment-strategy :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1-rolling-update-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-limit-range :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-scale-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cluster-role :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-subject :refer :all]
[clojure-kubernetes-client.specs.v1-pod-template-spec :refer :all]
[clojure-kubernetes-client.specs.v1-non-resource-attributes :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-spec :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment-source :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-priority-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-validation :refer :all]
[clojure-kubernetes-client.specs.v1-local-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-cluster-role-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service-spec :refer :all]
[clojure-kubernetes-client.specs.v1-namespace-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cluster-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1-api-service :refer :all]
[clojure-kubernetes-client.specs.v1-pod-affinity :refer :all]
[clojure-kubernetes-client.specs.v1-node-selector :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-tls :refer :all]
[clojure-kubernetes-client.specs.v1-cluster-role-binding :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-pod-security-policy-spec :refer :all]
[clojure-kubernetes-client.specs.v1-resource-quota-list :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-allowed-host-path :refer :all]
[clojure-kubernetes-client.specs.v1-self-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v1beta1-priority-class :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-pod-security-policy :refer :all]
[clojure-kubernetes-client.specs.v1-service-list :refer :all]
[clojure-kubernetes-client.specs.v1-lease-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-projection :refer :all]
[clojure-kubernetes-client.specs.v1-node-selector-term :refer :all]
[clojure-kubernetes-client.specs.v1-local-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-driver-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-event :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-subresource-scale :refer :all]
[clojure-kubernetes-client.specs.v1-component-status-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-scale-spec :refer :all]
[clojure-kubernetes-client.specs.v1-preconditions :refer :all]
[clojure-kubernetes-client.specs.v1beta1-pod-disruption-budget-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v1-api-group-list :refer :all]
[clojure-kubernetes-client.specs.v1-node-status :refer :all]
[clojure-kubernetes-client.specs.v1-node-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-validating-webhook-configuration-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-status :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-self-subject-rules-review :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-port :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-webhook-client-config :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler-condition :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-allowed-flex-volume :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment-spec :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-audit-sink :refer :all]
[clojure-kubernetes-client.specs.v1beta1-pod-disruption-budget-status :refer :all]
[clojure-kubernetes-client.specs.v1-pod-security-context :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-self-subject-rules-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cluster-role-list :refer :all]
[clojure-kubernetes-client.specs.v1-container-port :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-pod-preset :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-value-status :refer :all]
[clojure-kubernetes-client.specs.v1-probe :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-scale :refer :all]
[clojure-kubernetes-client.specs.v1-api-service-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cron-job-list :refer :all]
[clojure-kubernetes-client.specs.v2beta1-external-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1-glusterfs-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-role :refer :all]
[clojure-kubernetes-client.specs.v1-load-balancer-ingress :refer :all]
[clojure-kubernetes-client.specs.v1-volume-projection :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request-list :refer :all]
[clojure-kubernetes-client.specs.v1-fc-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v2beta1-external-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-egress-rule :refer :all]
[clojure-kubernetes-client.specs.v1-photon-persistent-disk-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-scope-selector :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment :refer :all]
[clojure-kubernetes-client.specs.v1-service-port :refer :all]
[clojure-kubernetes-client.specs.v1-flex-volume-source :refer :all]
[clojure-kubernetes-client.specs.v2beta2-object-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1-tcp-socket-action :refer :all]
[clojure-kubernetes-client.specs.v1-toleration :refer :all]
[clojure-kubernetes-client.specs.v1-endpoints :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-spec :refer :all]
[clojure-kubernetes-client.specs.apiregistration-v1beta1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-runtime-class :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-identifier :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-pod-list :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-node-config-source :refer :all]
[clojure-kubernetes-client.specs.v1-object-field-selector :refer :all]
[clojure-kubernetes-client.specs.v1-watch-event :refer :all]
[clojure-kubernetes-client.specs.v1-client-ip-config :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-audit-sink-list :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-cron-job :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-supplemental-groups-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment-status :refer :all]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-run-as-group-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-node-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-external-documentation :refer :all]
[clojure-kubernetes-client.specs.v1-pod-template-list :refer :all]
[clojure-kubernetes-client.specs.v1-rolling-update-daemon-set :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-scale-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-rule :refer :all]
[clojure-kubernetes-client.specs.v1-deployment-spec :refer :all]
[clojure-kubernetes-client.specs.v1-scoped-resource-selector-requirement :refer :all]
[clojure-kubernetes-client.specs.v1-container-state-running :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service-status :refer :all]
[clojure-kubernetes-client.specs.v1-horizontal-pod-autoscaler :refer :all]
[clojure-kubernetes-client.specs.v1beta1-storage-class :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-priority-class :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler-condition :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-run-as-user-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-secret-key-selector :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-aggregation-rule :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-rollback :refer :all]
[clojure-kubernetes-client.specs.v1-job :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-condition :refer :all]
[clojure-kubernetes-client.specs.v1-event-source :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-scale :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-cluster-role :refer :all]
[clojure-kubernetes-client.specs.v1-api-service-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-status :refer :all]
[clojure-kubernetes-client.specs.v1-endpoint-address :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-node-list :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-id-range :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-rolling-update-deployment :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-node-driver :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-cron-job-status :refer :all]
[clojure-kubernetes-client.specs.v1-non-resource-rule :refer :all]
[clojure-kubernetes-client.specs.v1beta1-rule-with-operations :refer :all]
[clojure-kubernetes-client.specs.v1-scale-spec :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler-status :refer :all]
[clojure-kubernetes-client.specs.admissionregistration-v1beta1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1-secret-reference :refer :all]
[clojure-kubernetes-client.specs.v1-self-subject-rules-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-initializer :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-pod-preset-list :refer :all]
[clojure-kubernetes-client.specs.v1-resource-quota-spec :refer :all]
[clojure-kubernetes-client.specs.v1-secret-env-source :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-http-ingress-rule-value :refer :all]
[clojure-kubernetes-client.specs.v1beta1-rolling-update-stateful-set-strategy :refer :all]
[clojure-kubernetes-client.specs.v1beta2-scale-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-fs-group-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-egress-rule :refer :all]
[clojure-kubernetes-client.specs.v2beta2-external-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-node-config-status :refer :all]
[clojure-kubernetes-client.specs.v1-namespace-spec :refer :all]
[clojure-kubernetes-client.specs.v1-host-alias :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-column-definition :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-rollback-config :refer :all]
[clojure-kubernetes-client.specs.v2beta1-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-downward-api-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-taint :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1-pod-status :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-api-service-list :refer :all]
[clojure-kubernetes-client.specs.runtime-raw-extension :refer :all]
[clojure-kubernetes-client.specs.v1-pod-condition :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-backend :refer :all]
[clojure-kubernetes-client.specs.v1-policy-rule :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-http-ingress-path :refer :all]
[clojure-kubernetes-client.specs.v1beta1-rolling-update-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-validating-webhook-configuration :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-conversion :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-list :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-runtime-class-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-list :refer :all]
[clojure-kubernetes-client.specs.v1-host-path-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-security-context :refer :all]
[clojure-kubernetes-client.specs.v1beta1-runtime-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-list-meta :refer :all]
[clojure-kubernetes-client.specs.v1beta1-resource-attributes :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-token-review :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set-list :refer :all]
[clojure-kubernetes-client.specs.v2beta2-resource-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-empty-dir-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-spec :refer :all]
[clojure-kubernetes-client.specs.v1-subject :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-controller-revision :refer :all]
[clojure-kubernetes-client.specs.v1-container-state-terminated :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request-spec :refer :all]
[clojure-kubernetes-client.specs.v1-aggregation-rule :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-strategy :refer :all]
[clojure-kubernetes-client.specs.v1beta1-job-template-spec :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1-config-map :refer :all]
[clojure-kubernetes-client.specs.v2beta2-resource-metric-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-host-port-range :refer :all]
[clojure-kubernetes-client.specs.v1-subject-rules-review-status :refer :all]
[clojure-kubernetes-client.specs.v2beta2-external-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-event-series :refer :all]
[clojure-kubernetes-client.specs.v1-rbd-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-git-repo-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-attached-volume :refer :all]
[clojure-kubernetes-client.specs.v2beta1-pods-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-condition :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-backend :refer :all]
[clojure-kubernetes-client.specs.v1-se-linux-options :refer :all]
[clojure-kubernetes-client.specs.v1beta2-controller-revision-list :refer :all]
[clojure-kubernetes-client.specs.v1-downward-api-volume-file :refer :all]
[clojure-kubernetes-client.specs.v1-volume-device :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller-list :refer :all]
[clojure-kubernetes-client.specs.v1-event-series :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-status :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-env-from-source :refer :all]
[clojure-kubernetes-client.specs.v1-api-group :refer :all]
[clojure-kubernetes-client.specs.v1-binding :refer :all]
[clojure-kubernetes-client.specs.v1-pod-readiness-gate :refer :all]
[clojure-kubernetes-client.specs.v1-storage-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-cinder-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-policy-rule :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-id-range :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler :refer :all]
[clojure-kubernetes-client.specs.v1-api-service-condition :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-rollback-config :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-se-linux-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1beta1-local-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v1-node-system-info :refer :all]
[clojure-kubernetes-client.specs.v1-typed-local-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-flocker-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-endpoint-subset :refer :all]
[clojure-kubernetes-client.specs.v1-node-spec :refer :all]
[clojure-kubernetes-client.specs.v1-gce-persistent-disk-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1-node-daemon-endpoints :refer :all]
[clojure-kubernetes-client.specs.v1-cross-version-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-downward-api-projection :refer :all]
[clojure-kubernetes-client.specs.v1-status-cause :refer :all]
[clojure-kubernetes-client.specs.v1-scale-io-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-role-ref :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-status :refer :all]
[clojure-kubernetes-client.specs.v1-cluster-role :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role-list :refer :all]
[clojure-kubernetes-client.specs.v1-exec-action :refer :all]
[clojure-kubernetes-client.specs.v1-self-subject-rules-review :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-scale :refer :all]
[clojure-kubernetes-client.specs.v1-env-var :refer :all]
[clojure-kubernetes-client.specs.v1-resource-rule :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-limit-range-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-pod-security-policy-list :refer :all]
[clojure-kubernetes-client.specs.v1-volume-node-affinity :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-spec :refer :all]
[clojure-kubernetes-client.specs.v1-preferred-scheduling-term :refer :all]
[clojure-kubernetes-client.specs.v1-secret :refer :all]
[clojure-kubernetes-client.specs.v1-self-subject-access-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-rolling-update-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-label-selector :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-scale-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-mutating-webhook-configuration-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cron-job-status :refer :all]
[clojure-kubernetes-client.specs.v1-pod-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-status :refer :all]
[clojure-kubernetes-client.specs.v1-node :refer :all]
[clojure-kubernetes-client.specs.v1-container-state-waiting :refer :all]
[clojure-kubernetes-client.specs.v1-controller-revision :refer :all]
[clojure-kubernetes-client.specs.v1beta1-user-info :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-cron-job-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler :refer :all]
[clojure-kubernetes-client.specs.v1-horizontal-pod-autoscaler-status :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-run-as-group-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-list :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-condition :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-job-template-spec :refer :all]
[clojure-kubernetes-client.specs.v1-portworx-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-status-details :refer :all]
[clojure-kubernetes-client.specs.v1-ip-block :refer :all]
[clojure-kubernetes-client.specs.v1beta1-volume-attachment-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-storage-class-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-driver :refer :all]
[clojure-kubernetes-client.specs.v1-user-info :refer :all]
[clojure-kubernetes-client.specs.v1-volume-error :refer :all]
[clojure-kubernetes-client.specs.v1beta1-certificate-signing-request :refer :all]
[clojure-kubernetes-client.specs.v1-local-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v2beta1-pods-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set-list :refer :all]
[clojure-kubernetes-client.specs.v2beta1-cross-version-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1beta1-non-resource-rule :refer :all]
[clojure-kubernetes-client.specs.v1-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1-delete-options :refer :all]
[clojure-kubernetes-client.specs.v1-resource-attributes :refer :all]
[clojure-kubernetes-client.specs.v1-priority-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-job-spec :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-env-source :refer :all]
[clojure-kubernetes-client.specs.v1-job-condition :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-allowed-csi-driver :refer :all]
[clojure-kubernetes-client.specs.v1-component-condition :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-pod-preset-spec :refer :all]
[clojure-kubernetes-client.specs.v1-service :refer :all]
[clojure-kubernetes-client.specs.v1-pod-dns-config :refer :all]
[clojure-kubernetes-client.specs.v1-cinder-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.admissionregistration-v1beta1-webhook-client-config :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-peer :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-eviction :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-group-version-for-discovery :refer :all]
[clojure-kubernetes-client.specs.v2beta1-resource-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-service-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-list :refer :all]
[clojure-kubernetes-client.specs.v1-api-versions :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-list :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment-list :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta2-rolling-update-deployment :refer :all]
[clojure-kubernetes-client.specs.v1-pod-dns-config-option :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-runtime-class-spec :refer :all]
[clojure-kubernetes-client.specs.v1-priority-class :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-http-ingress-path :refer :all]
[clojure-kubernetes-client.specs.v1-resource-field-selector :refer :all]
[clojure-kubernetes-client.specs.v1-container-state :refer :all]
[clojure-kubernetes-client.specs.v2beta1-resource-metric-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1-resource-quota :refer :all]
[clojure-kubernetes-client.specs.v1beta1-non-resource-attributes :refer :all]
[clojure-kubernetes-client.specs.v1-subject-access-review-spec :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-supplemental-groups-strategy-options :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-spec :refer :all]
[clojure-kubernetes-client.specs.v1-scale-io-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-controller-revision-list :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-http-ingress-rule-value :refer :all]
[clojure-kubernetes-client.specs.v1-secret-projection :refer :all]
[clojure-kubernetes-client.specs.v1-session-affinity-config :refer :all]
[clojure-kubernetes-client.specs.v1-sysctl :refer :all]
[clojure-kubernetes-client.specs.v1beta1-lease-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-list :refer :all]
[clojure-kubernetes-client.specs.v1-managed-fields-entry :refer :all]
[clojure-kubernetes-client.specs.v1-resource-requirements :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cluster-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-policy-rule :refer :all]
[clojure-kubernetes-client.specs.v1-owner-reference :refer :all]
[clojure-kubernetes-client.specs.v1-rbd-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-event-list :refer :all]
[clojure-kubernetes-client.specs.v1-key-to-path :refer :all]
[clojure-kubernetes-client.specs.v2beta2-object-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-self-subject-access-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-csi-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-container-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-event-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-subresources :refer :all]
[clojure-kubernetes-client.specs.v1-role-list :refer :all]
[clojure-kubernetes-client.specs.v1-api-resource-list :refer :all]
[clojure-kubernetes-client.specs.v1-azure-disk-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta2-scale :refer :all]
[clojure-kubernetes-client.specs.v2beta2-metric-spec :refer :all]
[clojure-kubernetes-client.specs.v2beta1-object-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-ip-block :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-handler :refer :all]
[clojure-kubernetes-client.specs.v2beta2-cross-version-object-reference :refer :all]
[clojure-kubernetes-client.specs.v1-load-balancer-status :refer :all]
[clojure-kubernetes-client.specs.v1-pod-anti-affinity :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-env-var-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition-names :refer :all]
[clojure-kubernetes-client.specs.v1beta2-rolling-update-stateful-set-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-namespace-status :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller-spec :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-pod-security-policy-spec :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-allowed-flex-volume :refer :all]
[clojure-kubernetes-client.specs.v1-quobyte-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-config-map-key-selector :refer :all]
[clojure-kubernetes-client.specs.v1-stateful-set-list :refer :all]
[clojure-kubernetes-client.specs.v1-capabilities :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-list :refer :all]
[clojure-kubernetes-client.specs.v2beta1-metric-spec :refer :all]
[clojure-kubernetes-client.specs.v1-token-review :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-list :refer :all]
[clojure-kubernetes-client.specs.v2beta2-pods-metric-source :refer :all]
[clojure-kubernetes-client.specs.v1-scale-status :refer :all]
[clojure-kubernetes-client.specs.v1-component-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-cluster-role-binding :refer :all]
[clojure-kubernetes-client.specs.v1beta1-event :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-rollback :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-run-as-user-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-aws-elastic-block-store-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-controller-revision-list :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-aggregation-rule :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-list :refer :all]
[clojure-kubernetes-client.specs.v1beta2-scale-spec :refer :all]
[clojure-kubernetes-client.specs.v1-label-selector-requirement :refer :all]
[clojure-kubernetes-client.specs.v1-azure-file-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-storage-os-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-volume :refer :all]
[clojure-kubernetes-client.specs.v1-flex-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-horizontal-pod-autoscaler-list :refer :all]
[clojure-kubernetes-client.specs.v1-weighted-pod-affinity-term :refer :all]
[clojure-kubernetes-client.specs.v1-ceph-fs-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-pod-template :refer :all]
[clojure-kubernetes-client.specs.apiextensions-v1beta1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1-limit-range-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-resource-rule :refer :all]
[clojure-kubernetes-client.specs.v1-iscsi-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-endpoint :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-cluster-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.v1-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-error :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set :refer :all]
[clojure-kubernetes-client.specs.v1-token-review-status :refer :all]
[clojure-kubernetes-client.specs.v1-service-account-list :refer :all]
[clojure-kubernetes-client.specs.v1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1beta1-priority-class-list :refer :all]
[clojure-kubernetes-client.specs.v1-token-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-role-binding-list :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-rule :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cron-job :refer :all]
[clojure-kubernetes-client.specs.v1beta2-controller-revision :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-volume-attachment-status :refer :all]
[clojure-kubernetes-client.specs.v1beta2-stateful-set-spec :refer :all]
[clojure-kubernetes-client.specs.v2beta1-object-metric-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-pod-security-policy :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-pod-security-policy-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-ingress-rule :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-update-strategy :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-fs-group-strategy-options :refer :all]
[clojure-kubernetes-client.specs.v1-service-account :refer :all]
[clojure-kubernetes-client.specs.v1beta2-deployment-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-lease-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-runtime-class :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-service-reference :refer :all]
[clojure-kubernetes-client.specs.v1beta1-stateful-set-spec :refer :all]
[clojure-kubernetes-client.specs.version-info :refer :all]
[clojure-kubernetes-client.specs.v1-pod-affinity-term :refer :all]
[clojure-kubernetes-client.specs.v1beta1-token-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1-resource-quota-status :refer :all]
[clojure-kubernetes-client.specs.v2alpha1-cron-job-spec :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-condition :refer :all]
[clojure-kubernetes-client.specs.v1beta1-replica-set-status :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-allowed-host-path :refer :all]
[clojure-kubernetes-client.specs.v1-endpoint-port :refer :all]
[clojure-kubernetes-client.specs.v1beta1-token-review-status :refer :all]
[clojure-kubernetes-client.specs.v1-replication-controller :refer :all]
[clojure-kubernetes-client.specs.v1-affinity :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-status :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-list :refer :all]
[clojure-kubernetes-client.specs.v1-job-status :refer :all]
[clojure-kubernetes-client.specs.v1-service-spec :refer :all]
[clojure-kubernetes-client.specs.policy-v1beta1-host-port-range :refer :all]
[clojure-kubernetes-client.specs.v1-ceph-fs-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-job-list :refer :all]
[clojure-kubernetes-client.specs.v1-azure-file-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta2-replica-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-volume-attachment :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress-spec :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-webhook-throttle-config :refer :all]
[clojure-kubernetes-client.specs.v1-lease-spec :refer :all]
[clojure-kubernetes-client.specs.v1-api-resource :refer :all]
[clojure-kubernetes-client.specs.v1-container :refer :all]
[clojure-kubernetes-client.specs.v1beta1-webhook :refer :all]
[clojure-kubernetes-client.specs.v1beta1-cron-job-spec :refer :all]
[clojure-kubernetes-client.specs.v1-namespace :refer :all]
[clojure-kubernetes-client.specs.v1-csi-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-pod-disruption-budget-spec :refer :all]
[clojure-kubernetes-client.specs.v1-node-address :refer :all]
[clojure-kubernetes-client.specs.v1-limit-range-item :refer :all]
[clojure-kubernetes-client.specs.v1beta1-daemon-set-condition :refer :all]
[clojure-kubernetes-client.specs.v1-iscsi-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-cluster-role-list :refer :all]
[clojure-kubernetes-client.specs.v1-deployment-condition :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-ingress-rule :refer :all]
[clojure-kubernetes-client.specs.v1-daemon-set-status :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-driver-list :refer :all]
[clojure-kubernetes-client.specs.v1-http-header :refer :all]
[clojure-kubernetes-client.specs.v1-rolling-update-stateful-set-strategy :refer :all]
[clojure-kubernetes-client.specs.networking-v1beta1-ingress :refer :all]
[clojure-kubernetes-client.specs.v2beta1-horizontal-pod-autoscaler-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta2-rolling-update-daemon-set :refer :all]
[clojure-kubernetes-client.specs.v1beta2-daemon-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-port :refer :all]
[clojure-kubernetes-client.specs.apps-v1beta1-deployment-strategy :refer :all]
[clojure-kubernetes-client.specs.v1-pod :refer :all]
[clojure-kubernetes-client.specs.v1-vsphere-virtual-disk-volume-source :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-deployment-status :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-webhook :refer :all]
[clojure-kubernetes-client.specs.v1-projected-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1beta1-network-policy-peer :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role :refer :all]
[clojure-kubernetes-client.specs.v1-storage-class :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-role-ref :refer :all]
[clojure-kubernetes-client.specs.v1beta1-subject-access-review-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-pod-disruption-budget :refer :all]
[clojure-kubernetes-client.specs.v1-endpoints-list :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-policy :refer :all]
[clojure-kubernetes-client.specs.v2beta2-horizontal-pod-autoscaler-spec :refer :all]
[clojure-kubernetes-client.specs.v1beta1-csi-node :refer :all]
[clojure-kubernetes-client.specs.v1-storage-os-persistent-volume-source :refer :all]
[clojure-kubernetes-client.specs.v1-api-service-spec :refer :all]
[clojure-kubernetes-client.specs.apiextensions-v1beta1-webhook-client-config :refer :all]
[clojure-kubernetes-client.specs.v1beta1-self-subject-access-review :refer :all]
[clojure-kubernetes-client.specs.v1-network-policy-list :refer :all]
[clojure-kubernetes-client.specs.v1beta1-custom-resource-definition :refer :all]
[clojure-kubernetes-client.specs.v1-persistent-volume :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-se-linux-strategy-options :refer :all]
)
(:import (java.io File)))
(defn-spec create-token-review-with-http-info any?
"
create a TokenReview"
([body v1-token-review, ] (create-token-review-with-http-info body nil))
([body v1-token-review, {:keys [dryRun fieldManager pretty]} (s/map-of keyword? any?)]
(check-required-params body)
(call-api "/apis/authentication.k8s.io/v1/tokenreviews" :post
{:path-params {}
:header-params {}
:query-params {"dryRun" dryRun "fieldManager" fieldManager "pretty" pretty }
:form-params {}
:body-param body
:content-types []
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
:auth-names ["BearerToken"]})))
(defn-spec create-token-review v1-token-review
"
create a TokenReview"
([body v1-token-review, ] (create-token-review body nil))
([body v1-token-review, optional-params any?]
(let [res (:data (create-token-review-with-http-info body optional-params))]
(if (:decode-models *api-context*)
(st/decode v1-token-review res st/string-transformer)
res))))
(defn-spec get-api-resources-with-http-info any?
"
get available resources"
[]
(call-api "/apis/authentication.k8s.io/v1/" :get
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
:auth-names ["BearerToken"]}))
(defn-spec get-api-resources v1-api-resource-list
"
get available resources"
[]
(let [res (:data (get-api-resources-with-http-info))]
(if (:decode-models *api-context*)
(st/decode v1-api-resource-list res st/string-transformer)
res)))
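;; Illustrative usage sketch for the two endpoints above. It assumes an
;; authenticated *api-context* is already configured for the cluster; the
;; TokenReview body keys shown follow the v1TokenReview schema
;; (apiVersion/kind/spec.token) and are illustrative, not defined in this file.
(comment
  ;; list the resources served under /apis/authentication.k8s.io/v1/
  (get-api-resources)

  ;; ask the API server to validate a bearer token
  (create-token-review
    {:apiVersion "authentication.k8s.io/v1"
     :kind "TokenReview"
     :spec {:token "<some-bearer-token>"}}
    {:pretty "true"}))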
|
|
d4eb7d8c193f3d09aa988765cc4c402be32bb325cc61b931785ae3b5b1f86947 | realworldocaml/book | html_output.ml | open! Core
open! Import
module Unix = Core_unix
include Patdiff_kernel.Html_output.Private.Make (struct
let mtime file =
let%map.Or_error stats =
Or_error.try_with (fun () -> Unix.stat (File_name.real_name_exn file))
in
stats.st_mtime |> Time.Span.of_sec |> Time.of_span_since_epoch
;;
end)
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/patdiff/lib/src/html_output.ml | ocaml | open! Core
open! Import
module Unix = Core_unix
include Patdiff_kernel.Html_output.Private.Make (struct
let mtime file =
let%map.Or_error stats =
Or_error.try_with (fun () -> Unix.stat (File_name.real_name_exn file))
in
stats.st_mtime |> Time.Span.of_sec |> Time.of_span_since_epoch
;;
end)
|
|
be985aa213dcfcf8172e3c427b4bb9396cd490ef76848954ea421adcf863affa | UU-ComputerScience/uhc | Minimal1.hs | {- ----------------------------------------------------------------------------------------
-- what : minimal program, no type sig for main
-- expected: all ok
---------------------------------------------------------------------------------------- -}
module Main where
main = return ()
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/test/regress/99/Minimal1.hs | haskell | ----------------------------------------------------------------------------------------
-- what : minimal program, no type sig for main
-- expected: all ok
---------------------------------------------------------------------------------------- |
module Main where
main = return ()
|
9cc16353d92f2fcef7293e5a738bc4fcb65fbd59adcdf06860fd9a2f101b86b7 | billstclair/trubanc-lisp | test.lisp | ;;; -*- Mode: LISP; Syntax: COMMON-LISP; Package: CL-FAD-TEST; Base: 10 -*-
;;; $Header: /usr/local/cvsrep/cl-fad/test.lisp,v 1.11 2008/03/12 00:10:43 edi Exp $
;;; Copyright (c) 2004-2008, Dr. Edmund Weitz. All rights reserved.
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
;;; DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
;;; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package #:cl-fad-test)
(defparameter *tmp-dir*
#+(or :win32 :mswindows :windows) "c:\\tmp\\"
#-(or :win32 :mswindows :windows) "/tmp/")
(defvar *test-counter* 0)
(defmacro assert* (form)
`(progn
(format t "Trying to assert ~A~%" ',form)
(assert ,form)
(format t "Test ~A passed.~%" (incf *test-counter*))))
(defun test ()
(setq *test-counter* 0)
(let ((fad-dir (merge-pathnames (pathname-as-directory "fad-test")
*tmp-dir*)))
(delete-directory-and-files fad-dir :if-does-not-exist :ignore)
(assert* (directory-pathname-p fad-dir))
(assert* (directory-pathname-p (pathname *tmp-dir*)))
(let ((foo-file (merge-pathnames "foo.lisp"
fad-dir)))
(assert* (not (directory-pathname-p foo-file)))
(assert* (not (file-exists-p foo-file)))
(assert* (not (file-exists-p fad-dir)))
(with-open-file (out (ensure-directories-exist foo-file)
:direction :output
:if-does-not-exist :create)
(write-string "NIL" out))
(assert* (file-exists-p foo-file))
(assert* (not (directory-exists-p foo-file)))
(assert* (file-exists-p fad-dir))
(assert* (directory-exists-p fad-dir))
(assert* (equal fad-dir
(pathname-as-directory fad-dir)))
(assert* (equal foo-file
(pathname-as-file foo-file)))
(assert* (not (equal fad-dir
(pathname-as-file fad-dir))))
(assert* (not (equal foo-file
(pathname-as-directory foo-file))))
(dolist (name '("bar" "baz"))
(let ((dir (merge-pathnames (pathname-as-directory name)
fad-dir)))
(dolist (name '("foo.text" "bar.lisp"))
(let ((file (merge-pathnames name dir)))
(with-open-file (out (ensure-directories-exist file)
:direction :output
:if-does-not-exist :create)
(write-string "NIL" out))))))
;; /tmp/fad-test/foo.lisp
;; /tmp/fad-test/bar/bar.lisp
;; /tmp/fad-test/bar/foo.text
;; /tmp/fad-test/baz/bar.lisp
;; /tmp/fad-test/baz/foo.text
;; files: 5
;; dirs: 3
(let ((file-counter 0)
(file-and-dir-counter 0)
(bar-counter 0))
(walk-directory fad-dir
(lambda (file)
(declare (ignore file))
(incf file-counter)))
;; file-counter => 5
(walk-directory fad-dir
(lambda (file)
(declare (ignore file))
(incf file-and-dir-counter))
:directories t)
;; file-and-dir-counter => 5 + 3
(walk-directory fad-dir
(lambda (file)
(declare (ignore file))
(incf bar-counter))
:test (lambda (file)
(string= (pathname-name file)
"bar"))
:directories t)
;; do not traverse the baz directory
(walk-directory fad-dir
(lambda (file)
(declare (ignore file))
(incf file-and-dir-counter))
:test (lambda (file)
(not (and (directory-pathname-p file)
(string= (first (last (pathname-directory file)))
"baz"))))
:directories :breadth-first)
;; file-and-dir-counter => 5 + 3 + 2 dirs + 3 files
(assert* (= 5 file-counter))
(assert* (= 13 file-and-dir-counter))
(assert* (= 2 bar-counter)))
(let ((bar-file (merge-pathnames "bar.lisp" fad-dir)))
(copy-file foo-file bar-file)
(assert* (file-exists-p bar-file))
(with-open-file (foo-stream foo-file :element-type '(unsigned-byte 8))
(with-open-file (bar-stream bar-file :element-type '(unsigned-byte 8))
(assert* (= (file-length foo-stream)
(file-length bar-stream)))
(loop for foo-byte = (read-byte foo-stream nil nil)
for bar-byte = (read-byte bar-stream nil nil)
while (and foo-byte bar-byte)
do (assert* (eql foo-byte bar-byte))))))
(let ((baz-dir (merge-pathnames (pathname-as-directory "baz")
fad-dir))
(list (mapcar #'namestring (list-directory fad-dir))))
(assert* (find (namestring (truename foo-file)) list :test #'string=))
(assert* (find (namestring (truename baz-dir)) list :test #'string=))
(assert* (not (find (namestring (pathname-as-file baz-dir))
list
:test #'string=)))))
(delete-directory-and-files fad-dir :if-does-not-exist :error)
(assert* (not (file-exists-p fad-dir)))
(assert* (not (directory-exists-p fad-dir))))
(format t "All tests passed.~%"))
| null | https://raw.githubusercontent.com/billstclair/trubanc-lisp/5436d2eca5b1ed10bc47eec7080f6cb90f98ca65/systems/cl-fad-0.6.2/test.lisp | lisp | Syntax : COMMON - LISP ; Package : CL - FAD - TEST ; Base : 10 -*-
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/tmp/fad-test/foo.lisp
/tmp/fad-test/bar/bar.lisp
/tmp/fad-test/bar/foo.text
/tmp/fad-test/baz/bar.lisp
/tmp/fad-test/baz/foo.text
do not traverse the baz directory | $ Header : /usr / local / cvsrep / cl - fad / test.lisp , v 1.11 2008/03/12 00:10:43 edi Exp $
Copyright ( c ) 2004 - 2008 , Dr. . All rights reserved .
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package #:cl-fad-test)
(defparameter *tmp-dir*
#+(or :win32 :mswindows :windows) "c:\\tmp\\"
#-(or :win32 :mswindows :windows) "/tmp/")
(defvar *test-counter* 0)
(defmacro assert* (form)
`(progn
(format t "Trying to assert ~A~%" ',form)
(assert ,form)
(format t "Test ~A passed.~%" (incf *test-counter*))))
(defun test ()
(setq *test-counter* 0)
(let ((fad-dir (merge-pathnames (pathname-as-directory "fad-test")
*tmp-dir*)))
(delete-directory-and-files fad-dir :if-does-not-exist :ignore)
(assert* (directory-pathname-p fad-dir))
(assert* (directory-pathname-p (pathname *tmp-dir*)))
(let ((foo-file (merge-pathnames "foo.lisp"
fad-dir)))
(assert* (not (directory-pathname-p foo-file)))
(assert* (not (file-exists-p foo-file)))
(assert* (not (file-exists-p fad-dir)))
(with-open-file (out (ensure-directories-exist foo-file)
:direction :output
:if-does-not-exist :create)
(write-string "NIL" out))
(assert* (file-exists-p foo-file))
(assert* (not (directory-exists-p foo-file)))
(assert* (file-exists-p fad-dir))
(assert* (directory-exists-p fad-dir))
(assert* (equal fad-dir
(pathname-as-directory fad-dir)))
(assert* (equal foo-file
(pathname-as-file foo-file)))
(assert* (not (equal fad-dir
(pathname-as-file fad-dir))))
(assert* (not (equal foo-file
(pathname-as-directory foo-file))))
(dolist (name '("bar" "baz"))
(let ((dir (merge-pathnames (pathname-as-directory name)
fad-dir)))
(dolist (name '("foo.text" "bar.lisp"))
(let ((file (merge-pathnames name dir)))
(with-open-file (out (ensure-directories-exist file)
:direction :output
:if-does-not-exist :create)
(write-string "NIL" out))))))
files : 5
dirs : 3
(let ((file-counter 0)
(file-and-dir-counter 0)
(bar-counter 0))
(walk-directory fad-dir
(lambda (file)
(declare (ignore file))
(incf file-counter)))
file - counter = > 5
(walk-directory fad-dir
(lambda (file)
(declare (ignore file))
(incf file-and-dir-counter))
:directories t)
file - and - dir - counter = > 5 + 3
(walk-directory fad-dir
(lambda (file)
(declare (ignore file))
(incf bar-counter))
:test (lambda (file)
(string= (pathname-name file)
"bar"))
:directories t)
(walk-directory fad-dir
(lambda (file)
(declare (ignore file))
(incf file-and-dir-counter))
:test (lambda (file)
(not (and (directory-pathname-p file)
(string= (first (last (pathname-directory file)))
"baz"))))
:directories :breadth-first)
file - and - dir - counter = > 5 + 3 + 2 dirs + 3 files
(assert* (= 5 file-counter))
(assert* (= 13 file-and-dir-counter))
(assert* (= 2 bar-counter)))
(let ((bar-file (merge-pathnames "bar.lisp" fad-dir)))
(copy-file foo-file bar-file)
(assert* (file-exists-p bar-file))
(with-open-file (foo-stream foo-file :element-type '(unsigned-byte 8))
(with-open-file (bar-stream bar-file :element-type '(unsigned-byte 8))
(assert* (= (file-length foo-stream)
(file-length bar-stream)))
(loop for foo-byte = (read-byte foo-stream nil nil)
for bar-byte = (read-byte bar-stream nil nil)
while (and foo-byte bar-byte)
do (assert* (eql foo-byte bar-byte))))))
(let ((baz-dir (merge-pathnames (pathname-as-directory "baz")
fad-dir))
(list (mapcar #'namestring (list-directory fad-dir))))
(assert* (find (namestring (truename foo-file)) list :test #'string=))
(assert* (find (namestring (truename baz-dir)) list :test #'string=))
(assert* (not (find (namestring (pathname-as-file baz-dir))
list
:test #'string=)))))
(delete-directory-and-files fad-dir :if-does-not-exist :error)
(assert* (not (file-exists-p fad-dir)))
(assert* (not (directory-exists-p fad-dir))))
(format t "All tests passed.~%"))
|
58e7c5859a430962c3605c5b2436458c16926a812e679aac358b38d62f5c8dca | AvramRobert/omnia | view.clj | (ns omnia.repl.view
(:require [schema.core :as s]
[omnia.repl.text :as t]
[omnia.util.arithmetic :refer [++ -- mod*]]
[omnia.util.collection :refer [bounded-subvec assoc-new]]
[omnia.schema.view :refer [View]]
[omnia.schema.common :refer [Point Region]]
[omnia.schema.text :refer [Text Line]]))
(def continuation (t/from-string "..."))
(def delimiter (t/from-string "------"))
(s/defn create-view :- View
"A `View` is a structure enclosing some form of text that supports
projecting that text within a bounded view.
It uses the following attributes to keep track of the projection.
field-of-view:
* the size of the projected view. Amount of lines that can be viewed at one time.
view-offset:
* offset withing the bounded view.
* Even though the view is bounded by `field-of-view`, the cursor can still navigate the entire text.
To move the view properly, this offset keeps track of when the cursor has moved beyond the bounds
of `field-of-view` and by how many lines
scroll-offset:
* when scrolling, how many lines have been scrolled"
[text :- Text
size :- s/Int]
{:text text
:field-of-view size
:view-offset 0
:scroll-offset 0})
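;; Minimal sketch of the map documented above (illustrative; any Text value
;; built with the omnia.repl.text helpers works as the :text field):
(comment
  (create-view (t/from-string "...") 3)
  ;; => {:text <Text>, :field-of-view 3, :view-offset 0, :scroll-offset 0}
  )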
(s/defn empty-view-with-size :- View
[size :- s/Int]
(create-view t/empty-text size))
(s/def empty-view :- View
(empty-view-with-size 0))
(s/defn view-offset :- s/Int
[view :- View]
(:view-offset view))
(s/defn field-of-view :- s/Int
[view :- View]
(:field-of-view view))
(s/defn scroll-offset :- s/Int
[view :- View]
(:scroll-offset view))
(s/defn text :- Text
[view :- View]
(:text view))
(s/defn reset-scroll :- View
[view :- View]
(assoc-new view :scroll-offset 0))
(s/defn reset-text :- View
[view :- View
text :- Text]
(assoc view :text text))
(s/defn reset-view-offset :- View
[view :- View]
(assoc-new view :view-offset 0))
(s/defn resize :- View
[view :- View, field-of-view :- s/Int]
(assoc view :field-of-view field-of-view :scroll-offset 0))
(s/defn hollow? :- s/Bool
[view :- View]
(-> view (text) (:lines) (empty?)))
(s/defn with-view-offset :- View
[view :- View, offset :- s/Int]
(assoc view :view-offset offset))
(s/defn with-scroll-offset :- View
[view :- View, offset :- s/Int]
(assoc view :scroll-offset offset))
(s/defn current-line :- [Character]
[view :- View]
(-> view (text) (t/current-line)))
(s/defn total-offset :- s/Int
[view :- View]
(let [fov (field-of-view view)
s-off (scroll-offset view)
v-off (view-offset view)]
(+ fov v-off s-off)))
(s/defn bottom-y :- s/Int
[view :- View]
"The lower y bound of a page (exclusive)
bottom-y = height - view-offset - 1
Subtract 1 because we count from 0"
(let [v-off (view-offset view)
height (-> view (text) (t/size))]
(-- height v-off 1)))
(s/defn top-y :- s/Int
[view :- View]
"The upper y bound of a page (inclusive)
top-y = (height - fov - ov)"
(let [fov (field-of-view view)
v-off (view-offset view)
height (-> view (text) (t/size))]
(-- height fov v-off)))
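;; Worked example for the two bounds above (illustrative numbers): with a
;; 10-line text, field-of-view 5 and view-offset 0,
;;   top-y    = 10 - 5 - 0 = 5   (first visible line, inclusive)
;;   bottom-y = 10 - 0 - 1 = 9   (last visible line)
;; and raising view-offset to 1 shifts the window up to lines 4..8.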
(s/defn project-y :- s/Int
[view :- View, y :- s/Int]
"given view-y, screen-y = view-y - top-y
given screen-y, view-y = screen-y + top-y"
(let [fov (field-of-view view)
h (-> view (text) (t/size))
ys (top-y view)]
(if (> h fov) (-- y ys) y)))
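;; Continuing the numbers above: with top-y = 5, a cursor on text line 7 is
;; drawn at screen line 7 - 5 = 2; when the whole text fits inside the
;; field-of-view, the y coordinate is returned unchanged.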
(s/defn project-cursor :- Point
[view :- View, [x hy] :- Point]
[x (project-y view hy)])
(s/defn project-view-cursor :- Point
[view :- View]
(project-cursor view (-> view (text) (:cursor))))
(s/defn project-view-text :- [Line]
[view :- View]
(let [text (text view)
fov (field-of-view view)
v-off (view-offset view)
s-off (scroll-offset view)
viewable-chunk (+ fov v-off s-off)
y-start (-- (t/size text) viewable-chunk)
y-end (++ y-start fov)]
(bounded-subvec (:lines text) y-start y-end)))
(s/defn project :- Text
[view :- View]
(-> view (project-view-text) (t/create-text) (t/reset-cursor (project-view-cursor view))))
(s/defn clip-selection :- Region
[view :- View
selection :- Region]
(let [fov (field-of-view view)
h (-> view (text) (t/size))
[xs ys] (:from selection)
[xe ye] (:until selection)
top (top-y view)
bottom (bottom-y view)
unpaged? (< h fov)
clipped-top? (< ys top)
clipped-bottom? (> ye bottom)
visible-top? (<= top ys bottom)
visible-bottom? (<= top ye bottom)
end-bottom (-> view (text) (t/reset-y bottom) (t/end-x) (:cursor))]
(cond
unpaged? selection
(and visible-top?
visible-bottom?) selection
(and visible-top?
clipped-bottom?) {:from [xs ys]
:until end-bottom}
(and visible-bottom?
clipped-top?) {:from [0 top]
:until [xe ye]}
:else {:from [0 bottom]
:until [0 bottom]})))
(s/defn project-selection :- Region
"projecting y outside the bounds leads to:
a) -n when upper bound is exceeded by n
b) fov + n numbers, when lower bound exceeded by n"
[view :- View
region :- Region]
(-> view
(clip-selection region)
(update :from (partial project-cursor view))
(update :until (partial project-cursor view))))
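;; Illustrative numbers for the docstring above: with field-of-view 5, a y
;; two lines above the visible top projects to -2, and a y one line below the
;; visible bottom projects to 5 + 1 = 6.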
(s/defn correct-between :- s/Int
[view :- View
previous-view :- View]
(let [fov (field-of-view view)
v-off (view-offset view)
h (-> view (text) (t/size))
[_ y] (-> view (text) (:cursor))
pfov (field-of-view previous-view)
ph (-> previous-view (text) (t/size))
upper-y (top-y view) ;; the top viewable y
lower-y (bottom-y view) ;; the lower viewable y
over-upper? (< y upper-y)
over-lower? (> y lower-y)
at-lower? (= y lower-y)
smaller? (< h ph)
larger? (> h ph)
unpaged? (and (<= h fov)
(<= ph fov))
resized? (and (not= pfov fov)
(not= 0 v-off))]
(cond
resized? (++ v-off (- pfov fov)) ;; we've changed the terminal size
unpaged? v-off ;; we've not exceeded the fov
(and larger? at-lower?) v-off ;; we've gotten bigger but we're still at the bottom
(or larger? smaller?) (++ v-off (- h ph)) ;; we've changed in size
over-upper? (++ v-off (- upper-y y)) ;; we've exceeded the upper bound
over-lower? (-- v-off (- y lower-y)) ;; we've exceeded the lower bound
:else v-off)))
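;; Example of the size-change branch above (illustrative): if the text grew by
;; two lines (h - ph = 2) while the cursor sat above the bottom line, the
;; view-offset also grows by 2, so the window keeps showing the same lines.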
(s/defn corrected :- View
([view :- View]
(corrected view view))
([view :- View,
previous-view :- View]
(assoc view :view-offset (correct-between view previous-view))))
(s/defn enrich-with :- View
[view :- View, texts :- [Text]]
(let [text (text view)]
(->> texts (apply t/join text) (reset-text view))))
(s/defn riffle-window :- View
[text :- Text
size :- s/Int]
(let [content (->> text (t/start) (t/end-x))]
(-> (empty-view-with-size size) (enrich-with [content]) (corrected))))
(s/defn riffle :- View
[view :- View]
(let [text (text view)
[_ y] (:cursor text)
height (t/size text)
y' (mod* (inc y) height)]
(-> view
(reset-text (t/reset-y text y'))
(corrected))))
(s/defn scroll-up :- View
[view :- View]
(let [offset (scroll-offset view)
total-offset (total-offset view)
text-size (-> view (text) (t/size))
result (if (>= total-offset text-size) offset (inc offset))]
(assoc-new view :scroll-offset result)))
(s/defn scroll-down :- View
[view :- View]
(let [offset (scroll-offset view)
result (if (zero? offset) offset (dec offset))]
(assoc-new view :scroll-offset result)))
(s/defn show :- String
[view :- View]
(-> view (text) (t/debug-string)))
(s/defn deselect :- View
[view :- View]
(->> view (text) (t/deselect) (reset-text view)))
(s/defn paginate :- Text
[view :- View]
(let [truncated? (-> view (view-offset) (zero?) (not))
extend #(if truncated? (t/append % continuation) %)]
(-> view
(project)
(extend)
(t/indent 1))))
(s/defn pop-up :- View
[view :- View, embedded :- View]
(let [text (text view)
paginated (paginate embedded)
ph (t/size text)
top (-> text (t/peer (fn [l [x & _]] (conj l x))))
bottom (-> text (t/peer (fn [_ [_ & r]] (drop (+ ph 2) r))))]
(assoc view :text (-> (t/join top delimiter paginated)
(t/end-x)
(t/append delimiter bottom)))))
| null | https://raw.githubusercontent.com/AvramRobert/omnia/af515fb75d7492251436a3d844c0616c82d4eee0/src/omnia/repl/view.clj | clojure | the top viewable y
the lower viewable y
we've changed the terminal size
we've not exceeded the fov
we've gotten bigger but we're still at the bottom
we've changed in size
we've exceeded the upper bound
we've exceeded the lower bound | (ns omnia.repl.view
(:require [schema.core :as s]
[omnia.repl.text :as t]
[omnia.util.arithmetic :refer [++ -- mod*]]
[omnia.util.collection :refer [bounded-subvec assoc-new]]
[omnia.schema.view :refer [View]]
[omnia.schema.common :refer [Point Region]]
[omnia.schema.text :refer [Text Line]]))
(def continuation (t/from-string "..."))
(def delimiter (t/from-string "------"))
(s/defn create-view :- View
"A `View` is a structure enclosing some form of text that supports
projecting that text within a bounded view.
It uses the following attributes to keep track of the projection.
field-of-view:
* the size of the projected view. Amount of lines that can be viewed at one time.
view-offset:
* offset withing the bounded view.
* Even though the view is bounded by `field-of-view`, the cursor can still navigate the entire text.
To move the view properly, this offset keeps track of when the cursor has moved beyond the bounds
of `field-of-view` and by how many lines
scroll-offset:
* when scrolling, how many lines have been scrolled"
[text :- Text
size :- s/Int]
{:text text
:field-of-view size
:view-offset 0
:scroll-offset 0})
(s/defn empty-view-with-size :- View
[size :- s/Int]
(create-view t/empty-text size))
(s/def empty-view :- View
(empty-view-with-size 0))
(s/defn view-offset :- s/Int
[view :- View]
(:view-offset view))
(s/defn field-of-view :- s/Int
[view :- View]
(:field-of-view view))
(s/defn scroll-offset :- s/Int
[view :- View]
(:scroll-offset view))
(s/defn text :- Text
[view :- View]
(:text view))
(s/defn reset-scroll :- View
[view :- View]
(assoc-new view :scroll-offset 0))
(s/defn reset-text :- View
[view :- View
text :- Text]
(assoc view :text text))
(s/defn reset-view-offset :- View
[view :- View]
(assoc-new view :view-offset 0))
(s/defn resize :- View
[view :- View, field-of-view :- s/Int]
(assoc view :field-of-view field-of-view :scroll-offset 0))
(s/defn hollow? :- s/Bool
[view :- View]
(-> view (text) (:lines) (empty?)))
(s/defn with-view-offset :- View
[view :- View, offset :- s/Int]
(assoc view :view-offset offset))
(s/defn with-scroll-offset :- View
[view :- View, offset :- s/Int]
(assoc view :scroll-offset offset))
(s/defn current-line :- [Character]
[view :- View]
(-> view (text) (t/current-line)))
(s/defn total-offset :- s/Int
[view :- View]
(let [fov (field-of-view view)
s-off (scroll-offset view)
v-off (view-offset view)]
(+ fov v-off s-off)))
(s/defn bottom-y :- s/Int
[view :- View]
"The lower y bound of a page (exclusive)
bottom-y = height - view-offset - 1
Subtract 1 because we count from 0"
(let [v-off (view-offset view)
height (-> view (text) (t/size))]
(-- height v-off 1)))
(s/defn top-y :- s/Int
[view :- View]
"The upper y bound of a page (inclusive)
top-y = (height - fov - ov)"
(let [fov (field-of-view view)
v-off (view-offset view)
height (-> view (text) (t/size))]
(-- height fov v-off)))
(s/defn project-y :- s/Int
[view :- View, y :- s/Int]
"given view-y, screen-y = view-y - top-y
given screen-y, view-y = screen-y + top-y"
(let [fov (field-of-view view)
h (-> view (text) (t/size))
ys (top-y view)]
(if (> h fov) (-- y ys) y)))
(s/defn project-cursor :- Point
[view :- View, [x hy] :- Point]
[x (project-y view hy)])
(s/defn project-view-cursor :- Point
[view :- View]
(project-cursor view (-> view (text) (:cursor))))
(s/defn project-view-text :- [Line]
[view :- View]
(let [text (text view)
fov (field-of-view view)
v-off (view-offset view)
s-off (scroll-offset view)
viewable-chunk (+ fov v-off s-off)
y-start (-- (t/size text) viewable-chunk)
y-end (++ y-start fov)]
(bounded-subvec (:lines text) y-start y-end)))
(s/defn project :- Text
[view :- View]
(-> view (project-view-text) (t/create-text) (t/reset-cursor (project-view-cursor view))))
(s/defn clip-selection :- Region
[view :- View
selection :- Region]
(let [fov (field-of-view view)
h (-> view (text) (t/size))
[xs ys] (:from selection)
[xe ye] (:until selection)
top (top-y view)
bottom (bottom-y view)
unpaged? (< h fov)
clipped-top? (< ys top)
clipped-bottom? (> ye bottom)
visible-top? (<= top ys bottom)
visible-bottom? (<= top ye bottom)
end-bottom (-> view (text) (t/reset-y bottom) (t/end-x) (:cursor))]
(cond
unpaged? selection
(and visible-top?
visible-bottom?) selection
(and visible-top?
clipped-bottom?) {:from [xs ys]
:until end-bottom}
(and visible-bottom?
clipped-top?) {:from [0 top]
:until [xe ye]}
:else {:from [0 bottom]
:until [0 bottom]})))
(s/defn project-selection :- Region
"projecting y outside the bounds leads to:
a) -n when upper bound is exceeded by n
b) fov + n numbers, when lower bound exceeded by n"
[view :- View
region :- Region]
(-> view
(clip-selection region)
(update :from (partial project-cursor view))
(update :until (partial project-cursor view))))
(s/defn correct-between :- s/Int
[view :- View
previous-view :- View]
(let [fov (field-of-view view)
v-off (view-offset view)
h (-> view (text) (t/size))
[_ y] (-> view (text) (:cursor))
pfov (field-of-view previous-view)
ph (-> previous-view (text) (t/size))
over-upper? (< y upper-y)
over-lower? (> y lower-y)
at-lower? (= y lower-y)
smaller? (< h ph)
larger? (> h ph)
unpaged? (and (<= h fov)
(<= ph fov))
resized? (and (not= pfov fov)
(not= 0 v-off))]
(cond
:else v-off)))
(s/defn corrected :- View
([view :- View]
(corrected view view))
([view :- View,
previous-view :- View]
(assoc view :view-offset (correct-between view previous-view))))
(s/defn enrich-with :- View
[view :- View, texts :- [Text]]
(let [text (text view)]
(->> texts (apply t/join text) (reset-text view))))
(s/defn riffle-window :- View
[text :- Text
size :- s/Int]
(let [content (->> text (t/start) (t/end-x))]
(-> (empty-view-with-size size) (enrich-with [content]) (corrected))))
(s/defn riffle :- View
[view :- View]
(let [text (text view)
[_ y] (:cursor text)
height (t/size text)
y' (mod* (inc y) height)]
(-> view
(reset-text (t/reset-y text y'))
(corrected))))
(s/defn scroll-up :- View
[view :- View]
(let [offset (scroll-offset view)
total-offset (total-offset view)
text-size (-> view (text) (t/size))
result (if (>= total-offset text-size) offset (inc offset))]
(assoc-new view :scroll-offset result)))
(s/defn scroll-down :- View
[view :- View]
(let [offset (scroll-offset view)
result (if (zero? offset) offset (dec offset))]
(assoc-new view :scroll-offset result)))
(s/defn show :- String
[view :- View]
(-> view (text) (t/debug-string)))
(s/defn deselect :- View
[view :- View]
(->> view (text) (t/deselect) (reset-text view)))
(s/defn paginate :- Text
[view :- View]
(let [truncated? (-> view (view-offset) (zero?) (not))
extend #(if truncated? (t/append % continuation) %)]
(-> view
(project)
(extend)
(t/indent 1))))
(s/defn pop-up :- View
[view :- View, embedded :- View]
(let [text (text view)
paginated (paginate embedded)
ph (t/size text)
top (-> text (t/peer (fn [l [x & _]] (conj l x))))
bottom (-> text (t/peer (fn [_ [_ & r]] (drop (+ ph 2) r))))]
(assoc view :text (-> (t/join top delimiter paginated)
(t/end-x)
(t/append delimiter bottom)))))
|
f2a47762de33469a69c960bfcc59ab51ae54ee1c3c514068bd5352ac2edfd8f1 | paurkedal/viz | cst_types.ml | (* Copyright (C) 2010--2016  Petter A. Urkedal < >
 *
 * This file is part of the Viz Compiler </>.
 *
 * The Viz Compiler is free software: you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation, either version 3 of the License, or (at your option)
 * any later version.
 *
 * The Viz Compiler is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the Viz Compiler.  If not, see </>.
 *)
(** Type Definitions of the Concrete Syntax Tree *)
(* These type definitions are kept in a separate file without an interface file
* to avoid duplicating all the constructor cases. *)
TYPE_CONV_PATH "Camlviz.Cst_types"
open Sexplib
open Leaf_types
type idrhint = Ih_none | Ih_univ | Ih_inj
type cidr = Cidr of loc * idr
type cmonad = string
type cpred =
| Cpred_let of loc * cmonad option * ctrm * cpred * cpred
| Cpred_if of loc * ctrm * cpred * cpred
| Cpred_back of loc
| Cpred_at of loc * (ctrm * cpred) list
| Cpred_expr0 of loc * idr
| Cpred_expr of loc * idr * ctrm
| Cpred_expr_which of loc * idr * ctrm * cwhich
| Cpred_seq of loc * idr * ctrm * cpred option
| Cpred_seq_which of loc * idr * ctrm * cwhich * cpred option
| Cpred_cond of loc * idr * ctrm * cpred * cpred option
| Cpred_upon of loc * ctrm * cpred * cpred
and ctrm =
| Ctrm_ref of cidr * idrhint
| Ctrm_literal of loc * lit
| Ctrm_label of loc * cidr * ctrm
| Ctrm_quantify of loc * cidr * ctrm * ctrm
| Ctrm_rel of loc * ctrm * (loc * cidr * ctrm) list
| Ctrm_apply of loc * ctrm * ctrm
| Ctrm_project of loc * cidr * ctrm
| Ctrm_array of loc * ctrm list
| Ctrm_what of loc * cmonad option * cpred
| Ctrm_where of loc * cdef list
| Ctrm_with of loc * ctrm option * cdef list
and cdef =
| Cdef_include of loc * bool * ctrm
| Cdef_open of loc * abi * ctrm
| Cdef_use of loc * ctrm
| Cdef_type of loc * abi * ctrm * cdef list
| Cdef_in of loc * bool * ctrm * ctrm
| Cdec_sig of loc * cidr
| Cdef_sig of loc * cidr * ctrm
| Cdef_val of loc * val_info * ctrm
| Cdef_let of loc * cmonad option * ctrm * cpred
| Cdef_inj of loc * abi * ctrm
| Cdef_lex of loc * string * (cidr * cidr list) list
| Cdef_lexalias of loc * (cidr * cidr) list
and cwhich = cmonad option * cpred
| null | https://raw.githubusercontent.com/paurkedal/viz/ab1f1071fafdc51eae69185ec55d7a6e7bb94ea9/camlviz/cst_types.ml | ocaml | These type definitions are kept in a separate file without an interface file
* to avoid duplicating all the constructor cases. | Copyright ( C ) 2010 - -2016 Petter A. Urkedal < >
*
* This file is part of the Viz Compiler < / > .
*
* The Viz Compiler is free software : you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation , either version 3 of the License , or ( at your option )
* any later version .
*
* The Viz Compiler is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public License
* for more details .
*
* You should have received a copy of the GNU General Public License along
* with the Viz Compiler . If not , see < / > .
*
* This file is part of the Viz Compiler </>.
*
* The Viz Compiler is free software: you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation, either version 3 of the License, or (at your option)
* any later version.
*
* The Viz Compiler is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License along
* with the Viz Compiler. If not, see </>.
*)
* Type Definitions of the Concrete Syntax Tree
TYPE_CONV_PATH "Camlviz.Cst_types"
open Sexplib
open Leaf_types
type idrhint = Ih_none | Ih_univ | Ih_inj
type cidr = Cidr of loc * idr
type cmonad = string
type cpred =
| Cpred_let of loc * cmonad option * ctrm * cpred * cpred
| Cpred_if of loc * ctrm * cpred * cpred
| Cpred_back of loc
| Cpred_at of loc * (ctrm * cpred) list
| Cpred_expr0 of loc * idr
| Cpred_expr of loc * idr * ctrm
| Cpred_expr_which of loc * idr * ctrm * cwhich
| Cpred_seq of loc * idr * ctrm * cpred option
| Cpred_seq_which of loc * idr * ctrm * cwhich * cpred option
| Cpred_cond of loc * idr * ctrm * cpred * cpred option
| Cpred_upon of loc * ctrm * cpred * cpred
and ctrm =
| Ctrm_ref of cidr * idrhint
| Ctrm_literal of loc * lit
| Ctrm_label of loc * cidr * ctrm
| Ctrm_quantify of loc * cidr * ctrm * ctrm
| Ctrm_rel of loc * ctrm * (loc * cidr * ctrm) list
| Ctrm_apply of loc * ctrm * ctrm
| Ctrm_project of loc * cidr * ctrm
| Ctrm_array of loc * ctrm list
| Ctrm_what of loc * cmonad option * cpred
| Ctrm_where of loc * cdef list
| Ctrm_with of loc * ctrm option * cdef list
and cdef =
| Cdef_include of loc * bool * ctrm
| Cdef_open of loc * abi * ctrm
| Cdef_use of loc * ctrm
| Cdef_type of loc * abi * ctrm * cdef list
| Cdef_in of loc * bool * ctrm * ctrm
| Cdec_sig of loc * cidr
| Cdef_sig of loc * cidr * ctrm
| Cdef_val of loc * val_info * ctrm
| Cdef_let of loc * cmonad option * ctrm * cpred
| Cdef_inj of loc * abi * ctrm
| Cdef_lex of loc * string * (cidr * cidr list) list
| Cdef_lexalias of loc * (cidr * cidr) list
and cwhich = cmonad option * cpred
|
fb18c91dd2d3fe3d1e2b9103bd08d934161d5ae6e97ba73a751355d9192eeb25 | ohua-dev/ohua-core | Refs.hs | module Ohua.ParseTools.Refs where
import Ohua.Prelude
import Ohua.ALang.Lang
ohuaLangNS :: NSRef
ohuaLangNS = makeThrow ["ohua", "lang"]
mkQualVar :: NSRef -> Binding -> Expr
mkQualVar ns name0 = PureFunction (QualifiedBinding ns name0) Nothing
mkOhuaLangRef :: Binding -> Expr
mkOhuaLangRef = mkQualVar ohuaLangNS
ifBuiltin :: Expr
ifBuiltin = mkOhuaLangRef "if"
smapBuiltin :: Expr
smapBuiltin = mkOhuaLangRef "smap"
funcTyConRef :: QualifiedBinding
funcTyConRef = QualifiedBinding ohuaLangNS "->"
funcTyConSBind :: SomeBinding
funcTyConSBind = Qual funcTyConRef
funcTyCon :: SomeTyVar
funcTyCon = TyCon funcTyConSBind
mkFunc :: DefaultTyExpr -> DefaultTyExpr -> DefaultTyExpr
mkFunc a b = TyRef funcTyCon `TyApp` a `TyApp` b
mkTuple :: QualifiedBinding
mkTuple = QualifiedBinding ohuaLangNS "(,)"
| null | https://raw.githubusercontent.com/ohua-dev/ohua-core/978fa3369922f86cc3fc474d5f2c554cc87fd60a/core/src/Ohua/ParseTools/Refs.hs | haskell | module Ohua.ParseTools.Refs where
import Ohua.Prelude
import Ohua.ALang.Lang
ohuaLangNS :: NSRef
ohuaLangNS = makeThrow ["ohua", "lang"]
mkQualVar :: NSRef -> Binding -> Expr
mkQualVar ns name0 = PureFunction (QualifiedBinding ns name0) Nothing
mkOhuaLangRef :: Binding -> Expr
mkOhuaLangRef = mkQualVar ohuaLangNS
ifBuiltin :: Expr
ifBuiltin = mkOhuaLangRef "if"
smapBuiltin :: Expr
smapBuiltin = mkOhuaLangRef "smap"
funcTyConRef :: QualifiedBinding
funcTyConRef = QualifiedBinding ohuaLangNS "->"
funcTyConSBind :: SomeBinding
funcTyConSBind = Qual funcTyConRef
funcTyCon :: SomeTyVar
funcTyCon = TyCon funcTyConSBind
mkFunc :: DefaultTyExpr -> DefaultTyExpr -> DefaultTyExpr
mkFunc a b = TyRef funcTyCon `TyApp` a `TyApp` b
mkTuple :: QualifiedBinding
mkTuple = QualifiedBinding ohuaLangNS "(,)"
|
|
f70ec1d570a04ae80b242aa49ac83a54e5e04ca4777f9cd7432fd48ef5eb59d8 | babashka/nbb | load_file_test.cljs | (ns load-file-test
(:require [nbb.core :refer [load-file *file*]]))
(def f *file*)
(.then (load-file "test-scripts/loaded_by_load_file_test.cljs")
(fn [m]
(assoc m :load-file-test-file-dyn-var f)))
| null | https://raw.githubusercontent.com/babashka/nbb/2c9b5478ed67c3c9258ef40c50e2eb2b12b641fe/test-scripts/load_file_test.cljs | clojure | (ns load-file-test
(:require [nbb.core :refer [load-file *file*]]))
(def f *file*)
(.then (load-file "test-scripts/loaded_by_load_file_test.cljs")
(fn [m]
(assoc m :load-file-test-file-dyn-var f)))
|
|
2ebe0f2d00e9da5aac8411e1f429ed4d8109ebb52137620897639d4ffbfe0738 | distrap/gcodehs | Ann.hs | {-# LANGUAGE DeriveFunctor #-}
module Data.GCode.Ann (
Ann(..)
, stripAnnotation
) where
{-|
Type for annotating `Code` or `Canon` with source positions.
-}
data Ann a = SrcLine Integer a
deriving (Show, Eq, Ord, Functor)
stripAnnotation :: Ann a -> a
stripAnnotation (SrcLine _ x) = x
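-- Illustrative usage sketch (comments only, not part of the original file;
-- the G-code string below is a made-up value): the annotation carries a
-- source line number next to a payload, the derived Functor maps over the
-- payload, and stripAnnotation discards the position.
--
--   ann :: Ann String
--   ann = SrcLine 42 "G1 X10"
--
--   stripAnnotation ann == "G1 X10"
--   fmap length ann     == SrcLine 42 6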
| null | https://raw.githubusercontent.com/distrap/gcodehs/8a8dbc66445cff4ce832bb56f42ef03b3215e235/src/Data/GCode/Ann.hs | haskell | # LANGUAGE DeriveFunctor #
module Data.GCode.Ann (
Ann(..)
, stripAnnotation
) where
Type for annotating ` Code ` or ` Canon ` with source positions .
{-| Type for annotating `Code` or `Canon` with source positions.
-}
deriving (Show, Eq, Ord, Functor)
stripAnnotation :: Ann a -> a
stripAnnotation (SrcLine _ x) = x
|
|
2d1386fdc320f68b555ae42ca0c059f446150f741a0fba51f13b68814c4e74cf | ulyssesdotcodes/oscillare | Server.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE ViewPatterns #-}
module Server where
import Control.Concurrent (forkIO, killThread)
import Control.Monad
import Data.Aeson
import GHC.Generics
import Language.Haskell.Interpreter
import Language.Haskell.Interpreter.Unsafe
import Network (listenOn, withSocketsDo, accept, PortID(..), Socket)
import System.Environment (getArgs)
import System.FilePath.Posix
import System.FSNotify
import System.IO (stdout, hFlush, hSetBuffering, hGetContents, hGetLine, hPutStrLn, BufferMode(..), Handle)
import System.CPUTime
import Text.Printf
import Debug.Trace
import qualified Data.ByteString.Char8 as BS
run :: IO ()
run = do
getArgs >>= \case
[pathToWatch] -> watchPath pathToWatch
_ -> error "Error: Name a file"
runInterpreter (evalLD "r $ rectangle (float 0.2) (float 0.2)")
>>= putStrLn
. show
watchPath :: FilePath -> IO ()
watchPath path = do
wmgr <- startManagerConf (WatchConfig (Debounce 3) 1 False)
thread <- forkIO (servePath path wmgr)
putStrLn "Press enter to exit"
void getLine
stopManager wmgr
killThread thread
servePath :: FilePath -> WatchManager -> IO ()
servePath path wmgr = withSocketsDo $ do
sock <- listenOn $ PortNumber 5959
putStrLn $ "Listening on 5959"
sockHandler path sock wmgr
evalLD :: FilePath -> InterpreterT IO String
evalLD file = do
loadModules [file]
setImports [takeBaseName file]
eval "network"
sockHandler :: FilePath -> Socket -> WatchManager -> IO ()
sockHandler path sock wmgr = do
(handle, _, _) <- accept sock
hSetBuffering handle NoBuffering
void . forkIO $ watcher path handle wmgr
putStrLn "Reopen"
sockHandler path sock wmgr
watcher :: FilePath -> Handle -> WatchManager -> IO ()
watcher path handle wmgr = do
BS.hPutStrLn handle "I gotcha"
let update = do
putStrLn "Test 2"
start <- getCPUTime
msg <- getCodeOrError path
end <- getCPUTime
printf "Updating took %0.3f sec\n" (((fromIntegral (end - start)) / (10^12)) :: Double)
BS.hPutStrLn handle (BS.pack . show $ encode $ toJSON msg)
let onChange = \case
Modified p _ -> do
putStrLn p
update
return ()
_ -> return ()
update
putStrLn "This should only run once"
_ <- watchDir wmgr (takeDirectory path) (const True) onChange
_ <- hGetLine handle
return ()
Thank you ( github.com/sleexyz ) for this
data Msg = Err String
| Code String
deriving (Show, Generic, ToJSON, FromJSON)
getCodeOrError :: FilePath -> IO Msg
getCodeOrError path = do
unsafeRunInterpreterWithArgs ["-package-db C:\\sr\\snapshots\\36302ab3\\pkgdb", "-package-db ./.stack-work/install/1d0d0160/pkgdb", "-no-user-package-db"] (evalLD path) >>= return . \case
Left err -> case err of
UnknownError str -> Err str
WontCompile errors -> Err . mconcat $ errMsg <$> errors
NotAllowed str -> Err str
GhcException str -> Err str
Right str -> Code str | null | https://raw.githubusercontent.com/ulyssesdotcodes/oscillare/b764ae4352c85f00131b5c585fb8774b6d3cb0d8/src/Server.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE DeriveAnyClass # | # LANGUAGE LambdaCase #
# LANGUAGE DeriveGeneric #
# LANGUAGE ViewPatterns #
module Server where
import Control.Concurrent (forkIO, killThread)
import Control.Monad
import Data.Aeson
import GHC.Generics
import Language.Haskell.Interpreter
import Language.Haskell.Interpreter.Unsafe
import Network (listenOn, withSocketsDo, accept, PortID(..), Socket)
import System.Environment (getArgs)
import System.FilePath.Posix
import System.FSNotify
import System.IO (stdout, hFlush, hSetBuffering, hGetContents, hGetLine, hPutStrLn, BufferMode(..), Handle)
import System.CPUTime
import Text.Printf
import Debug.Trace
import qualified Data.ByteString.Char8 as BS
run :: IO ()
run = do
getArgs >>= \case
[pathToWatch] -> watchPath pathToWatch
_ -> error "Error: Name a file"
runInterpreter (evalLD "r $ rectangle (float 0.2) (float 0.2)")
>>= putStrLn
. show
watchPath :: FilePath -> IO ()
watchPath path = do
wmgr <- startManagerConf (WatchConfig (Debounce 3) 1 False)
thread <- forkIO (servePath path wmgr)
putStrLn "Press enter to exit"
void getLine
stopManager wmgr
killThread thread
servePath :: FilePath -> WatchManager -> IO ()
servePath path wmgr = withSocketsDo $ do
sock <- listenOn $ PortNumber 5959
putStrLn $ "Listening on 5959"
sockHandler path sock wmgr
evalLD :: FilePath -> InterpreterT IO String
evalLD file = do
loadModules [file]
setImports [takeBaseName file]
eval "network"
sockHandler :: FilePath -> Socket -> WatchManager -> IO ()
sockHandler path sock wmgr = do
(handle, _, _) <- accept sock
hSetBuffering handle NoBuffering
void . forkIO $ watcher path handle wmgr
putStrLn "Reopen"
sockHandler path sock wmgr
watcher :: FilePath -> Handle -> WatchManager -> IO ()
watcher path handle wmgr = do
BS.hPutStrLn handle "I gotcha"
let update = do
putStrLn "Test 2"
start <- getCPUTime
msg <- getCodeOrError path
end <- getCPUTime
printf "Updating took %0.3f sec\n" (((fromIntegral (end - start)) / (10^12)) :: Double)
BS.hPutStrLn handle (BS.pack . show $ encode $ toJSON msg)
let onChange = \case
Modified p _ -> do
putStrLn p
update
return ()
_ -> return ()
update
putStrLn "This should only run once"
_ <- watchDir wmgr (takeDirectory path) (const True) onChange
_ <- hGetLine handle
return ()
Thank you ( github.com/sleexyz ) for this
data Msg = Err String
| Code String
deriving (Show, Generic, ToJSON, FromJSON)
getCodeOrError :: FilePath -> IO Msg
getCodeOrError path = do
unsafeRunInterpreterWithArgs ["-package-db C:\\sr\\snapshots\\36302ab3\\pkgdb", "-package-db ./.stack-work/install/1d0d0160/pkgdb", "-no-user-package-db"] (evalLD path) >>= return . \case
Left err -> case err of
UnknownError str -> Err str
WontCompile errors -> Err . mconcat $ errMsg <$> errors
NotAllowed str -> Err str
GhcException str -> Err str
Right str -> Code str |
f20d7605747016684d84ec24ca21ce4b3843618b5045c851f9d5fc832372df48 | Copilot-Language/copilot-language | Ord.hs | --------------------------------------------------------------------------------
-- Copyright © 2011 National Institute of Aerospace / Galois, Inc.
--------------------------------------------------------------------------------
{-# LANGUAGE Safe #-}
-- | Comparison operators applied point-wise on streams.
module Copilot.Language.Operators.Ord
( (<=)
, (>=)
, (<)
, (>)
) where
import Copilot.Core (Typed, typeOf)
import qualified Copilot.Core as Core
import Copilot.Language.Prelude
import Copilot.Language.Stream
import qualified Prelude as P
--------------------------------------------------------------------------------
-- | Compare two streams point-wise for order.
--
-- The output stream contains the value True at a point in time if the
-- element in the first stream is smaller or equal than the element in
-- the second stream at that point in time, and False otherwise.
(<=) :: (P.Ord a, Typed a) => Stream a -> Stream a -> Stream Bool
(Const x) <= (Const y) = Const (x P.<= y)
x <= y = Op2 (Core.Le typeOf) x y
-- | Compare two streams point-wise for order.
--
-- The output stream contains the value True at a point in time if the
-- element in the first stream is greater or equal than the element in
-- the second stream at that point in time, and False otherwise.
(>=) :: (P.Ord a, Typed a) => Stream a -> Stream a -> Stream Bool
(Const x) >= (Const y) = Const (x P.>= y)
x >= y = Op2 (Core.Ge typeOf) x y
-- | Compare two streams point-wise for order.
--
-- The output stream contains the value True at a point in time if the
-- element in the first stream is smaller than the element in the second stream
-- at that point in time, and False otherwise.
(<) :: (P.Ord a, Typed a) => Stream a -> Stream a -> Stream Bool
(Const x) < (Const y) = Const (x P.< y)
x < y = Op2 (Core.Lt typeOf) x y
-- | Compare two streams point-wise for order.
--
-- The output stream contains the value True at a point in time if the element
-- in the first stream is greater than the element in the second stream at that
-- point in time, and False otherwise.
(>) :: (P.Ord a, Typed a) => Stream a -> Stream a -> Stream Bool
(Const x) > (Const y) = Const (x P.> y)
x > y = Op2 (Core.Gt typeOf) x y
--------------------------------------------------------------------------------
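-- Illustrative sketch (hypothetical streams, not part of this module): the
-- operators above lift ordinary comparisons point-wise, so for
--
--   xs, ys :: Stream Int32
--
-- the expression (xs < ys) denotes a Stream Bool whose value at each step is
-- the comparison of the corresponding elements, and when both arguments are
-- Const the result is folded to a Const at build time, exactly as in the
-- equations above.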
| null | https://raw.githubusercontent.com/Copilot-Language/copilot-language/ea3bb24bc02079d2b96be9e55c73cae7430fca2f/src/Copilot/Language/Operators/Ord.hs | haskell | ------------------------------------------------------------------------------
------------------------------------------------------------------------------
# LANGUAGE Safe #
| Comparison operators applied point-wise on streams.
------------------------------------------------------------------------------
The output stream contains the value True at a point in time if the
The output stream contains the value True at a point in time if the
The output stream contains the value True at a point in time if the
at that point in time, and False otherwise.
The output stream contains the value True at a point in time if the element
point in time, and False otherwise.
------------------------------------------------------------------------------ | Copyright © 2011 National Institute of Aerospace / Galois , Inc.
module Copilot.Language.Operators.Ord
( (<=)
, (>=)
, (<)
, (>)
) where
import Copilot.Core (Typed, typeOf)
import qualified Copilot.Core as Core
import Copilot.Language.Prelude
import Copilot.Language.Stream
import qualified Prelude as P
| Compare two streams point - wise for order .
element in the first stream is smaller or equal than the element in
the second stream at that point in time , and False otherwise .
(<=) :: (P.Ord a, Typed a) => Stream a -> Stream a -> Stream Bool
(Const x) <= (Const y) = Const (x P.<= y)
x <= y = Op2 (Core.Le typeOf) x y
| Compare two streams point - wise for order .
element in the first stream is greater or equal than the element in
the second stream at that point in time , and False otherwise .
(>=) :: (P.Ord a, Typed a) => Stream a -> Stream a -> Stream Bool
(Const x) >= (Const y) = Const (x P.>= y)
x >= y = Op2 (Core.Ge typeOf) x y
| Compare two streams point - wise for order .
element in the first stream is smaller than the element in the second stream
(<) :: (P.Ord a, Typed a) => Stream a -> Stream a -> Stream Bool
(Const x) < (Const y) = Const (x P.< y)
x < y = Op2 (Core.Lt typeOf) x y
| Compare two streams point - wise for order .
in the first stream is greater than the element in the second stream at that
(>) :: (P.Ord a, Typed a) => Stream a -> Stream a -> Stream Bool
(Const x) > (Const y) = Const (x P.> y)
x > y = Op2 (Core.Gt typeOf) x y
|
f1efbfb658565c61f148027b567c474ad88c9597b18d976135d43ca68e2beaa0 | elastic/eui-cljs | json_popover_content.cljs | (ns eui.json-popover-content
(:require ["@elastic/eui/lib/components/datagrid/body/data_grid_cell_popover.js" :as eui]))
(def JsonPopoverContent eui/JsonPopoverContent)
(def DataGridCellPopoverContext eui/DataGridCellPopoverContext)
(def DefaultCellPopover eui/DefaultCellPopover)
(def useCellPopover eui/useCellPopover)
| null | https://raw.githubusercontent.com/elastic/eui-cljs/ad60b57470a2eb8db9bca050e02f52dd964d9f8e/src/eui/json_popover_content.cljs | clojure | (ns eui.json-popover-content
(:require ["@elastic/eui/lib/components/datagrid/body/data_grid_cell_popover.js" :as eui]))
(def JsonPopoverContent eui/JsonPopoverContent)
(def DataGridCellPopoverContext eui/DataGridCellPopoverContext)
(def DefaultCellPopover eui/DefaultCellPopover)
(def useCellPopover eui/useCellPopover)
|
|
83702d2821fdc79187dff18b07aa598007152bdfc2b10af613dfe7bc6f6c9d4e | futurice/haskell-mega-repo | BookInformationResponse.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DerivingVia #-}
# LANGUAGE InstanceSigs #
# LANGUAGE TemplateHaskell #
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Futurice.App.Library.Types.BookInformationResponse where
import Futurice.App.Sisosota.Types (ContentHash)
import Futurice.Generics
import Futurice.Prelude
import Prelude ()
import Futurice.App.Library.Types.BookInformation
import Futurice.App.Library.Types.Item
import Futurice.App.Library.Types.Library
data Books = Books
{ _booksLibrary :: !Library
, _booksBookId :: !ItemId
}
deriving (Eq, Ord, Show, GhcGeneric, ToSchema, Typeable, SopGeneric, HasDatatypeInfo)
deriving (ToJSON, FromJSON) via (Sopica Books)
data BookInformationResponse = BookInformationResponse
{ _id :: !BookInformationId
, _title :: !Text
, _ISBN :: !Text
, _author :: !Text
, _publisher :: !Text
, _published :: !Int
, _cover :: !ContentHash
, _infoLink :: !Text
, _books :: ![Books]
} deriving (Show, Typeable, GhcGeneric, SopGeneric, HasDatatypeInfo)
deriving (ToJSON, FromJSON) via (Sopica BookInformationResponse)
instance ToSchema BookInformationResponse
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/2647723f12f5435e2edc373f6738386a9668f603/library-app/src/Futurice/App/Library/Types/BookInformationResponse.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DerivingVia #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators # | # LANGUAGE InstanceSigs #
# LANGUAGE TemplateHaskell #
module Futurice.App.Library.Types.BookInformationResponse where
import Futurice.App.Sisosota.Types (ContentHash)
import Futurice.Generics
import Futurice.Prelude
import Prelude ()
import Futurice.App.Library.Types.BookInformation
import Futurice.App.Library.Types.Item
import Futurice.App.Library.Types.Library
data Books = Books
{ _booksLibrary :: !Library
, _booksBookId :: !ItemId
}
deriving (Eq, Ord, Show, GhcGeneric, ToSchema, Typeable, SopGeneric, HasDatatypeInfo)
deriving (ToJSON, FromJSON) via (Sopica Books)
data BookInformationResponse = BookInformationResponse
{ _id :: !BookInformationId
, _title :: !Text
, _ISBN :: !Text
, _author :: !Text
, _publisher :: !Text
, _published :: !Int
, _cover :: !ContentHash
, _infoLink :: !Text
, _books :: ![Books]
} deriving (Show, Typeable, GhcGeneric, SopGeneric, HasDatatypeInfo)
deriving (ToJSON, FromJSON) via (Sopica BookInformationResponse)
instance ToSchema BookInformationResponse
|
bea2e5cbcb2ef0aac0afd82e7fc62e00e82ebf8cc009405878af42156dba22a9 | sethtrain/clj-twilio | core.clj | (ns twilio.test.twiml.core
(:use [twilio.twiml.core] :reload)
(:use [clojure.test]
[twilio.twiml.voice]))
(deftest test-single-verb
(is (= (hangup)
[:Hangup])))
(deftest test-simple-verb
(is (= (record)
(record {})))
(is (= (record {:timeout 1})
[:Record {:timeout 1}])))
(deftest test-content-verb
(is (= (say "Message")
(say {} "Message")))
(is (= (say "Message")
[:Say {} "Message"]))
(is (= (say {:voice "woman"} "Message")
[:Say {:voice "woman"} "Message"])))
(deftest test-nested-verb
(is (= (gather)
[:Gather {} nil]))
(is (= (gather (say "Message"))
[:Gather {} (list [:Say {} "Message"])]))
(is (= (gather {} (say "Message") (play "message.mp3"))
[:Gather {} (list [:Say {} "Message"] [:Play {} "message.mp3"])])))
| null | https://raw.githubusercontent.com/sethtrain/clj-twilio/924b2aab51f543611dc37022f7c0e82b677ee521/test/twilio/test/twiml/core.clj | clojure | (ns twilio.test.twiml.core
(:use [twilio.twiml.core] :reload)
(:use [clojure.test]
[twilio.twiml.voice]))
(deftest test-single-verb
(is (= (hangup)
[:Hangup])))
(deftest test-simple-verb
(is (= (record)
(record {})))
(is (= (record {:timeout 1})
[:Record {:timeout 1}])))
(deftest test-content-verb
(is (= (say "Message")
(say {} "Message")))
(is (= (say "Message")
[:Say {} "Message"]))
(is (= (say {:voice "woman"} "Message")
[:Say {:voice "woman"} "Message"])))
(deftest test-nested-verb
(is (= (gather)
[:Gather {} nil]))
(is (= (gather (say "Message"))
[:Gather {} (list [:Say {} "Message"])]))
(is (= (gather {} (say "Message") (play "message.mp3"))
[:Gather {} (list [:Say {} "Message"] [:Play {} "message.mp3"])])))
|
|
7c14767ea71e0a16884bd5087945ebe36fb762266ee6462d1e9fced220264461 | ivg/x86-lifter | decode.ml | open Core_kernel.Std
open Bap.Std
open Opcode
module Dis = Disasm_expert.Basic
let decode_any read insn =
Option.try_with (fun () -> read (Sexp.of_string (Dis.Insn.name insn)))
let opcode = decode_any t_of_sexp
let prefix = decode_any prefix_of_sexp
| null | https://raw.githubusercontent.com/ivg/x86-lifter/52254f2ab6fb5648af56959a1c7b7f901f608e89/decode.ml | ocaml | open Core_kernel.Std
open Bap.Std
open Opcode
module Dis = Disasm_expert.Basic
let decode_any read insn =
Option.try_with (fun () -> read (Sexp.of_string (Dis.Insn.name insn)))
let opcode = decode_any t_of_sexp
let prefix = decode_any prefix_of_sexp
|
|
10720dea03db815a0b23ced7ea68031566f851b43b707e1c5c278dc91f2fb60e | qfpl/reflex-workshop | Counter.hs | |
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE RecursiveDo #-}
module Solutions.Behaviors.Creating.Counter (
counterSolution
) where
import Control.Monad.Fix (MonadFix)
import Data.Function ((&))
import Reflex
counterSolution :: (Reflex t, MonadFix m, MonadHold t m)
=> Event t (Int -> Int)
-> m (Behavior t Int, Event t Int)
counterSolution eFn = mdo
let e = flip ($) <$> b <@> eFn
b <- hold 0 e
  -- b <- hold 0 $ (&) <$> b <@> e
pure (b, e)
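-- Usage sketch (hypothetical caller, not part of the exercise): feeding an
-- update stream such as ((+1) <$ eClick) :: Event t (Int -> Int) yields a
-- Behavior holding the running count and an Event firing with each new total.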
| null | https://raw.githubusercontent.com/qfpl/reflex-workshop/244ef13fb4b2e884f455eccc50072e98d1668c9e/src/Solutions/Behaviors/Creating/Counter.hs | haskell | |
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable
-}
# LANGUAGE RecursiveDo #
module Solutions.Behaviors.Creating.Counter (
counterSolution
) where
import Control.Monad.Fix (MonadFix)
import Data.Function ((&))
import Reflex
counterSolution :: (Reflex t, MonadFix m, MonadHold t m)
=> Event t (Int -> Int)
-> m (Behavior t Int, Event t Int)
counterSolution eFn = mdo
let e = flip ($) <$> b <@> eFn
b <- hold 0 e
b < - hold 0 $ ( & ) < $ > b < @ > e
pure (b, e)
|
|
478819c3f46edccaf90094d00235ce97fc58d24c052f70b0bf15ed78e3238f3b | ml4tp/tcoq | tactics.mli | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Loc
open Names
open Term
open Environ
open Proof_type
open Evd
open Clenv
open Redexpr
open Globnames
open Tacexpr
open Pattern
open Unification
open Misctypes
open Locus
(** Main tactics defined in ML. This file is huge and should probably be split
    in more reasonable units at some point. Because of its size and age, the
    implementation features various styles and stages of the proof engine.
    This has to be uniformized someday. *)
(** {6 General functions. } *)
val is_quantified_hypothesis : Id.t -> ([`NF],'b) Proofview.Goal.t -> bool
(** {6 Primitive tactics. } *)
val introduction : ?check:bool -> Id.t -> unit Proofview.tactic
val convert_concl : ?check:bool -> types -> cast_kind -> unit Proofview.tactic
val convert_hyp : ?check:bool -> Context.Named.Declaration.t -> unit Proofview.tactic
val convert_concl_no_check : types -> cast_kind -> unit Proofview.tactic
val convert_hyp_no_check : Context.Named.Declaration.t -> unit Proofview.tactic
val mutual_fix :
Id.t -> int -> (Id.t * int * constr) list -> int -> unit Proofview.tactic
val fix : Id.t option -> int -> unit Proofview.tactic
val mutual_cofix : Id.t -> (Id.t * constr) list -> int -> unit Proofview.tactic
val cofix : Id.t option -> unit Proofview.tactic
val convert : constr -> constr -> unit Proofview.tactic
val convert_leq : constr -> constr -> unit Proofview.tactic
(** {6 Introduction tactics. } *)
val fresh_id_in_env : Id.t list -> Id.t -> env -> Id.t
val fresh_id : Id.t list -> Id.t -> goal sigma -> Id.t
val find_intro_names : Context.Rel.t -> goal sigma -> Id.t list
val intro : unit Proofview.tactic
val introf : unit Proofview.tactic
val intro_move : Id.t option -> Id.t move_location -> unit Proofview.tactic
val intro_move_avoid : Id.t option -> Id.t list -> Id.t move_location -> unit Proofview.tactic
(** [intro_avoiding idl] acts as intro but prevents the new Id.t
to belong to [idl] *)
val intro_avoiding : Id.t list -> unit Proofview.tactic
val intro_replacing : Id.t -> unit Proofview.tactic
val intro_using : Id.t -> unit Proofview.tactic
val intro_mustbe_force : Id.t -> unit Proofview.tactic
val intro_then : (Id.t -> unit Proofview.tactic) -> unit Proofview.tactic
val intros_using : Id.t list -> unit Proofview.tactic
val intros_replacing : Id.t list -> unit Proofview.tactic
val intros_possibly_replacing : Id.t list -> unit Proofview.tactic
val intros : unit Proofview.tactic
(** [depth_of_quantified_hypothesis b h g] returns the index of [h] in
the conclusion of goal [g], up to head-reduction if [b] is [true] *)
val depth_of_quantified_hypothesis :
bool -> quantified_hypothesis -> ([`NF],'b) Proofview.Goal.t -> int
val intros_until : quantified_hypothesis -> unit Proofview.tactic
val intros_clearing : bool list -> unit Proofview.tactic
(** Assuming a tactic [tac] depending on an hypothesis Id.t,
    [try_intros_until tac arg] first assumes that arg denotes a
    quantified hypothesis (denoted by name or by index) and try to
    introduce it in context before to apply [tac], otherwise assume the
    hypothesis is already in context and directly apply [tac] *)
val try_intros_until :
(Id.t -> unit Proofview.tactic) -> quantified_hypothesis -> unit Proofview.tactic
(** Apply a tactic on a quantified hypothesis, an hypothesis in context
or a term with bindings *)
val onInductionArg :
(clear_flag -> constr with_bindings -> unit Proofview.tactic) ->
constr with_bindings destruction_arg -> unit Proofview.tactic
val force_destruction_arg : evars_flag -> env -> evar_map ->
delayed_open_constr_with_bindings destruction_arg ->
evar_map * constr with_bindings destruction_arg
(** Tell if a used hypothesis should be cleared by default or not *)
val use_clear_hyp_by_default : unit -> bool
(** {6 Introduction tactics with eliminations. } *)
val intro_patterns : evars_flag -> intro_patterns -> unit Proofview.tactic
val intro_patterns_to : evars_flag -> Id.t move_location -> intro_patterns ->
unit Proofview.tactic
val intro_patterns_bound_to : evars_flag -> int -> Id.t move_location -> intro_patterns ->
unit Proofview.tactic
val intro_pattern_to : evars_flag -> Id.t move_location -> delayed_open_constr intro_pattern_expr ->
unit Proofview.tactic
(** Implements user-level "intros", with [] standing for "**" *)
val intros_patterns : evars_flag -> intro_patterns -> unit Proofview.tactic
(** {6 Exact tactics. } *)
val assumption : unit Proofview.tactic
val exact_no_check : constr -> unit Proofview.tactic
val vm_cast_no_check : constr -> unit Proofview.tactic
val native_cast_no_check : constr -> unit Proofview.tactic
val exact_check : constr -> unit Proofview.tactic
val exact_proof : Constrexpr.constr_expr -> unit Proofview.tactic
(** {6 Reduction tactics. } *)
type tactic_reduction = env -> evar_map -> constr -> constr
type change_arg = patvar_map -> constr Sigma.run
val make_change_arg : constr -> change_arg
val reduct_in_hyp : ?check:bool -> tactic_reduction -> hyp_location -> unit Proofview.tactic
val reduct_option : ?check:bool -> tactic_reduction * cast_kind -> goal_location -> unit Proofview.tactic
val reduct_in_concl : tactic_reduction * cast_kind -> unit Proofview.tactic
val change_in_concl : (occurrences * constr_pattern) option -> change_arg -> unit Proofview.tactic
val change_concl : constr -> unit Proofview.tactic
val change_in_hyp : (occurrences * constr_pattern) option -> change_arg ->
hyp_location -> unit Proofview.tactic
val red_in_concl : unit Proofview.tactic
val red_in_hyp : hyp_location -> unit Proofview.tactic
val red_option : goal_location -> unit Proofview.tactic
val hnf_in_concl : unit Proofview.tactic
val hnf_in_hyp : hyp_location -> unit Proofview.tactic
val hnf_option : goal_location -> unit Proofview.tactic
val simpl_in_concl : unit Proofview.tactic
val simpl_in_hyp : hyp_location -> unit Proofview.tactic
val simpl_option : goal_location -> unit Proofview.tactic
val normalise_in_concl : unit Proofview.tactic
val normalise_in_hyp : hyp_location -> unit Proofview.tactic
val normalise_option : goal_location -> unit Proofview.tactic
val normalise_vm_in_concl : unit Proofview.tactic
val unfold_in_concl :
(occurrences * evaluable_global_reference) list -> unit Proofview.tactic
val unfold_in_hyp :
(occurrences * evaluable_global_reference) list -> hyp_location -> unit Proofview.tactic
val unfold_option :
(occurrences * evaluable_global_reference) list -> goal_location -> unit Proofview.tactic
val change :
constr_pattern option -> change_arg -> clause -> unit Proofview.tactic
val pattern_option :
(occurrences * constr) list -> goal_location -> unit Proofview.tactic
val reduce : red_expr -> clause -> unit Proofview.tactic
val unfold_constr : global_reference -> unit Proofview.tactic
(** {6 Modification of the local context. } *)
val clear : Id.t list -> unit Proofview.tactic
val clear_body : Id.t list -> unit Proofview.tactic
val unfold_body : Id.t -> unit Proofview.tactic
val keep : Id.t list -> unit Proofview.tactic
val apply_clear_request : clear_flag -> bool -> constr -> unit Proofview.tactic
val specialize : constr with_bindings -> intro_pattern option -> unit Proofview.tactic
val move_hyp : Id.t -> Id.t move_location -> unit Proofview.tactic
val rename_hyp : (Id.t * Id.t) list -> unit Proofview.tactic
val revert : Id.t list -> unit Proofview.tactic
(** {6 Resolution tactics. } *)
val apply_type : constr -> constr list -> unit Proofview.tactic
val bring_hyps : Context.Named.t -> unit Proofview.tactic
val apply : constr -> unit Proofview.tactic
val eapply : constr -> unit Proofview.tactic
val apply_with_bindings_gen :
advanced_flag -> evars_flag -> (clear_flag * constr with_bindings located) list -> unit Proofview.tactic
val apply_with_delayed_bindings_gen :
advanced_flag -> evars_flag -> (clear_flag * delayed_open_constr_with_bindings located) list -> unit Proofview.tactic
val apply_with_bindings : constr with_bindings -> unit Proofview.tactic
val eapply_with_bindings : constr with_bindings -> unit Proofview.tactic
val cut_and_apply : constr -> unit Proofview.tactic
val apply_in :
advanced_flag -> evars_flag -> Id.t ->
(clear_flag * constr with_bindings located) list ->
intro_pattern option -> unit Proofview.tactic
val apply_delayed_in :
advanced_flag -> evars_flag -> Id.t ->
(clear_flag * delayed_open_constr_with_bindings located) list ->
intro_pattern option -> unit Proofview.tactic
val run_delayed : Environ.env -> evar_map -> 'a delayed_open -> 'a * evar_map
(** {6 Elimination tactics. } *)
(** The general form of an induction principle is the following:

    forall prm1 prm2 ... prmp,                          (induction parameters)
    forall Q1...,(Qi:Ti_1 -> Ti_2 ->...-> Ti_ni),...Qq, (predicates)
    branch1, branch2, ... , branchr,                    (branches of the principle)
    forall (x1:Ti_1) (x2:Ti_2) ... (xni:Ti_ni),         (induction arguments)
    (HI: I prm1..prmp x1...xni)                         (optional main induction arg)
    -> (Qi x1...xni HI (f prm1...prmp x1...xni)).       (conclusion)
                   ^^        ^^^^^^^^^^^^^^^^^^^^^^^^
               optional        optional
               even if HI      argument added if principle
               present above   generated by functional induction
               [indarg]        [farg]

  HI is not present when the induction principle does not come directly from an
  inductive type (like when it is generated by functional induction for
  example). HI is present otherwise BUT may not appear in the conclusion
  (dependent principle). HI and (f...) cannot be both present.

  Principles taken from functional induction have the final (f...). *)
(** [rel_contexts] and [rel_declaration] actually contain triples, and
lists are actually in reverse order to fit [compose_prod]. *)
type elim_scheme = {
elimc: constr with_bindings option;
elimt: types;
indref: global_reference option;
params: Context.Rel.t; (** (prm1,tprm1);(prm2,tprm2)...(prmp,tprmp) *)
nparams: int; (** number of parameters *)
predicates: Context.Rel.t; (** (Qq, (Tq_1 -> Tq_2 ->...-> Tq_nq)), (Q1,...) *)
npredicates: int; (** Number of predicates *)
branches: Context.Rel.t; (** branchr,...,branch1 *)
nbranches: int; (** Number of branches *)
(** (xni, Ti_ni) ... (x1, Ti_1) *)
nargs: int; (** number of arguments *)
(** Some (H,I prm1..prmp x1...xni) if HI is in premisses, None otherwise *)
concl: types; (** Qi x1...xni HI (f...), HI and (f...)
are optional and mutually exclusive *)
indarg_in_concl: bool; (** true if HI appears at the end of conclusion *)
farg_in_concl: bool; (** true if (f...) appears at the end of conclusion *)
}
val compute_elim_sig : ?elimc: constr with_bindings -> types -> elim_scheme
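(* Illustrative note (not part of the original interface): for the familiar
   scheme [nat_ind : forall P : nat -> Prop,
     P 0 -> (forall n, P n -> P (S n)) -> forall n, P n],
   the predicate P plays the role of the Qi above, the two hypotheses are the
   branches, and the final [n] is the main induction argument;
   [compute_elim_sig] recovers exactly this decomposition from the type of an
   eliminator. *)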
(** elim principle with the index of its inductive arg *)
type eliminator = {
elimindex : int option; (** None = find it automatically *)
(** None = don't rename with H-names *)
elimbody : constr with_bindings
}
val general_elim : evars_flag -> clear_flag ->
constr with_bindings -> eliminator -> unit Proofview.tactic
val general_elim_clause : evars_flag -> unify_flags -> identifier option ->
clausenv -> eliminator -> unit Proofview.tactic
val default_elim : evars_flag -> clear_flag -> constr with_bindings ->
unit Proofview.tactic
val simplest_elim : constr -> unit Proofview.tactic
val elim :
evars_flag -> clear_flag -> constr with_bindings -> constr with_bindings option -> unit Proofview.tactic
val simple_induct : quantified_hypothesis -> unit Proofview.tactic
val induction : evars_flag -> clear_flag -> constr -> or_and_intro_pattern option ->
constr with_bindings option -> unit Proofview.tactic
(** {6 Case analysis tactics. } *)
val general_case_analysis : evars_flag -> clear_flag -> constr with_bindings -> unit Proofview.tactic
val simplest_case : constr -> unit Proofview.tactic
val simple_destruct : quantified_hypothesis -> unit Proofview.tactic
val destruct : evars_flag -> clear_flag -> constr -> or_and_intro_pattern option ->
constr with_bindings option -> unit Proofview.tactic
(** {6 Generic case analysis / induction tactics. } *)
(** Implements user-level "destruct" and "induction" *)
val induction_destruct : rec_flag -> evars_flag ->
(delayed_open_constr_with_bindings destruction_arg
* (intro_pattern_naming option * or_and_intro_pattern option)
* clause option) list *
constr with_bindings option -> unit Proofview.tactic
(** {6 Eliminations giving the type instead of the proof. } *)
val case_type : types -> unit Proofview.tactic
val elim_type : types -> unit Proofview.tactic
(** {6 Constructor tactics. } *)
val constructor_tac : evars_flag -> int option -> int ->
constr bindings -> unit Proofview.tactic
val any_constructor : evars_flag -> unit Proofview.tactic option -> unit Proofview.tactic
val one_constructor : int -> constr bindings -> unit Proofview.tactic
val left : constr bindings -> unit Proofview.tactic
val right : constr bindings -> unit Proofview.tactic
val split : constr bindings -> unit Proofview.tactic
val left_with_bindings : evars_flag -> constr bindings -> unit Proofview.tactic
val right_with_bindings : evars_flag -> constr bindings -> unit Proofview.tactic
val split_with_bindings : evars_flag -> constr bindings list -> unit Proofview.tactic
val simplest_left : unit Proofview.tactic
val simplest_right : unit Proofview.tactic
val simplest_split : unit Proofview.tactic
(** {6 Equality tactics. } *)
val setoid_reflexivity : unit Proofview.tactic Hook.t
val reflexivity_red : bool -> unit Proofview.tactic
val reflexivity : unit Proofview.tactic
val intros_reflexivity : unit Proofview.tactic
val setoid_symmetry : unit Proofview.tactic Hook.t
val symmetry_red : bool -> unit Proofview.tactic
val symmetry : unit Proofview.tactic
val setoid_symmetry_in : (Id.t -> unit Proofview.tactic) Hook.t
val intros_symmetry : clause -> unit Proofview.tactic
val setoid_transitivity : (constr option -> unit Proofview.tactic) Hook.t
val transitivity_red : bool -> constr option -> unit Proofview.tactic
val transitivity : constr -> unit Proofview.tactic
val etransitivity : unit Proofview.tactic
val intros_transitivity : constr option -> unit Proofview.tactic
(** {6 Cut tactics. } *)
val assert_before_replacing: Id.t -> types -> unit Proofview.tactic
val assert_after_replacing : Id.t -> types -> unit Proofview.tactic
val assert_before : Name.t -> types -> unit Proofview.tactic
val assert_after : Name.t -> types -> unit Proofview.tactic
val assert_as : (* true = before *) bool ->
(* optionally tell if a specialization of some hyp: *) identifier option ->
intro_pattern option -> constr -> unit Proofview.tactic
(** Implements the tactics assert, enough and pose proof; note that "by"
    applies on the first goal for both assert and enough *)
val assert_by : Name.t -> types -> unit Proofview.tactic ->
unit Proofview.tactic
val enough_by : Name.t -> types -> unit Proofview.tactic ->
unit Proofview.tactic
val pose_proof : Name.t -> constr ->
unit Proofview.tactic
(** Common entry point for user-level "assert", "enough" and "pose proof" *)
val forward : bool -> unit Proofview.tactic option option ->
intro_pattern option -> constr -> unit Proofview.tactic
(** Implements the tactic cut, actually a modus ponens rule *)
val cut : types -> unit Proofview.tactic
(** {6 Tactics for adding local definitions. } *)
val letin_tac : (bool * intro_pattern_naming) option ->
Name.t -> constr -> types option -> clause -> unit Proofview.tactic
(** Common entry point for user-level "set", "pose" and "remember" *)
val letin_pat_tac : (bool * intro_pattern_naming) option ->
Name.t -> pending_constr -> clause -> unit Proofview.tactic
(** {6 Generalization tactics. } *)
val generalize : constr list -> unit Proofview.tactic
val generalize_gen : (constr Locus.with_occurrences * Name.t) list -> unit Proofview.tactic
val new_generalize_gen : ((occurrences * constr) * Name.t) list -> unit Proofview.tactic
val generalize_dep : ?with_let:bool (** Don't lose let bindings *) -> constr -> unit Proofview.tactic
(** {6 Other tactics. } *)
val unify : ?state:Names.transparent_state -> constr -> constr -> unit Proofview.tactic
val tclABSTRACT : Id.t option -> unit Proofview.tactic -> unit Proofview.tactic
val abstract_generalize : ?generalize_vars:bool -> ?force_dep:bool -> Id.t -> unit Proofview.tactic
val specialize_eqs : Id.t -> unit Proofview.tactic
val general_rewrite_clause :
(bool -> evars_flag -> constr with_bindings -> clause -> unit Proofview.tactic) Hook.t
val subst_one :
(bool -> Id.t -> Id.t * constr * bool -> unit Proofview.tactic) Hook.t
val declare_intro_decomp_eq :
((int -> unit Proofview.tactic) -> Coqlib.coq_eq_data * types *
(types * constr * constr) ->
constr * types -> unit Proofview.tactic) -> unit
(** {6 Simple form of basic tactics. } *)
module Simple : sig
(** Simplified version of some of the above tactics *)
val intro : Id.t -> unit Proofview.tactic
val apply : constr -> unit Proofview.tactic
val eapply : constr -> unit Proofview.tactic
val elim : constr -> unit Proofview.tactic
val case : constr -> unit Proofview.tactic
val apply_in : identifier -> constr -> unit Proofview.tactic
end
(** {6 Tacticals defined directly in term of Proofview} *)
module New : sig
val refine : ?unsafe:bool -> constr Sigma.run -> unit Proofview.tactic
(** [refine ?unsafe c] is [Refine.refine ?unsafe c]
followed by beta-iota-reduction of the conclusion. *)
val reduce_after_refine : unit Proofview.tactic
(** The reducing tactic called after {!refine}. *)
end
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/tactics/tactics.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* [intro_avoiding idl] acts as intro but prevents the new Id.t
to belong to [idl]
* [depth_of_quantified_hypothesis b h g] returns the index of [h] in
the conclusion of goal [g], up to head-reduction if [b] is [true]
* Apply a tactic on a quantified hypothesis, an hypothesis in context
or a term with bindings
* Tell if a used hypothesis should be cleared by default or not
* Implements user-level "intros", with [] standing for "**"
* [rel_contexts] and [rel_declaration] actually contain triples, and
lists are actually in reverse order to fit [compose_prod].
* (prm1,tprm1);(prm2,tprm2)...(prmp,tprmp)
* number of parameters
* (Qq, (Tq_1 -> Tq_2 ->...-> Tq_nq)), (Q1,...)
* Number of predicates
* branchr,...,branch1
* Number of branches
* number of arguments
* Qi x1...xni HI (f...), HI and (f...)
are optional and mutually exclusive
* true if HI appears at the end of conclusion
* true if (f...) appears at the end of conclusion
* elim principle with the index of its inductive arg
* None = find it automatically
* Implements user-level "destruct" and "induction"
true = before
optionally tell if a specialization of some hyp:
* Common entry point for user-level "assert", "enough" and "pose proof"
* Implements the tactic cut, actually a modus ponens rule
* Common entry point for user-level "set", "pose" and "remember"
* Don't lose let bindings
* Simplified version of some of the above tactics
* [refine ?unsafe c] is [Refine.refine ?unsafe c]
followed by beta-iota-reduction of the conclusion.
* The reducing tactic called after {!refine}. | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Loc
open Names
open Term
open Environ
open Proof_type
open Evd
open Clenv
open Redexpr
open Globnames
open Tacexpr
open Pattern
open Unification
open Misctypes
open Locus
(** Main tactics defined in ML. This file is huge and should probably be split
    in more reasonable units at some point. Because of its size and age, the
    implementation features various styles and stages of the proof engine.
    This has to be uniformized someday. *)
* { 6 General functions . }
val is_quantified_hypothesis : Id.t -> ([`NF],'b) Proofview.Goal.t -> bool
* { 6 Primitive tactics . }
val introduction : ?check:bool -> Id.t -> unit Proofview.tactic
val convert_concl : ?check:bool -> types -> cast_kind -> unit Proofview.tactic
val convert_hyp : ?check:bool -> Context.Named.Declaration.t -> unit Proofview.tactic
val convert_concl_no_check : types -> cast_kind -> unit Proofview.tactic
val convert_hyp_no_check : Context.Named.Declaration.t -> unit Proofview.tactic
val mutual_fix :
Id.t -> int -> (Id.t * int * constr) list -> int -> unit Proofview.tactic
val fix : Id.t option -> int -> unit Proofview.tactic
val mutual_cofix : Id.t -> (Id.t * constr) list -> int -> unit Proofview.tactic
val cofix : Id.t option -> unit Proofview.tactic
val convert : constr -> constr -> unit Proofview.tactic
val convert_leq : constr -> constr -> unit Proofview.tactic
* { 6 Introduction tactics . }
val fresh_id_in_env : Id.t list -> Id.t -> env -> Id.t
val fresh_id : Id.t list -> Id.t -> goal sigma -> Id.t
val find_intro_names : Context.Rel.t -> goal sigma -> Id.t list
val intro : unit Proofview.tactic
val introf : unit Proofview.tactic
val intro_move : Id.t option -> Id.t move_location -> unit Proofview.tactic
val intro_move_avoid : Id.t option -> Id.t list -> Id.t move_location -> unit Proofview.tactic
val intro_avoiding : Id.t list -> unit Proofview.tactic
val intro_replacing : Id.t -> unit Proofview.tactic
val intro_using : Id.t -> unit Proofview.tactic
val intro_mustbe_force : Id.t -> unit Proofview.tactic
val intro_then : (Id.t -> unit Proofview.tactic) -> unit Proofview.tactic
val intros_using : Id.t list -> unit Proofview.tactic
val intros_replacing : Id.t list -> unit Proofview.tactic
val intros_possibly_replacing : Id.t list -> unit Proofview.tactic
val intros : unit Proofview.tactic
val depth_of_quantified_hypothesis :
bool -> quantified_hypothesis -> ([`NF],'b) Proofview.Goal.t -> int
val intros_until : quantified_hypothesis -> unit Proofview.tactic
val intros_clearing : bool list -> unit Proofview.tactic
(** Assuming a tactic [tac] depending on an hypothesis Id.t,
    [try_intros_until tac arg] first assumes that arg denotes a
    quantified hypothesis (denoted by name or by index) and try to
    introduce it in context before to apply [tac], otherwise assume the
    hypothesis is already in context and directly apply [tac] *)
val try_intros_until :
(Id.t -> unit Proofview.tactic) -> quantified_hypothesis -> unit Proofview.tactic
val onInductionArg :
(clear_flag -> constr with_bindings -> unit Proofview.tactic) ->
constr with_bindings destruction_arg -> unit Proofview.tactic
val force_destruction_arg : evars_flag -> env -> evar_map ->
delayed_open_constr_with_bindings destruction_arg ->
evar_map * constr with_bindings destruction_arg
val use_clear_hyp_by_default : unit -> bool
* { 6 Introduction tactics with eliminations . }
val intro_patterns : evars_flag -> intro_patterns -> unit Proofview.tactic
val intro_patterns_to : evars_flag -> Id.t move_location -> intro_patterns ->
unit Proofview.tactic
val intro_patterns_bound_to : evars_flag -> int -> Id.t move_location -> intro_patterns ->
unit Proofview.tactic
val intro_pattern_to : evars_flag -> Id.t move_location -> delayed_open_constr intro_pattern_expr ->
unit Proofview.tactic
val intros_patterns : evars_flag -> intro_patterns -> unit Proofview.tactic
* { 6 Exact tactics . }
val assumption : unit Proofview.tactic
val exact_no_check : constr -> unit Proofview.tactic
val vm_cast_no_check : constr -> unit Proofview.tactic
val native_cast_no_check : constr -> unit Proofview.tactic
val exact_check : constr -> unit Proofview.tactic
val exact_proof : Constrexpr.constr_expr -> unit Proofview.tactic
* { 6 Reduction tactics . }
type tactic_reduction = env -> evar_map -> constr -> constr
type change_arg = patvar_map -> constr Sigma.run
val make_change_arg : constr -> change_arg
val reduct_in_hyp : ?check:bool -> tactic_reduction -> hyp_location -> unit Proofview.tactic
val reduct_option : ?check:bool -> tactic_reduction * cast_kind -> goal_location -> unit Proofview.tactic
val reduct_in_concl : tactic_reduction * cast_kind -> unit Proofview.tactic
val change_in_concl : (occurrences * constr_pattern) option -> change_arg -> unit Proofview.tactic
val change_concl : constr -> unit Proofview.tactic
val change_in_hyp : (occurrences * constr_pattern) option -> change_arg ->
hyp_location -> unit Proofview.tactic
val red_in_concl : unit Proofview.tactic
val red_in_hyp : hyp_location -> unit Proofview.tactic
val red_option : goal_location -> unit Proofview.tactic
val hnf_in_concl : unit Proofview.tactic
val hnf_in_hyp : hyp_location -> unit Proofview.tactic
val hnf_option : goal_location -> unit Proofview.tactic
val simpl_in_concl : unit Proofview.tactic
val simpl_in_hyp : hyp_location -> unit Proofview.tactic
val simpl_option : goal_location -> unit Proofview.tactic
val normalise_in_concl : unit Proofview.tactic
val normalise_in_hyp : hyp_location -> unit Proofview.tactic
val normalise_option : goal_location -> unit Proofview.tactic
val normalise_vm_in_concl : unit Proofview.tactic
val unfold_in_concl :
(occurrences * evaluable_global_reference) list -> unit Proofview.tactic
val unfold_in_hyp :
(occurrences * evaluable_global_reference) list -> hyp_location -> unit Proofview.tactic
val unfold_option :
(occurrences * evaluable_global_reference) list -> goal_location -> unit Proofview.tactic
val change :
constr_pattern option -> change_arg -> clause -> unit Proofview.tactic
val pattern_option :
(occurrences * constr) list -> goal_location -> unit Proofview.tactic
val reduce : red_expr -> clause -> unit Proofview.tactic
val unfold_constr : global_reference -> unit Proofview.tactic
* { 6 Modification of the local context . }
val clear : Id.t list -> unit Proofview.tactic
val clear_body : Id.t list -> unit Proofview.tactic
val unfold_body : Id.t -> unit Proofview.tactic
val keep : Id.t list -> unit Proofview.tactic
val apply_clear_request : clear_flag -> bool -> constr -> unit Proofview.tactic
val specialize : constr with_bindings -> intro_pattern option -> unit Proofview.tactic
val move_hyp : Id.t -> Id.t move_location -> unit Proofview.tactic
val rename_hyp : (Id.t * Id.t) list -> unit Proofview.tactic
val revert : Id.t list -> unit Proofview.tactic
* { 6 Resolution tactics . }
val apply_type : constr -> constr list -> unit Proofview.tactic
val bring_hyps : Context.Named.t -> unit Proofview.tactic
val apply : constr -> unit Proofview.tactic
val eapply : constr -> unit Proofview.tactic
val apply_with_bindings_gen :
advanced_flag -> evars_flag -> (clear_flag * constr with_bindings located) list -> unit Proofview.tactic
val apply_with_delayed_bindings_gen :
advanced_flag -> evars_flag -> (clear_flag * delayed_open_constr_with_bindings located) list -> unit Proofview.tactic
val apply_with_bindings : constr with_bindings -> unit Proofview.tactic
val eapply_with_bindings : constr with_bindings -> unit Proofview.tactic
val cut_and_apply : constr -> unit Proofview.tactic
val apply_in :
advanced_flag -> evars_flag -> Id.t ->
(clear_flag * constr with_bindings located) list ->
intro_pattern option -> unit Proofview.tactic
val apply_delayed_in :
advanced_flag -> evars_flag -> Id.t ->
(clear_flag * delayed_open_constr_with_bindings located) list ->
intro_pattern option -> unit Proofview.tactic
val run_delayed : Environ.env -> evar_map -> 'a delayed_open -> 'a * evar_map
* { 6 Elimination tactics . }
(** The general form of an induction principle is the following:

    forall prm1 prm2 ... prmp,                          (induction parameters)
    forall Q1...,(Qi:Ti_1 -> Ti_2 ->...-> Ti_ni),...Qq, (predicates)
    branch1, branch2, ... , branchr,                    (branches of the principle)
    forall (x1:Ti_1) (x2:Ti_2) ... (xni:Ti_ni),         (induction arguments)
    (HI: I prm1..prmp x1...xni)                         (optional main induction arg)
    -> (Qi x1...xni HI (f prm1...prmp x1...xni)).       (conclusion)
                   ^^        ^^^^^^^^^^^^^^^^^^^^^^^^
               optional        optional
               even if HI      argument added if principle
               present above   generated by functional induction
               [indarg]        [farg]

  HI is not present when the induction principle does not come directly from an
  inductive type (like when it is generated by functional induction for
  example). HI is present otherwise BUT may not appear in the conclusion
  (dependent principle). HI and (f...) cannot be both present.

  Principles taken from functional induction have the final (f...). *)
type elim_scheme = {
elimc: constr with_bindings option;
elimt: types;
indref: global_reference option;
* ( xni , Ti_ni ) ... ( x1 , Ti_1 )
(** Some (H,I prm1..prmp x1...xni) if HI is in premisses, None otherwise *)
}
val compute_elim_sig : ?elimc: constr with_bindings -> types -> elim_scheme
type eliminator = {
* None = do n't rename with H - names
elimbody : constr with_bindings
}
val general_elim : evars_flag -> clear_flag ->
constr with_bindings -> eliminator -> unit Proofview.tactic
val general_elim_clause : evars_flag -> unify_flags -> identifier option ->
clausenv -> eliminator -> unit Proofview.tactic
val default_elim : evars_flag -> clear_flag -> constr with_bindings ->
unit Proofview.tactic
val simplest_elim : constr -> unit Proofview.tactic
val elim :
evars_flag -> clear_flag -> constr with_bindings -> constr with_bindings option -> unit Proofview.tactic
val simple_induct : quantified_hypothesis -> unit Proofview.tactic
val induction : evars_flag -> clear_flag -> constr -> or_and_intro_pattern option ->
constr with_bindings option -> unit Proofview.tactic
* { 6 Case analysis tactics . }
val general_case_analysis : evars_flag -> clear_flag -> constr with_bindings -> unit Proofview.tactic
val simplest_case : constr -> unit Proofview.tactic
val simple_destruct : quantified_hypothesis -> unit Proofview.tactic
val destruct : evars_flag -> clear_flag -> constr -> or_and_intro_pattern option ->
constr with_bindings option -> unit Proofview.tactic
* { 6 Generic case analysis / induction tactics . }
val induction_destruct : rec_flag -> evars_flag ->
(delayed_open_constr_with_bindings destruction_arg
* (intro_pattern_naming option * or_and_intro_pattern option)
* clause option) list *
constr with_bindings option -> unit Proofview.tactic
* { 6 Eliminations giving the type instead of the proof . }
val case_type : types -> unit Proofview.tactic
val elim_type : types -> unit Proofview.tactic
* { 6 Constructor tactics . }
val constructor_tac : evars_flag -> int option -> int ->
constr bindings -> unit Proofview.tactic
val any_constructor : evars_flag -> unit Proofview.tactic option -> unit Proofview.tactic
val one_constructor : int -> constr bindings -> unit Proofview.tactic
val left : constr bindings -> unit Proofview.tactic
val right : constr bindings -> unit Proofview.tactic
val split : constr bindings -> unit Proofview.tactic
val left_with_bindings : evars_flag -> constr bindings -> unit Proofview.tactic
val right_with_bindings : evars_flag -> constr bindings -> unit Proofview.tactic
val split_with_bindings : evars_flag -> constr bindings list -> unit Proofview.tactic
val simplest_left : unit Proofview.tactic
val simplest_right : unit Proofview.tactic
val simplest_split : unit Proofview.tactic
* { 6 Equality tactics . }
val setoid_reflexivity : unit Proofview.tactic Hook.t
val reflexivity_red : bool -> unit Proofview.tactic
val reflexivity : unit Proofview.tactic
val intros_reflexivity : unit Proofview.tactic
val setoid_symmetry : unit Proofview.tactic Hook.t
val symmetry_red : bool -> unit Proofview.tactic
val symmetry : unit Proofview.tactic
val setoid_symmetry_in : (Id.t -> unit Proofview.tactic) Hook.t
val intros_symmetry : clause -> unit Proofview.tactic
val setoid_transitivity : (constr option -> unit Proofview.tactic) Hook.t
val transitivity_red : bool -> constr option -> unit Proofview.tactic
val transitivity : constr -> unit Proofview.tactic
val etransitivity : unit Proofview.tactic
val intros_transitivity : constr option -> unit Proofview.tactic
* { 6 Cut tactics . }
val assert_before_replacing: Id.t -> types -> unit Proofview.tactic
val assert_after_replacing : Id.t -> types -> unit Proofview.tactic
val assert_before : Name.t -> types -> unit Proofview.tactic
val assert_after : Name.t -> types -> unit Proofview.tactic
intro_pattern option -> constr -> unit Proofview.tactic
(** Implements the tactics assert, enough and pose proof; note that "by"
    applies on the first goal for both assert and enough *)
val assert_by : Name.t -> types -> unit Proofview.tactic ->
unit Proofview.tactic
val enough_by : Name.t -> types -> unit Proofview.tactic ->
unit Proofview.tactic
val pose_proof : Name.t -> constr ->
unit Proofview.tactic
val forward : bool -> unit Proofview.tactic option option ->
intro_pattern option -> constr -> unit Proofview.tactic
val cut : types -> unit Proofview.tactic
* { 6 Tactics for adding local definitions . }
val letin_tac : (bool * intro_pattern_naming) option ->
Name.t -> constr -> types option -> clause -> unit Proofview.tactic
val letin_pat_tac : (bool * intro_pattern_naming) option ->
Name.t -> pending_constr -> clause -> unit Proofview.tactic
* { 6 tactics . }
val generalize : constr list -> unit Proofview.tactic
val generalize_gen : (constr Locus.with_occurrences * Name.t) list -> unit Proofview.tactic
val new_generalize_gen : ((occurrences * constr) * Name.t) list -> unit Proofview.tactic
* { 6 Other tactics . }
val unify : ?state:Names.transparent_state -> constr -> constr -> unit Proofview.tactic
val tclABSTRACT : Id.t option -> unit Proofview.tactic -> unit Proofview.tactic
val abstract_generalize : ?generalize_vars:bool -> ?force_dep:bool -> Id.t -> unit Proofview.tactic
val specialize_eqs : Id.t -> unit Proofview.tactic
val general_rewrite_clause :
(bool -> evars_flag -> constr with_bindings -> clause -> unit Proofview.tactic) Hook.t
val subst_one :
(bool -> Id.t -> Id.t * constr * bool -> unit Proofview.tactic) Hook.t
val declare_intro_decomp_eq :
((int -> unit Proofview.tactic) -> Coqlib.coq_eq_data * types *
(types * constr * constr) ->
constr * types -> unit Proofview.tactic) -> unit
* { 6 Simple form of basic tactics . }
module Simple : sig
val intro : Id.t -> unit Proofview.tactic
val apply : constr -> unit Proofview.tactic
val eapply : constr -> unit Proofview.tactic
val elim : constr -> unit Proofview.tactic
val case : constr -> unit Proofview.tactic
val apply_in : identifier -> constr -> unit Proofview.tactic
end
* { 6 Tacticals defined directly in term of Proofview }
module New : sig
val refine : ?unsafe:bool -> constr Sigma.run -> unit Proofview.tactic
val reduce_after_refine : unit Proofview.tactic
end
|
490d1250fd4ab1cdf450727fd3dcf8adf7aaa55113873f5ac1261b0079e4a544 | kit-ty-kate/visitors | expr05.ml | open Expr02
let equal : expr -> expr -> bool =
VisitorsRuntime.wrap2 (new iter2 # visit_expr ())
| null | https://raw.githubusercontent.com/kit-ty-kate/visitors/fc53cc486178781e0b1e581eced98e07facb7d29/test/expr05.ml | ocaml | open Expr02
let equal : expr -> expr -> bool =
VisitorsRuntime.wrap2 (new iter2 # visit_expr ())
|
|
f5eadb2419776eac47e985f7a8615e6017e6d58f328fa34e7b9ca68ec42bbeba | refuge/cowdb | cowdb_log_tests.erl | %%-*- mode: erlang -*-
%%
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
-module(cowdb_log_tests).
-include("cowdb_tests.hrl").
-define(setup(F), {setup, fun setup/0, fun teardown/1, F}).
-define(foreach(Fs), {foreach, fun setup/0, fun teardown/1, Fs}).
setup() ->
{ok, Db} = cowdb:open(?tempfile()),
Db.
teardown(Db) ->
ok = cowdb:drop_db(Db).
log_test_() ->
{
"Test snapshotting and log features",
?foreach([
fun should_log_transactions/1,
fun should_fetch_log_in_range/1,
fun log_is_reset_after_compaction/1,
fun should_keep_last_transaction_id_after_compaction/1
])
}.
snapshot_test() ->
{ok, Db} = cowdb:open(?tempfile()),
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
{ok, Db1} = cowdb:get_snapshot(Db, 1),
?assertMatch([{ok, {a, 1}}, not_found, not_found, not_found],
cowdb:mget(Db1, [a, b, c, d])),
{ok, Db2} = cowdb:get_snapshot(Db, 2),
?assertMatch([{ok, {a, 1}}, {ok, {b, 2}}, {ok, {c, 3}}, not_found],
cowdb:mget(Db2, [a, b, c, d])),
{ok, Db3} = cowdb:get_snapshot(Db, 3),
?assertMatch([{ok, {a, 1}}, not_found, {ok, {c, 3}}, {ok, {d, 4}}],
cowdb:mget(Db3, [a, b, c, d])),
ok = cowdb:drop_db(Db).
should_log_transactions(Db) ->
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
LogFun = fun(Got, Acc) ->
{ok, [Got |Acc]}
end,
?_assertMatch({ok, 4, [{3, add, {d, 4}, _},
{3, remove, {b, 2}, _},
{2, add, {c, 3}, _},
{2, add, {b, 2}, _},
{1, add, {a, 1}, _}]},
cowdb:log(Db, 0, 3, LogFun, [])).
should_fetch_log_in_range(Db) ->
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
LogFun = fun(Got, Acc) ->
{ok, [Got |Acc]}
end,
?_assertMatch({ok, 3, [{2, add, {c, 3}, _},
{2, add, {b, 2}, _},
{1, add, {a, 1}, _}]},
cowdb:log(Db, 1, 2, LogFun, [])).
log_is_reset_after_compaction(Db) ->
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
{ok, DbInfo0} = cowdb:database_info(Db),
TxCount0 = proplists:get_value(tx_count, DbInfo0),
ok = cowdb:compact(Db),
timer:sleep(1000),
{ok, DbInfo2} = cowdb:database_info(Db),
TxCount2 = proplists:get_value(tx_count, DbInfo2),
?_assertEqual({4, 1}, {TxCount0, TxCount2}).
should_keep_last_transaction_id_after_compaction(Db) ->
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
{ok, DbInfo0} = cowdb:database_info(Db),
TxEnd0 = proplists:get_value(tx_end, DbInfo0),
ok = cowdb:compact(Db),
timer:sleep(1000),
{ok, DbInfo2} = cowdb:database_info(Db),
TxStart2 = proplists:get_value(tx_start, DbInfo2),
?_assertEqual(TxEnd0, TxStart2).
| null | https://raw.githubusercontent.com/refuge/cowdb/40d72119a59407c7dddc140443b518a90338e347/test/cowdb_log_tests.erl | erlang | -*- mode: erlang -*-
| This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(cowdb_log_tests).
-include("cowdb_tests.hrl").
-define(setup(F), {setup, fun setup/0, fun teardown/1, F}).
-define(foreach(Fs), {foreach, fun setup/0, fun teardown/1, Fs}).
setup() ->
{ok, Db} = cowdb:open(?tempfile()),
Db.
teardown(Db) ->
ok = cowdb:drop_db(Db).
log_test_() ->
{
"Test snapshotting and log features",
?foreach([
fun should_log_transactions/1,
fun should_fetch_log_in_range/1,
fun log_is_reset_after_compaction/1,
fun should_keep_last_transaction_id_after_compaction/1
])
}.
snapshot_test() ->
{ok, Db} = cowdb:open(?tempfile()),
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
{ok, Db1} = cowdb:get_snapshot(Db, 1),
?assertMatch([{ok, {a, 1}}, not_found, not_found, not_found],
cowdb:mget(Db1, [a, b, c, d])),
{ok, Db2} = cowdb:get_snapshot(Db, 2),
?assertMatch([{ok, {a, 1}}, {ok, {b, 2}}, {ok, {c, 3}}, not_found],
cowdb:mget(Db2, [a, b, c, d])),
{ok, Db3} = cowdb:get_snapshot(Db, 3),
?assertMatch([{ok, {a, 1}}, not_found, {ok, {c, 3}}, {ok, {d, 4}}],
cowdb:mget(Db3, [a, b, c, d])),
ok = cowdb:drop_db(Db).
should_log_transactions(Db) ->
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
LogFun = fun(Got, Acc) ->
{ok, [Got |Acc]}
end,
?_assertMatch({ok, 4, [{3, add, {d, 4}, _},
{3, remove, {b, 2}, _},
{2, add, {c, 3}, _},
{2, add, {b, 2}, _},
{1, add, {a, 1}, _}]},
cowdb:log(Db, 0, 3, LogFun, [])).
should_fetch_log_in_range(Db) ->
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
LogFun = fun(Got, Acc) ->
{ok, [Got |Acc]}
end,
?_assertMatch({ok, 3, [{2, add, {c, 3}, _},
{2, add, {b, 2}, _},
{1, add, {a, 1}, _}]},
cowdb:log(Db, 1, 2, LogFun, [])).
log_is_reset_after_compaction(Db) ->
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
{ok, DbInfo0} = cowdb:database_info(Db),
TxCount0 = proplists:get_value(tx_count, DbInfo0),
ok = cowdb:compact(Db),
timer:sleep(1000),
{ok, DbInfo2} = cowdb:database_info(Db),
TxCount2 = proplists:get_value(tx_count, DbInfo2),
?_assertEqual({4, 1}, {TxCount0, TxCount2}).
should_keep_last_transaction_id_after_compaction(Db) ->
{ok, 1} = cowdb:put(Db, a, 1),
{ok, 2} = cowdb:transact(Db, [{add, b, 2},
{add, c, 3}]),
{ok, 3} = cowdb:transact(Db, [{remove, b},
{add, d, 4}]),
{ok, DbInfo0} = cowdb:database_info(Db),
TxEnd0 = proplists:get_value(tx_end, DbInfo0),
ok = cowdb:compact(Db),
timer:sleep(1000),
{ok, DbInfo2} = cowdb:database_info(Db),
TxStart2 = proplists:get_value(tx_start, DbInfo2),
?_assertEqual(TxEnd0, TxStart2).
|
7914cc7665f7b1f8ce30501abe36455b8b48717d840b69dc1adc16b9b886819b | tjammer/schmu | env.mli | open Types
type key = string
type label = { index : int; typename : Path.t }
type t
type imported = string * [ `C | `Schmu ]
type value = {
typ : typ;
param : bool;
const : bool;
global : bool;
imported : imported option;
mut : bool;
}
type warn_kind = Unused | Unmutated | Unused_mod
type unused = (unit, (Path.t * warn_kind * Ast.loc) list) result
type return = {
typ : typ;
const : bool;
global : bool;
mut : bool;
imported : string option;
}
type ext = {
ext_name : string;
ext_typ : typ;
ext_cname : string option;
imported : imported option;
used : bool ref;
closure : bool;
}
(* return type for values *)
type add_kind = Aimpl | Asignature | Amodule of string
val def_value : value
(** Default value, everything is false *)
val empty : unit -> t
val add_value : key -> value -> Ast.loc -> t -> t
(** [add_value key value loc] add value [key] defined at [loc] with type [typ] to env *)
val add_external :
key ->
cname:string option ->
typ ->
imported:imported option ->
closure:bool ->
Ast.loc ->
t ->
t
(** like [add_value], but keeps track of external declarations *)
val change_type : key -> typ -> t -> t
(** To give the generalized type with closure for functions *)
val add_type : Path.t -> in_sig:bool -> typ -> t -> t
val add_record :
Path.t -> add_kind -> params:typ list -> labels:field array -> t -> t
(** [add record record_name ~param ~labels env] returns an env with an added record named [record_name]
optionally parametrized by [param] with typed [labels] *)
val add_variant :
Path.t -> add_kind -> params:typ list -> ctors:ctor array -> t -> t
(** [add_variant variant_name ~param ~ctors env] returns an env with an added variant named [variant_name]
optionally parametrized by [param] with [ctors] *)
val add_alias : Path.t -> add_kind -> typ -> t -> t
val open_function : t -> t
val close_function : t -> t * closed list * unused
(** Returns the variables captured in the closed function scope, and first unused var *)
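(* A typical typing-pass flow might look like this (sketch only; [loc] and
   the concrete [some_typ] value are assumed to come from the caller):

     let env = open_function env in
     let env = add_value "x" { def_value with typ = some_typ } loc env in
     let env, closed_over, unused_warnings = close_function env in
     ...
*)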
val open_module : t -> Ast.loc -> string -> t
(** Doesn't actually open the module, but makes the env ready for matching the following adds to a module *)
val finish_module : t -> t
val close_module : t -> t
val find_val : key -> t -> return
val find_val_opt : key -> t -> return option
val query_val_opt : key -> t -> return option
(** [query_val_opt key env] is like find_val_opt, but marks [key] as
    being used in the current scope (e.g. a closure) *)
val open_mutation : t -> unit
val close_mutation : t -> unit
val find_type_opt : Path.t -> t -> (typ * bool) option
val find_type : Path.t -> t -> typ * bool
val query_type : instantiate:(typ -> typ) -> Path.t -> t -> typ
(** [query_type name env] is like [find_type], but instantiates new types for parametrized types*)
val find_label_opt : key -> t -> label option
(** [env] returns the name of first record with a matching label *)
val find_labelset_opt : string list -> t -> typ option
(** [find_labelset_opt labelnames env] returns the first record type with a matching labelset *)
val find_ctor_opt : key -> t -> label option
(** [find_ctor_opt cname env] returns the variant of which the ctor is part of
as well as the type of the ctor if it has data *)
val mod_fn_name : mname:key -> key -> key
val externals : t -> ext list
(** [externals env] returns a list of all external function declarations *)
| null | https://raw.githubusercontent.com/tjammer/schmu/7f179e93ff88ef7d3509d73a56db066cf3fa145f/lib/typing/env.mli | ocaml | return type for values
* Default value, everything is false
* [add_value key value loc] add value [key] defined at [loc] with type [typ] to env
* like [add_value], but keeps track of external declarations
* To give the generalized type with closure for functions
* [add record record_name ~param ~labels env] returns an env with an added record named [record_name]
optionally parametrized by [param] with typed [labels]
* [add_variant variant_name ~param ~ctors env] returns an env with an added variant named [variant_name]
optionally parametrized by [param] with [ctors]
* Doesn't actually open the module, but makes the env ready for matching the following adds to a module
* [query_type name env] is like [find_type], but instantiates new types for parametrized types
* [find_ctor_opt cname env] returns the variant of which the ctor is part of
as well as the type of the ctor if it has data
* [externals env] returns a list of all external function declarations | open Types
type key = string
type label = { index : int; typename : Path.t }
type t
type imported = string * [ `C | `Schmu ]
type value = {
typ : typ;
param : bool;
const : bool;
global : bool;
imported : imported option;
mut : bool;
}
type warn_kind = Unused | Unmutated | Unused_mod
type unused = (unit, (Path.t * warn_kind * Ast.loc) list) result
type return = {
typ : typ;
const : bool;
global : bool;
mut : bool;
imported : string option;
}
type ext = {
ext_name : string;
ext_typ : typ;
ext_cname : string option;
imported : imported option;
used : bool ref;
closure : bool;
}
type add_kind = Aimpl | Asignature | Amodule of string
val def_value : value
val empty : unit -> t
val add_value : key -> value -> Ast.loc -> t -> t
val add_external :
key ->
cname:string option ->
typ ->
imported:imported option ->
closure:bool ->
Ast.loc ->
t ->
t
val change_type : key -> typ -> t -> t
val add_type : Path.t -> in_sig:bool -> typ -> t -> t
val add_record :
Path.t -> add_kind -> params:typ list -> labels:field array -> t -> t
val add_variant :
Path.t -> add_kind -> params:typ list -> ctors:ctor array -> t -> t
val add_alias : Path.t -> add_kind -> typ -> t -> t
val open_function : t -> t
val close_function : t -> t * closed list * unused
* Returns the variables captured in the closed function scope , and first unused var
val open_module : t -> Ast.loc -> string -> t
val finish_module : t -> t
val close_module : t -> t
val find_val : key -> t -> return
val find_val_opt : key -> t -> return option
val query_val_opt : key -> t -> return option
(** [query_val_opt key env] is like find_val_opt, but marks [key] as
    being used in the current scope (e.g. a closure) *)
val open_mutation : t -> unit
val close_mutation : t -> unit
val find_type_opt : Path.t -> t -> (typ * bool) option
val find_type : Path.t -> t -> typ * bool
val query_type : instantiate:(typ -> typ) -> Path.t -> t -> typ
val find_label_opt : key -> t -> label option
* [ env ] returns the name of first record with a matching label
val find_labelset_opt : string list -> t -> typ option
* [ find_labelset_opt labelnames env ] returns the first record type with a matching labelset
val find_ctor_opt : key -> t -> label option
val mod_fn_name : mname:key -> key -> key
val externals : t -> ext list
|
cdb6a5c80b278f8077fbd334105f9ef8794856121b4ed3f0e957b94fea24b9d5 | locusmath/locus | impl.clj | (ns locus.variety.projective.impl
(:require [locus.set.logic.core.set :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.copresheaf.structure.core.protocols :refer :all]
[locus.algebra.commutative.semigroup.object :refer :all]
[locus.algebra.semigroup.core.object :refer :all]
[locus.algebra.semigroup.monoid.object :refer :all]
[locus.algebra.group.core.object :refer :all]
[locus.algebra.semigroup.free.free-semigroup :refer :all]
[locus.additive.base.generic.arithmetic :as arith]
[locus.additive.base.core.protocols :refer :all]
[locus.additive.ring.core.object :refer :all]
[locus.additive.ring.ideal.object :refer :all]
[locus.additive.ring.core.quotient-ring :refer :all]
[locus.semigroup-algebra.core.object :refer :all]))
; A projective variety much like an affine variety is determined by a set of polynomials.
; However, in this case, the projective variety should be determined by a set of
homogeneous polynomials whose terms all have the same degree with one another . Then
; the homogeneous polynomials determine a variety in projective space.
(deftype ProjectiveVariety [ring vars polynomials])
(defmethod coordinate-ring ProjectiveVariety
[variety]
(->QuotientRing
(semigroup-algebra (.ring variety) (free-commutative-monoid (.vars variety)))
(.polynomials variety)))
| null | https://raw.githubusercontent.com/locusmath/locus/b232579217be4e39458410893827a84d744168e4/src/clojure/locus/variety/projective/impl.clj | clojure | A projective variety much like an affine variety is determined by a set of polynomials.
However, in this case, the projective variety should be determined by a set of
the homogeneous polynomials determine a variety in projective space. | (ns locus.variety.projective.impl
(:require [locus.set.logic.core.set :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.copresheaf.structure.core.protocols :refer :all]
[locus.algebra.commutative.semigroup.object :refer :all]
[locus.algebra.semigroup.core.object :refer :all]
[locus.algebra.semigroup.monoid.object :refer :all]
[locus.algebra.group.core.object :refer :all]
[locus.algebra.semigroup.free.free-semigroup :refer :all]
[locus.additive.base.generic.arithmetic :as arith]
[locus.additive.base.core.protocols :refer :all]
[locus.additive.ring.core.object :refer :all]
[locus.additive.ring.ideal.object :refer :all]
[locus.additive.ring.core.quotient-ring :refer :all]
[locus.semigroup-algebra.core.object :refer :all]))
homogeneous polynomials whose terms all have the same degree with one another . Then
(deftype ProjectiveVariety [ring vars polynomials])
(defmethod coordinate-ring ProjectiveVariety
[variety]
(->QuotientRing
(semigroup-algebra (.ring variety) (free-commutative-monoid (.vars variety)))
(.polynomials variety)))
|
9f9bc6fd9d8b71804e488d0f0f460e9cace1c2f4e1a9e514c15f1d1b6d8c5309 | OCamlPro/ocplib-resto | restoDirectory.mli | (**************************************************************************)
(* ocplib-resto *)
(*  Copyright (C) 2016, OCamlPro.                                         *)
(* *)
(* All rights reserved. This file is distributed under the terms *)
(*  of the GNU Lesser General Public License version 2.1, with the        *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Resto
module Answer : sig
(** Return type for service handler *)
type 'a answer =
{ code : int ;
body : 'a output ;
}
and 'a output =
| Empty
| Single of 'a
| Stream of 'a stream
and 'a stream = {
next: unit -> 'a option Lwt.t ;
shutdown: unit -> unit ;
}
val ok: 'a -> 'a answer
val ok_stream: 'a stream -> 'a answer
val return: 'a -> 'a answer Lwt.t
val return_stream: 'a stream -> 'a answer Lwt.t
end
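(* For example (sketch only), a handler can wrap a polling source as a
   stream answer; [fetch_next] is an assumed function of type
   [unit -> 'a option Lwt.t], not part of this interface:

     let stream_answer () =
       Answer.ok_stream
         { Answer.next = (fun () -> fetch_next ()) ;
           shutdown = (fun () -> ()) }
*)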
(** Possible error while registring services. *)
type step =
| Static of string
| Dynamic of Arg.descr
type conflict =
| CService | CDir | CBuilder | CCustom
| CTypes of Arg.descr *
Arg.descr
| CType of Arg.descr * string list
exception Conflict of step list * conflict
exception Cannot_parse of Arg.descr * string * string list
module Make (Repr : Json_repr.Repr) : sig
(** Dispatch tree *)
type 'prefix directory
(** Empty tree *)
val empty: 'prefix directory
val map: ('a -> 'b) -> 'b directory -> 'a directory
val prefix: ('pr, 'p) Path.path -> 'p directory -> 'pr directory
val merge: 'a directory -> 'a directory -> 'a directory
(** Resolve a service. *)
val lookup:
'prefix directory -> 'prefix -> string list ->
(Repr.value option -> Repr.value Answer.answer Lwt.t) Lwt.t
(** Registring handler in service tree. *)
val register:
'prefix directory ->
('prefix, 'params, 'input, 'output) service ->
('params -> 'input -> 'output Answer.answer Lwt.t) ->
'prefix directory
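(* Illustrative sketch (the value [status_service] and its handler are
   assumed, not defined here): build a directory and dispatch one call.

     let dir =
       register empty status_service
         (fun _params _input -> Answer.return `Ok) in
     lookup dir () ["status"] >>= fun handler ->
     handler None

   [>>=] is Lwt.bind; with ['prefix = unit] the prefix argument is just (). *)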
(** Registring handler in service tree. variant. *)
val register0:
unit directory ->
(unit, unit, 'i, 'o) service ->
('i -> 'o Answer.answer Lwt.t) ->
unit directory
val register1:
'prefix directory ->
('prefix, unit * 'a, 'i, 'o) service ->
('a -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register2:
'prefix directory ->
('prefix, (unit * 'a) * 'b, 'i, 'o) service ->
('a -> 'b -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register3:
'prefix directory ->
('prefix, ((unit * 'a) * 'b) * 'c, 'i, 'o) service ->
('a -> 'b -> 'c -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register4:
'prefix directory ->
('prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'i, 'o) service ->
('a -> 'b -> 'c -> 'd -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register5:
'prefix directory ->
('prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'i, 'o) service ->
('a -> 'b -> 'c -> 'd -> 'e -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
(** Registring dynamic subtree. *)
val register_dynamic_directory:
?descr:string ->
'prefix directory ->
('prefix, 'a) Path.path -> ('a -> 'a directory Lwt.t) ->
'prefix directory
(** Registring dynamic subtree. (variant) *)
val register_dynamic_directory1:
?descr:string ->
'prefix directory ->
('prefix, unit * 'a) Path.path ->
('a -> (unit * 'a) directory Lwt.t) ->
'prefix directory
val register_dynamic_directory2:
?descr:string ->
'prefix directory ->
('prefix, (unit * 'a) * 'b) Path.path ->
('a -> 'b -> ((unit * 'a) * 'b) directory Lwt.t) ->
'prefix directory
val register_dynamic_directory3:
?descr:string ->
'prefix directory ->
('prefix, ((unit * 'a) * 'b) * 'c) Path.path ->
('a -> 'b -> 'c -> (((unit * 'a) * 'b) * 'c) directory Lwt.t) ->
'prefix directory
(** Registring custom directory lookup. *)
type custom_lookup =
| CustomService of Description.service_descr *
(Repr.value option -> Repr.value Answer.answer Lwt.t)
| CustomDirectory of Description.directory_descr
val register_custom_lookup:
?descr:string ->
'prefix directory ->
('prefix, 'params) Path.path ->
('params -> string list -> custom_lookup Lwt.t) ->
'prefix directory
val register_custom_lookup1:
?descr:string ->
'prefix directory ->
('prefix, unit * 'a) Path.path ->
('a -> string list -> custom_lookup Lwt.t) ->
'prefix directory
val register_custom_lookup2:
?descr:string ->
'prefix directory ->
('prefix, (unit * 'a) * 'b) Path.path ->
('a -> 'b -> string list -> custom_lookup Lwt.t) ->
'prefix directory
val register_custom_lookup3:
?descr:string ->
'prefix directory ->
('prefix, ((unit * 'a) * 'b) * 'c) Path.path ->
('a -> 'b -> 'c -> string list -> custom_lookup Lwt.t) ->
'prefix directory
(** Registring a description service. *)
val register_describe_directory_service:
'prefix directory ->
('prefix, 'prefix, bool option, Description.directory_descr) service ->
'prefix directory
end
include (module type of Make (Json_repr.Ezjsonm))
module Internal : sig
type (_,_,_,_,_,_) conv =
| Z : (unit, 'g, 'g, unit, 'f, 'f) conv
| S : ('t, 'g, 'b * 's, 'rt, 'f, 'r) conv ->
('t * 'b, 'g, 's, 'a * 'rt, 'a -> 'f, 'r) conv
val curry : ('a, 'b, unit, 'b, 'c, 'd) conv -> 'c -> 'a -> 'd
end
| null | https://raw.githubusercontent.com/OCamlPro/ocplib-resto/9c84b7432cee499a2057e372c4d619daeb0b9d6a/src/restoDirectory.mli | ocaml | ************************************************************************
ocplib-resto
All rights reserved. This file is distributed under the terms
special exception on linking described in the file LICENSE.
************************************************************************
* Return type for service handler
* Possible error while registring services.
* Dispatch tree
* Empty tree
* Resolve a service.
* Registring handler in service tree.
* Registring dynamic subtree.
* Registring custom directory lookup.
* Registring a description service. | Copyright ( C ) 2016 , OCamlPro .
of the GNU Lesser General Public License version 2.1 , with the
open Resto
module Answer : sig
type 'a answer =
{ code : int ;
body : 'a output ;
}
and 'a output =
| Empty
| Single of 'a
| Stream of 'a stream
and 'a stream = {
next: unit -> 'a option Lwt.t ;
shutdown: unit -> unit ;
}
val ok: 'a -> 'a answer
val ok_stream: 'a stream -> 'a answer
val return: 'a -> 'a answer Lwt.t
val return_stream: 'a stream -> 'a answer Lwt.t
end
type step =
| Static of string
| Dynamic of Arg.descr
type conflict =
| CService | CDir | CBuilder | CCustom
| CTypes of Arg.descr *
Arg.descr
| CType of Arg.descr * string list
exception Conflict of step list * conflict
exception Cannot_parse of Arg.descr * string * string list
module Make (Repr : Json_repr.Repr) : sig
type 'prefix directory
val empty: 'prefix directory
val map: ('a -> 'b) -> 'b directory -> 'a directory
val prefix: ('pr, 'p) Path.path -> 'p directory -> 'pr directory
val merge: 'a directory -> 'a directory -> 'a directory
val lookup:
'prefix directory -> 'prefix -> string list ->
(Repr.value option -> Repr.value Answer.answer Lwt.t) Lwt.t
val register:
'prefix directory ->
('prefix, 'params, 'input, 'output) service ->
('params -> 'input -> 'output Answer.answer Lwt.t) ->
'prefix directory
* Registring handler in service tree . variant .
val register0:
unit directory ->
(unit, unit, 'i, 'o) service ->
('i -> 'o Answer.answer Lwt.t) ->
unit directory
val register1:
'prefix directory ->
('prefix, unit * 'a, 'i, 'o) service ->
('a -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register2:
'prefix directory ->
('prefix, (unit * 'a) * 'b, 'i, 'o) service ->
('a -> 'b -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register3:
'prefix directory ->
('prefix, ((unit * 'a) * 'b) * 'c, 'i, 'o) service ->
('a -> 'b -> 'c -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register4:
'prefix directory ->
('prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'i, 'o) service ->
('a -> 'b -> 'c -> 'd -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register5:
'prefix directory ->
('prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'i, 'o) service ->
('a -> 'b -> 'c -> 'd -> 'e -> 'i -> 'o Answer.answer Lwt.t) ->
'prefix directory
val register_dynamic_directory:
?descr:string ->
'prefix directory ->
('prefix, 'a) Path.path -> ('a -> 'a directory Lwt.t) ->
'prefix directory
* Registring dynamic subtree . ( variant )
val register_dynamic_directory1:
?descr:string ->
'prefix directory ->
('prefix, unit * 'a) Path.path ->
('a -> (unit * 'a) directory Lwt.t) ->
'prefix directory
val register_dynamic_directory2:
?descr:string ->
'prefix directory ->
('prefix, (unit * 'a) * 'b) Path.path ->
('a -> 'b -> ((unit * 'a) * 'b) directory Lwt.t) ->
'prefix directory
val register_dynamic_directory3:
?descr:string ->
'prefix directory ->
('prefix, ((unit * 'a) * 'b) * 'c) Path.path ->
('a -> 'b -> 'c -> (((unit * 'a) * 'b) * 'c) directory Lwt.t) ->
'prefix directory
type custom_lookup =
| CustomService of Description.service_descr *
(Repr.value option -> Repr.value Answer.answer Lwt.t)
| CustomDirectory of Description.directory_descr
val register_custom_lookup:
?descr:string ->
'prefix directory ->
('prefix, 'params) Path.path ->
('params -> string list -> custom_lookup Lwt.t) ->
'prefix directory
val register_custom_lookup1:
?descr:string ->
'prefix directory ->
('prefix, unit * 'a) Path.path ->
('a -> string list -> custom_lookup Lwt.t) ->
'prefix directory
val register_custom_lookup2:
?descr:string ->
'prefix directory ->
('prefix, (unit * 'a) * 'b) Path.path ->
('a -> 'b -> string list -> custom_lookup Lwt.t) ->
'prefix directory
val register_custom_lookup3:
?descr:string ->
'prefix directory ->
('prefix, ((unit * 'a) * 'b) * 'c) Path.path ->
('a -> 'b -> 'c -> string list -> custom_lookup Lwt.t) ->
'prefix directory
val register_describe_directory_service:
'prefix directory ->
('prefix, 'prefix, bool option, Description.directory_descr) service ->
'prefix directory
end
include (module type of Make (Json_repr.Ezjsonm))
module Internal : sig
type (_,_,_,_,_,_) conv =
| Z : (unit, 'g, 'g, unit, 'f, 'f) conv
| S : ('t, 'g, 'b * 's, 'rt, 'f, 'r) conv ->
('t * 'b, 'g, 's, 'a * 'rt, 'a -> 'f, 'r) conv
val curry : ('a, 'b, unit, 'b, 'c, 'd) conv -> 'c -> 'a -> 'd
end
|
c394dec2c474455ebcb7b610b3f99a41b1ff3471013ab864a22ec34041895f0d | eltex-ecss/pt_scripts | pt_record_properties.erl | %%%----------------------------------------------------------------------------
%%% @author platinumthinker < >
%%% @doc
%%% Add several function in module:
%%% all_records returns a list of all record names defined in this module.
%%% properties returns, for a given record name, the list of its fields and
%%% their types.
%%%
%%% Supports custom attributes:
%%% -include_records([record_a, record_b]).
%%% -exclude_records([record_c]).
%%%
%%% List records = (All records or Include records (if defined)) - Exclude records.
%%% @end
%%%----------------------------------------------------------------------------
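%%
%% Usage sketch (illustrative; the module and record names below are made
%% up and not part of this library):
%%
%%   -module(my_mod).
%%   -compile({parse_transform, pt_record_properties}).
%%   -record(user, {id :: integer(), name :: string()}).
%%   -record(tmp, {dummy}).
%%   -exclude_records([tmp]).
%%
%%   %% After compilation the generated functions behave roughly like:
%%   %%   my_mod:all_records() -> [user]
%%   %%   my_mod:properties(user) -> [{id,integer},{name,string}]
%%   %%   my_mod:properties(tmp) -> {error,no_found}
%%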
-module(pt_record_properties).
-include_lib("pt_lib/include/pt_error_macro.hrl").
-include_lib("pt_lib/include/pt_lib.hrl").
-include_lib("pt_scripts/include/pt_recompilable.hrl").
-export([
parse_transform/2,
format_error/1
]).
-spec parse_transform(AST :: list(), Options :: list()) -> list().
parse_transform(AST, _Options) ->
Include = pt_lib:get_attribute_value(include_records, AST),
Exclude = pt_lib:get_attribute_value(exclude_records, AST),
Records = [ Rec || Rec = {R, _} <- pt_lib:get_attribute_value(record, AST),
(Include == [] orelse lists:member(R, Include))
andalso not lists:member(R, Exclude)],
Types = [ {Name, Type} || {{record, Name}, Type, _} <-
pt_lib:get_attribute_value(type, AST) ],
RecordsName = lists:sort(lists:foldl(
fun({RecName, _}, Acc) -> [RecName | Acc];
(_, Acc) -> Acc
end, [], Records)),
AST1 = pt_lib:add_function(AST, ast("all_records() -> @RecordsName.", 0)),
Function = lists:foldl(
fun({RecName, RecInfo}, AccFunctions) ->
RecType = proplists:get_value(RecName, Types, []),
RecFields = parse_record(RecType, RecInfo, Exclude),
Clause = io_lib:fwrite("properties(~p) -> ~w", [RecName, RecFields]),
[ lists:flatten(Clause) | AccFunctions];
(_, Acc) -> Acc
end, [], Records),
[ASTFuncProperties] = pt_lib:str2ast(string:join(Function, ";") ++
";properties(_) -> {error, no_found}.", 0),
pt_lib:add_function(AST1, ASTFuncProperties).
-spec format_error(Error :: term()) -> string().
format_error(Error) ->
io:format("Error: ~p~n", [Error]).
parse_record(RecType, RecInfo, Exclude) ->
lists:reverse(parse_record(RecType, RecInfo, [], Exclude)).
parse_record(RecType, [{record_field, _, {atom, _, Name}} | Tile],
Acc, Exclude) ->
Type = get_type(Name, RecType, Exclude),
parse_record(RecType, Tile, [{Name, Type} | Acc], Exclude);
parse_record(RecType, [{record_field, _, {atom, _, Name}, _} | Tile],
Acc, Exclude) ->
Type = get_type(Name, RecType, Exclude),
parse_record(RecType, Tile, [{Name, Type} | Acc], Exclude);
parse_record(RecType, [_ | Tile], Acc, Exclude) ->
parse_record(RecType, Tile, Acc, Exclude);
parse_record(_, [], Acc, _Exclude) -> Acc.
get_type(Name, [{record_field, _, {atom, _, Name}, _} | _], _) ->
undefined;
get_type(Name, [{record_field, _, {atom, _, Name}} | _], _) ->
undefined;
get_type(Name, [{typed_record_field, {record_field, _, {atom, _, Name}, _},
TypeInfo} | _], Exclude) ->
get_type(TypeInfo, Exclude);
get_type(Name, [{typed_record_field, {record_field, _, {atom, _, Name}},
TypeInfo} | _], Exclude) ->
get_type(TypeInfo, Exclude);
get_type(Name, [_ | Tail], Exclude) ->
get_type(Name, Tail, Exclude);
get_type(_, [], _) -> undefined.
get_type({type, _, record, [{atom, _, Name}]}, Exclude) ->
case lists:member(Name, Exclude) of
false ->
{record, Name};
true ->
undefined
end;
get_type({type, _, Type, []}, _Exclude) ->
Type;
get_type({type, _, list, Info}, Exclude) ->
lists:map(
fun(TypeInf) ->
get_type(TypeInf, Exclude)
end, Info);
get_type({type, _, union, Info}, Exclude) ->
Res = lists:filter(fun filter_undefined/1,
lists:map(
fun(TypeInf) ->
get_type(TypeInf, Exclude)
end, Info)),
case lists:usort(Res) of
[SingleType] -> SingleType;
[] -> undefined;
Other -> {union, Other}
end;
get_type(_, _) ->
undefined.
filter_undefined(undefined) -> false;
filter_undefined(_) -> true.
| null | https://raw.githubusercontent.com/eltex-ecss/pt_scripts/c05a0c67608f67a5e0bd23324ba0386f27186342/src/pt_record_properties.erl | erlang | ----------------------------------------------------------------------------
@doc
Add several function in module:
properties returned list strings property composed from record name and
property name.
Supports custom attributes:
-include_records([record_a, record_b]).
-exclude_records([record_c]).
List records = (All records or Include records (if defined)) - Exclude records.
@end
---------------------------------------------------------------------------- | @author platinumthinker < >
all_records returned list all name records defined for this module .
-module(pt_record_properties).
-include_lib("pt_lib/include/pt_error_macro.hrl").
-include_lib("pt_lib/include/pt_lib.hrl").
-include_lib("pt_scripts/include/pt_recompilable.hrl").
-export([
parse_transform/2,
format_error/1
]).
-spec parse_transform(AST :: list(), Options :: list()) -> list().
parse_transform(AST, _Options) ->
Include = pt_lib:get_attribute_value(include_records, AST),
Exclude = pt_lib:get_attribute_value(exclude_records, AST),
Records = [ Rec || Rec = {R, _} <- pt_lib:get_attribute_value(record, AST),
(Include == [] orelse lists:member(R, Include))
andalso not lists:member(R, Exclude)],
Types = [ {Name, Type} || {{record, Name}, Type, _} <-
pt_lib:get_attribute_value(type, AST) ],
RecordsName = lists:sort(lists:foldl(
fun({RecName, _}, Acc) -> [RecName | Acc];
(_, Acc) -> Acc
end, [], Records)),
AST1 = pt_lib:add_function(AST, ast("all_records() -> @RecordsName.", 0)),
Function = lists:foldl(
fun({RecName, RecInfo}, AccFunctions) ->
RecType = proplists:get_value(RecName, Types, []),
RecFields = parse_record(RecType, RecInfo, Exclude),
Clause = io_lib:fwrite("properties(~p) -> ~w", [RecName, RecFields]),
[ lists:flatten(Clause) | AccFunctions];
(_, Acc) -> Acc
end, [], Records),
[ASTFuncProperties] = pt_lib:str2ast(string:join(Function, ";") ++
";properties(_) -> {error, no_found}.", 0),
pt_lib:add_function(AST1, ASTFuncProperties).
-spec format_error(Error :: term()) -> string().
format_error(Error) ->
io:format("Error: ~p~n", [Error]).
parse_record(RecType, RecInfo, Exclude) ->
lists:reverse(parse_record(RecType, RecInfo, [], Exclude)).
parse_record(RecType, [{record_field, _, {atom, _, Name}} | Tile],
Acc, Exclude) ->
Type = get_type(Name, RecType, Exclude),
parse_record(RecType, Tile, [{Name, Type} | Acc], Exclude);
parse_record(RecType, [{record_field, _, {atom, _, Name}, _} | Tile],
Acc, Exclude) ->
Type = get_type(Name, RecType, Exclude),
parse_record(RecType, Tile, [{Name, Type} | Acc], Exclude);
parse_record(RecType, [_ | Tile], Acc, Exclude) ->
parse_record(RecType, Tile, Acc, Exclude);
parse_record(_, [], Acc, _Exclude) -> Acc.
get_type(Name, [{record_field, _, {atom, _, Name}, _} | _], _) ->
undefined;
get_type(Name, [{record_field, _, {atom, _, Name}} | _], _) ->
undefined;
get_type(Name, [{typed_record_field, {record_field, _, {atom, _, Name}, _},
TypeInfo} | _], Exclude) ->
get_type(TypeInfo, Exclude);
get_type(Name, [{typed_record_field, {record_field, _, {atom, _, Name}},
TypeInfo} | _], Exclude) ->
get_type(TypeInfo, Exclude);
get_type(Name, [_ | Tail], Exclude) ->
get_type(Name, Tail, Exclude);
get_type(_, [], _) -> undefined.
get_type({type, _, record, [{atom, _, Name}]}, Exclude) ->
case lists:member(Name, Exclude) of
false ->
{record, Name};
true ->
undefined
end;
get_type({type, _, Type, []}, _Exclude) ->
Type;
get_type({type, _, list, Info}, Exclude) ->
lists:map(
fun(TypeInf) ->
get_type(TypeInf, Exclude)
end, Info);
get_type({type, _, union, Info}, Exclude) ->
Res = lists:filter(fun filter_undefined/1,
lists:map(
fun(TypeInf) ->
get_type(TypeInf, Exclude)
end, Info)),
case lists:usort(Res) of
[SingleType] -> SingleType;
[] -> undefined;
Other -> {union, Other}
end;
get_type(_, _) ->
undefined.
filter_undefined(undefined) -> false;
filter_undefined(_) -> true.
|
fbac839a2c94f27320a22c3128074883ce07889891fd7b61af32d35aee746786 | ghc/ghc | ExtraObj.hs | -----------------------------------------------------------------------------
--
GHC Extra object linking code
--
( c ) The GHC Team 2017
--
-----------------------------------------------------------------------------
module GHC.Linker.ExtraObj
( mkExtraObj
, mkExtraObjToLinkIntoBinary
, mkNoteObjsToLinkIntoBinary
, checkLinkInfo
, getLinkInfo
, getCompilerInfo
, ghcLinkInfoSectionName
, ghcLinkInfoNoteName
, platformSupportsSavingLinkOpts
, haveRtsOptsFlags
)
where
import GHC.Prelude
import GHC.Platform
import GHC.Unit
import GHC.Unit.Env
import GHC.Utils.Asm
import GHC.Utils.Error
import GHC.Utils.Misc
import GHC.Utils.Outputable as Outputable
import GHC.Utils.Logger
import GHC.Utils.TmpFs
import GHC.Driver.Session
import GHC.Driver.Ppr
import qualified GHC.Data.ShortText as ST
import GHC.SysTools.Elf
import GHC.SysTools.Tasks
import GHC.SysTools.Info
import GHC.Linker.Unit
import Control.Monad.IO.Class
import Control.Monad
import Data.Maybe
mkExtraObj :: Logger -> TmpFs -> DynFlags -> UnitState -> Suffix -> String -> IO FilePath
mkExtraObj logger tmpfs dflags unit_state extn xs
= do cFile <- newTempName logger tmpfs (tmpDir dflags) TFL_CurrentModule extn
oFile <- newTempName logger tmpfs (tmpDir dflags) TFL_GhcSession "o"
writeFile cFile xs
ccInfo <- liftIO $ getCompilerInfo logger dflags
runCc Nothing logger tmpfs dflags
([Option "-c",
FileOption "" cFile,
Option "-o",
FileOption "" oFile]
++ if extn /= "s"
then cOpts
else asmOpts ccInfo)
return oFile
where
      -- Pass a different set of options to the C compiler depending on whether
-- we're compiling C or assembler. When compiling C, we pass the usual
-- set of include directories and PIC flags.
cOpts = map Option (picCCOpts dflags)
++ map (FileOption "-I" . ST.unpack)
(unitIncludeDirs $ unsafeLookupUnit unit_state rtsUnit)
-- When compiling assembler code, we drop the usual C options, and if the
-- compiler is Clang, we add an extra argument to tell Clang to ignore
      -- unused command line options. See trac #11684.
asmOpts ccInfo =
if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
then [Option "-Qunused-arguments"]
else []
-- When linking a binary, we need to create a C main() function that
-- starts everything off. This used to be compiled statically as part
-- of the RTS, but that made it hard to change the -rtsopts setting,
-- so now we generate and compile a main() stub as part of every
-- binary and pass the -rtsopts setting directly to the RTS (#5373)
--
-- On Windows, when making a shared library we also may need a DllMain.
--
mkExtraObjToLinkIntoBinary :: Logger -> TmpFs -> DynFlags -> UnitState -> IO (Maybe FilePath)
mkExtraObjToLinkIntoBinary logger tmpfs dflags unit_state = do
when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $
logInfo logger $ withPprStyle defaultUserStyle
(text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
text " Call hs_init_ghc() from your main() function to set these options.")
case ghcLink dflags of
-- Don't try to build the extra object if it is not needed. Compiling the
-- extra object assumes the presence of the RTS in the unit database
    -- (because the extra object imports Rts.h) but GHC's build system may try
-- to build some helper programs before building and registering the RTS!
-- See #18938 for an example where hp2ps failed to build because of a failed
-- (unsafe) lookup for the RTS in the unit db.
_ | gopt Opt_NoHsMain dflags
-> return Nothing
LinkDynLib
| OSMinGW32 <- platformOS (targetPlatform dflags)
-> mk_extra_obj dllMain
| otherwise
-> return Nothing
_ -> mk_extra_obj exeMain
where
mk_extra_obj = fmap Just . mkExtraObj logger tmpfs dflags unit_state "c" . showSDoc dflags
exeMain = vcat [
text "#include <Rts.h>",
text "extern StgClosure ZCMain_main_closure;",
text "int main(int argc, char *argv[])",
char '{',
text " RtsConfig __conf = defaultRtsConfig;",
text " __conf.rts_opts_enabled = "
<> text (show (rtsOptsEnabled dflags)) <> semi,
text " __conf.rts_opts_suggestions = "
<> (if rtsOptsSuggestions dflags
then text "true"
else text "false") <> semi,
text "__conf.keep_cafs = "
<> (if gopt Opt_KeepCAFs dflags
then text "true"
else text "false") <> semi,
case rtsOpts dflags of
Nothing -> Outputable.empty
Just opts -> text " __conf.rts_opts= " <>
text (show opts) <> semi,
text " __conf.rts_hs_main = true;",
text " return hs_main(argc,argv,&ZCMain_main_closure,__conf);",
char '}',
char '\n' -- final newline, to keep gcc happy
]
dllMain = vcat [
text "#include <Rts.h>",
text "#include <windows.h>",
text "#include <stdbool.h>",
char '\n',
text "bool",
text "WINAPI",
text "DllMain ( HINSTANCE hInstance STG_UNUSED",
text " , DWORD reason STG_UNUSED",
text " , LPVOID reserved STG_UNUSED",
text " )",
text "{",
text " return true;",
text "}",
char '\n' -- final newline, to keep gcc happy
]
-- Write out the link info section into a new assembly file. Previously
-- this was included as inline assembly in the main.c file but this
-- is pretty fragile. gas gets upset trying to calculate relative offsets
-- that span the .note section (notably .text) when debug info is present
mkNoteObjsToLinkIntoBinary :: Logger -> TmpFs -> DynFlags -> UnitEnv -> [UnitId] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary logger tmpfs dflags unit_env dep_packages = do
link_info <- getLinkInfo dflags unit_env dep_packages
if (platformSupportsSavingLinkOpts (platformOS platform ))
then fmap (:[]) $ mkExtraObj logger tmpfs dflags unit_state "s" (showSDoc dflags (link_opts link_info))
else return []
where
unit_state = ue_units unit_env
platform = ue_platform unit_env
      link_opts info = hcat
          [ -- "link info" section (see Note [LinkInfo section])
makeElfNote platform ghcLinkInfoSectionName ghcLinkInfoNoteName 0 info
-- ALL generated assembly must have this section to disable
          -- executable stacks.  See also
          -- "GHC.CmmToAsm" for another instance
-- where we need to do this.
, if platformHasGnuNonexecStack platform
then text ".section .note.GNU-stack,\"\","
<> sectionType platform "progbits" <> char '\n'
else Outputable.empty
]
-- | Return the "link info" string
--
See Note [ LinkInfo section ]
getLinkInfo :: DynFlags -> UnitEnv -> [UnitId] -> IO String
getLinkInfo dflags unit_env dep_packages = do
package_link_opts <- getUnitLinkOpts (ghcNameVersion dflags) (ways dflags) unit_env dep_packages
pkg_frameworks <- if not (platformUsesFrameworks (ue_platform unit_env))
then return []
else do
ps <- mayThrowUnitErr (preloadUnitsInfo' unit_env dep_packages)
return (collectFrameworks ps)
let link_info =
( package_link_opts
, pkg_frameworks
, rtsOpts dflags
, rtsOptsEnabled dflags
, gopt Opt_NoHsMain dflags
, map showOpt (ldInputs dflags)
, getOpts dflags opt_l
)
return (show link_info)
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os
  | os == OSSolaris2 = False -- see #5382
| otherwise = osElfTarget os
See Note [ LinkInfo section ]
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
-- if we use the ".debug" prefix, then strip will strip it by default
Identifier for the note ( see Note [ LinkInfo section ] )
ghcLinkInfoNoteName :: String
ghcLinkInfoNoteName = "GHC link info"
-- Returns 'False' if it was, and we can avoid linking, because the
-- previous binary was linked with "the same options".
checkLinkInfo :: Logger -> DynFlags -> UnitEnv -> [UnitId] -> FilePath -> IO Bool
checkLinkInfo logger dflags unit_env pkg_deps exe_file
| not (platformSupportsSavingLinkOpts (platformOS (ue_platform unit_env)))
  -- ToDo: Windows and OS X do not use the ELF binary format, so
  -- readelf does not work there. We need to find another way to do
-- this.
= return False -- conservatively we should return True, but not
-- linking in this case was the behaviour for a long
-- time so we leave it as-is.
| otherwise
= do
link_info <- getLinkInfo dflags unit_env pkg_deps
debugTraceMsg logger 3 $ text ("Link info: " ++ link_info)
m_exe_link_info <- readElfNoteAsString logger exe_file
ghcLinkInfoSectionName ghcLinkInfoNoteName
let sameLinkInfo = (Just link_info == m_exe_link_info)
debugTraceMsg logger 3 $ case m_exe_link_info of
Nothing -> text "Exe link info: Not found"
Just s
| sameLinkInfo -> text ("Exe link info is the same")
| otherwise -> text ("Exe link info is different: " ++ s)
return (not sameLinkInfo)
{- Note [LinkInfo section]
   ~~~~~~~~~~~~~~~~~~~~~~~

   The "link info" is a string representing the parameters of the link. We save
   this information in the binary, and the next time we link, if nothing else has
   changed, we use the link info stored in the existing binary to decide whether
   to re-link or not.

   The "link info" string is stored in a ELF section called ".debug-ghc-link-info"
   (see ghcLinkInfoSectionName) with the SHT_NOTE type. For some time, it used to
   not follow the specified record-based format (see #11022).
-}
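-- The stored note can be inspected from a shell with GNU binutils, e.g.
-- (illustrative):
--
--   readelf --string-dump=.debug-ghc-link-info MyProgram
--
-- which should print the same string that getLinkInfo produces.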
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags =
isJust (rtsOpts dflags) || case rtsOptsEnabled dflags of
RtsOptsSafeOnly -> False
_ -> True
| null | https://raw.githubusercontent.com/ghc/ghc/37cfe3c0f4fb16189bbe3bb735f758cd6e3d9157/compiler/GHC/Linker/ExtraObj.hs | haskell | ---------------------------------------------------------------------------
---------------------------------------------------------------------------
we're compiling C or assembler. When compiling C, we pass the usual
set of include directories and PIC flags.
When compiling assembler code, we drop the usual C options, and if the
compiler is Clang, we add an extra argument to tell Clang to ignore
When linking a binary, we need to create a C main() function that
starts everything off. This used to be compiled statically as part
of the RTS, but that made it hard to change the -rtsopts setting,
so now we generate and compile a main() stub as part of every
Don't try to build the extra object if it is not needed. Compiling the
extra object assumes the presence of the RTS in the unit database
to build some helper programs before building and registering the RTS!
See #18938 for an example where hp2ps failed to build because of a failed
(unsafe) lookup for the RTS in the unit db.
final newline, to keep gcc happy
final newline, to keep gcc happy
Write out the link info section into a new assembly file. Previously
this was included as inline assembly in the main.c file but this
is pretty fragile. gas gets upset trying to calculate relative offsets
that span the .note section (notably .text) when debug info is present
ALL generated assembly must have this section to disable
executable stacks. See also
where we need to do this.
| Return the "link info" string
if we use the ".debug" prefix, then strip will strip it by default
Returns 'False' if it was, and we can avoid linking, because the
previous binary was linked with "the same options".
this.
conservatively we should return True, but not
linking in this case was the behaviour for a long
time so we leave it as-is. | GHC Extra object linking code
( c ) The GHC Team 2017
module GHC.Linker.ExtraObj
( mkExtraObj
, mkExtraObjToLinkIntoBinary
, mkNoteObjsToLinkIntoBinary
, checkLinkInfo
, getLinkInfo
, getCompilerInfo
, ghcLinkInfoSectionName
, ghcLinkInfoNoteName
, platformSupportsSavingLinkOpts
, haveRtsOptsFlags
)
where
import GHC.Prelude
import GHC.Platform
import GHC.Unit
import GHC.Unit.Env
import GHC.Utils.Asm
import GHC.Utils.Error
import GHC.Utils.Misc
import GHC.Utils.Outputable as Outputable
import GHC.Utils.Logger
import GHC.Utils.TmpFs
import GHC.Driver.Session
import GHC.Driver.Ppr
import qualified GHC.Data.ShortText as ST
import GHC.SysTools.Elf
import GHC.SysTools.Tasks
import GHC.SysTools.Info
import GHC.Linker.Unit
import Control.Monad.IO.Class
import Control.Monad
import Data.Maybe
mkExtraObj :: Logger -> TmpFs -> DynFlags -> UnitState -> Suffix -> String -> IO FilePath
mkExtraObj logger tmpfs dflags unit_state extn xs
= do cFile <- newTempName logger tmpfs (tmpDir dflags) TFL_CurrentModule extn
oFile <- newTempName logger tmpfs (tmpDir dflags) TFL_GhcSession "o"
writeFile cFile xs
ccInfo <- liftIO $ getCompilerInfo logger dflags
runCc Nothing logger tmpfs dflags
([Option "-c",
FileOption "" cFile,
Option "-o",
FileOption "" oFile]
++ if extn /= "s"
then cOpts
else asmOpts ccInfo)
return oFile
where
Pass a different set of options to the C compiler depending one whether
cOpts = map Option (picCCOpts dflags)
++ map (FileOption "-I" . ST.unpack)
(unitIncludeDirs $ unsafeLookupUnit unit_state rtsUnit)
unused command line options . See trac # 11684 .
asmOpts ccInfo =
if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
then [Option "-Qunused-arguments"]
else []
binary and pass the -rtsopts setting directly to the RTS ( # 5373 )
On Windows , when making a shared library we also may need a .
mkExtraObjToLinkIntoBinary :: Logger -> TmpFs -> DynFlags -> UnitState -> IO (Maybe FilePath)
mkExtraObjToLinkIntoBinary logger tmpfs dflags unit_state = do
when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $
logInfo logger $ withPprStyle defaultUserStyle
(text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
text " Call hs_init_ghc() from your main() function to set these options.")
case ghcLink dflags of
( because the extra object imports Rts.h ) but GHC 's build system may try
_ | gopt Opt_NoHsMain dflags
-> return Nothing
LinkDynLib
| OSMinGW32 <- platformOS (targetPlatform dflags)
-> mk_extra_obj dllMain
| otherwise
-> return Nothing
_ -> mk_extra_obj exeMain
where
mk_extra_obj = fmap Just . mkExtraObj logger tmpfs dflags unit_state "c" . showSDoc dflags
exeMain = vcat [
text "#include <Rts.h>",
text "extern StgClosure ZCMain_main_closure;",
text "int main(int argc, char *argv[])",
char '{',
text " RtsConfig __conf = defaultRtsConfig;",
text " __conf.rts_opts_enabled = "
<> text (show (rtsOptsEnabled dflags)) <> semi,
text " __conf.rts_opts_suggestions = "
<> (if rtsOptsSuggestions dflags
then text "true"
else text "false") <> semi,
text "__conf.keep_cafs = "
<> (if gopt Opt_KeepCAFs dflags
then text "true"
else text "false") <> semi,
case rtsOpts dflags of
Nothing -> Outputable.empty
Just opts -> text " __conf.rts_opts= " <>
text (show opts) <> semi,
text " __conf.rts_hs_main = true;",
text " return hs_main(argc,argv,&ZCMain_main_closure,__conf);",
char '}',
]
dllMain = vcat [
text "#include <Rts.h>",
text "#include <windows.h>",
text "#include <stdbool.h>",
char '\n',
text "bool",
text "WINAPI",
text "DllMain ( HINSTANCE hInstance STG_UNUSED",
text " , DWORD reason STG_UNUSED",
text " , LPVOID reserved STG_UNUSED",
text " )",
text "{",
text " return true;",
text "}",
]
mkNoteObjsToLinkIntoBinary :: Logger -> TmpFs -> DynFlags -> UnitEnv -> [UnitId] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary logger tmpfs dflags unit_env dep_packages = do
link_info <- getLinkInfo dflags unit_env dep_packages
if (platformSupportsSavingLinkOpts (platformOS platform ))
then fmap (:[]) $ mkExtraObj logger tmpfs dflags unit_state "s" (showSDoc dflags (link_opts link_info))
else return []
where
unit_state = ue_units unit_env
platform = ue_platform unit_env
link_opts info = hcat
" link info " section ( see Note [ LinkInfo section ] )
makeElfNote platform ghcLinkInfoSectionName ghcLinkInfoNoteName 0 info
" GHC.CmmToAsm " for another instance
, if platformHasGnuNonexecStack platform
then text ".section .note.GNU-stack,\"\","
<> sectionType platform "progbits" <> char '\n'
else Outputable.empty
]
See Note [ LinkInfo section ]
getLinkInfo :: DynFlags -> UnitEnv -> [UnitId] -> IO String
getLinkInfo dflags unit_env dep_packages = do
package_link_opts <- getUnitLinkOpts (ghcNameVersion dflags) (ways dflags) unit_env dep_packages
pkg_frameworks <- if not (platformUsesFrameworks (ue_platform unit_env))
then return []
else do
ps <- mayThrowUnitErr (preloadUnitsInfo' unit_env dep_packages)
return (collectFrameworks ps)
let link_info =
( package_link_opts
, pkg_frameworks
, rtsOpts dflags
, rtsOptsEnabled dflags
, gopt Opt_NoHsMain dflags
, map showOpt (ldInputs dflags)
, getOpts dflags opt_l
)
return (show link_info)
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os
see # 5382
| otherwise = osElfTarget os
See Note [ LinkInfo section ]
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
Identifier for the note ( see Note [ LinkInfo section ] )
ghcLinkInfoNoteName :: String
ghcLinkInfoNoteName = "GHC link info"
checkLinkInfo :: Logger -> DynFlags -> UnitEnv -> [UnitId] -> FilePath -> IO Bool
checkLinkInfo logger dflags unit_env pkg_deps exe_file
| not (platformSupportsSavingLinkOpts (platformOS (ue_platform unit_env)))
ToDo : Windows and OS X do not use the ELF binary format , so
readelf does not work there . We need to find another way to do
| otherwise
= do
link_info <- getLinkInfo dflags unit_env pkg_deps
debugTraceMsg logger 3 $ text ("Link info: " ++ link_info)
m_exe_link_info <- readElfNoteAsString logger exe_file
ghcLinkInfoSectionName ghcLinkInfoNoteName
let sameLinkInfo = (Just link_info == m_exe_link_info)
debugTraceMsg logger 3 $ case m_exe_link_info of
Nothing -> text "Exe link info: Not found"
Just s
| sameLinkInfo -> text ("Exe link info is the same")
| otherwise -> text ("Exe link info is different: " ++ s)
return (not sameLinkInfo)
{- Note [LinkInfo section]
   ~~~~~~~~~~~~~~~~~~~~~~~

   The "link info" is a string representing the parameters of the link. We save
   this information in the binary, and the next time we link, if nothing else has
   changed, we use the link info stored in the existing binary to decide whether
   to re-link or not.

   The "link info" string is stored in a ELF section called ".debug-ghc-link-info"
   (see ghcLinkInfoSectionName) with the SHT_NOTE type. For some time, it used to
   not follow the specified record-based format (see #11022).
-}
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags =
isJust (rtsOpts dflags) || case rtsOptsEnabled dflags of
RtsOptsSafeOnly -> False
_ -> True
|
faecde2ba768c27df7c1c56c6bbe847be70949945e1d5d9d6ca0a343c5412509 | bjorng/wings | wings_light.erl | %%
%% wings_light.erl --
%%
%% Implementation of lights.
%%
%%  Copyright (c) 2002-2011
%%
%% See the file "license.terms" for information on usage and redistribution
%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
%%
%% $Id$
%%
-module(wings_light).
-export([init/0, init/1, init_opengl/0, load_env_image/1,
light_types/0,menu/3,command/2,is_any_light_selected/1,
any_enabled_lights/0,info/1,setup_light/2,
create/2,update_dynamic/2,update_matrix/2,update/1,
global_lights/1,
export/1,export_bc/1,export_camera_lights/0,
import/2,import/1,shape_materials/2,
light_pos/1]).
-define(NEED_OPENGL, 1).
-include("wings.hrl").
-include_lib("wings/e3d/e3d.hrl").
-include_lib("wings/e3d/e3d_image.hrl").
-import(lists, [reverse/1,foldl/3,foldr/3,member/2,keydelete/3,sort/1]).
-define(DEF_X, 0.0).
-define(DEF_Y, 3.0).
-define(DEF_Z, 0.0).
-define(DEF_POS, {?DEF_X,?DEF_Y,?DEF_Z}).
%% Light record in We.
%%
%% The type field must be the first field, since it is used by
%% the light testing macros in wings.hrl.
-record(light,
{type, %Type. (DO NOT MOVE.)
diffuse={1.0,1.0,1.0,1.0},
ambient={0.0,0.0,0.0,1.0},
specular={1.0,1.0,1.0,1.0},
aim, %Aim point for spot/infinite.
lin_att, %Linear attenuation.
quad_att, %Quadratic attenuation.
spot_angle,
spot_exp, %Spot exponent.
prop=[] %Extra properties.
}).
def_envmap() ->
DefEnvMap = "grandcanyon.png",
DefPath = filename:join(wings_util:lib_dir(wings), "textures"),
filename:join(DefPath, DefEnvMap).
init() ->
wings_pref:set_default(show_bg, false),
wings_pref:set_default(show_bg_blur, 0.5),
wings_pref:set_default(bg_image, def_envmap()),
EnvImgRec = load_env_file(wings_pref:get_value(bg_image)),
init(false, EnvImgRec).
init(Recompile) -> %% Debug
EnvImgRec = load_env_file(wings_pref:get_value(bg_image)),
init(Recompile, EnvImgRec).
init(Recompile, EnvImgRec) ->
AreaMatTagId = load_area_light_tab(),
EnvIds = case wings:is_fast_start() orelse cl_setup(Recompile) of
true ->
fake_envmap(load_env_file(def_envmap()));
{error, _} ->
ErrorStr = ?__(1, "Could not initialize OpenCL: env lighting limited ~n"),
io:format(ErrorStr,[]),
wings_status:message(geom, ErrorStr),
fake_envmap(load_env_file(def_envmap()));
CL ->
make_envmap(CL, EnvImgRec)
end,
[?SET(Tag, Id) || {Tag,Id} <- [AreaMatTagId|EnvIds]],
init_opengl(),
wings_develop:gl_error_check({?MODULE,?FUNCTION_NAME}),
ok.
-spec load_env_image(FileName::string()) -> ok | {file_error, {error, term()}} | {cl_error, {error, term()}}.
load_env_image(FileName) ->
try load_env_image_1(FileName)
catch throw:Error ->
Error
end.
load_env_image_1(FileName) ->
EnvImgRec = wings_image:image_read([{filename, FileName}]),
is_record(EnvImgRec, e3d_image) orelse throw({file_error, EnvImgRec}),
CL = case cl_setup(false) of
{error, _} = Error ->
throw({cl_error, Error});
CL0 -> CL0
end,
EnvIds = make_envmap(CL, EnvImgRec),
[?SET(Tag, Id) || {Tag,Id} <- EnvIds],
init_opengl(),
wings_develop:gl_error_check({?MODULE,?FUNCTION_NAME}),
ok.
init_opengl() ->
%% Bind textures to units
Ids = [{areamatrix_tex, ?AREA_LTC_MAT_UNIT},
{brdf_tex, ?ENV_BRDF_MAP_UNIT},
{env_diffuse_tex, ?ENV_DIFF_MAP_UNIT},
{env_spec_tex, ?ENV_SPEC_MAP_UNIT}],
SetupUnit = fun({Tag, Unit}) ->
case ?GET(Tag) of
undefined -> ignore;
ImId ->
TxId = wings_image:txid(ImId),
gl:activeTexture(?GL_TEXTURE0 + Unit),
is_integer(TxId) andalso gl:bindTexture(?GL_TEXTURE_2D, TxId),
gl:activeTexture(?GL_TEXTURE0)
end
end,
_ = [SetupUnit(Id) || Id <- Ids],
ok.
load_env_file(FileName) ->
case wings_image:image_read([{filename, FileName}]) of
#e3d_image{} = Img ->
Img;
_Error ->
?dbg("Could not load env image: ~p~n", [FileName]),
wings_image:image_read([{filename, def_envmap()}])
end.
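
%% Dispatch the commands reachable from the light menu (see menu/3
%% further down); each clause either starts a drag sequence or returns
%% an updated #st{}.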
command({move_light,Type}, St) ->
wings_move:setup(Type, St);
command(color, St) ->
color(St);
command({position_highlight,Data}, St) ->
position_highlight(Data, St);
command({attenuation,Type}, St) ->
attenuation(Type, St);
command(spot_angle, St) ->
spot_angle(St);
command(spot_falloff, St) ->
spot_falloff(St);
command(edit, St) ->
edit(St);
command({edit,Id}, St) ->
edit(Id, St);
command(delete, St) ->
{save_state,delete(St)};
command({duplicate,Dir}, St) ->
duplicate(Dir, St).
-spec is_any_light_selected(#st{}) -> boolean().
is_any_light_selected(St) ->
MF = fun(_, We) -> ?IS_LIGHT(We) end,
RF = fun erlang:'or'/2,
wings_sel:dfold(MF, RF, false, St).
any_enabled_lights() ->
wings_dl:fold(fun(#dlo{src_we=We}, Bool) ->
Bool orelse ?IS_ANY_LIGHT(We)
end, false).
-spec info(#we{}) -> iolist().
info(#we{name=Name,light=#light{type=Type}=L}=We) ->
Info0 = io_lib:format(?__(1,"Light ~ts"), [Name]),
case Type of
ambient -> Info0;
_ ->
Pos = light_pos(We),
Info = [Info0|io_lib:format(?__(2,": Pos ~s"),
[wings_util:nice_vector(Pos)])],
[Info|info_1(Type, Pos, L)]
end.
info_1(point, _, _) -> [];
info_1(Type, Pos, #light{aim=Aim,spot_angle=A}) ->
Dir = e3d_vec:norm_sub(Aim, Pos),
Info = io_lib:format(?__(1,". Aim ~s. Dir ~s"),
[wings_util:nice_vector(Aim),
wings_util:nice_vector(Dir)]),
[Info|case Type of
spot -> io_lib:format(?__(2,". Angle ~s~c"),
[wings_util:nice_float(A),?DEGREE]);
_ -> []
end].
%%%
%%% Light Commands.
%%%
color(St0) ->
{St,Flags} =
wings_sel:mapfold(
fun(_, #we{light=L}=We, []) when ?IS_LIGHT(We) ->
{R,G,B,A} = get_light_color(L),
{H,S,V} = wings_color:rgb_to_hsv(R, G, B),
ColorFun = fun({finish,C}, D) -> color(C, D, A);
(C, D) -> color(C, D, A)
end,
Flags = [{initial,[H,V,S]}],
{We#we{temp=ColorFun},Flags};
(_, We, _) when ?IS_LIGHT(We) ->
wings_u:error_msg(?__(1,"Select only one light."));
(_, _, A) -> A
end, [], St0),
Units = [{angle,{0.0,359.9999}},
{percent,{0.0,1.0}},
{percent,{0.0,1.0}}],
DF = fun(#we{temp=General}) -> General end,
wings_drag:general(DF, Units, Flags, St).
color([H,V,S], #dlo{src_we=#we{light=L0}=We0}=D, A) ->
{R,G,B} = wings_color:hsv_to_rgb(H, S, V),
Col = {R,G,B,A},
L = update_color(L0, Col),
We = We0#we{light=L},
update(D#dlo{work=none,src_we=We}).
get_light_color(#light{type=ambient,ambient=Col}) -> Col;
get_light_color(#light{diffuse=Diff}) -> Diff.
update_color(#light{type=ambient}=L, Col) -> L#light{ambient=Col};
update_color(L, Col) -> L#light{diffuse=Col}.
position_highlight({'ASK',Ask}, St) ->
wings:ask(Ask, St, fun position_highlight/2);
position_highlight(Center, St) ->
{save_state,
wings_sel:map(fun(_, We) when ?IS_LIGHT(We) ->
position_highlight_1(Center, We);
(_, We) -> We
end, St)}.
position_highlight_1(Center, #we{light=L0}=We) ->
case L0 of
#light{type=point} ->
move_light(Center, We);
_ ->
L = L0#light{aim=Center},
We#we{light=L}
end.
spot_angle(St) ->
case selected_light(St) of
#light{type=spot,spot_angle=SpotAngle} ->
SpotFun0 = fun([Angle|_], L) -> L#light{spot_angle=Angle} end,
DF = fun(_) -> adjust_fun(SpotFun0) end,
Units = [{angle,{0.1,89.9}}],
Flags = [{initial,[SpotAngle]}],
wings_drag:general(DF, Units, Flags, St);
_ ->
wings_u:error_msg(?__(1,"Not a spotlight."))
end.
spot_falloff(St) ->
case selected_light(St) of
#light{type=spot,spot_exp=SpotExp} ->
SpotFun0 = fun([Exp|_], L) -> L#light{spot_exp=Exp} end,
DF = fun(_) -> adjust_fun(SpotFun0) end,
Units = [{number,{0.0,128.0}}],
Flags = [{initial,[SpotExp]}],
wings_drag:general(DF, Units, Flags, St);
_ ->
wings_u:error_msg(?__(1,"Not a spotlight."))
end.
attenuation(Type, St) ->
case selected_light(St) of
#light{type=Ltype}=L when Ltype =:= point; Ltype =:= spot ->
Initial = att_initial(Type, L),
DF = fun(_) -> adjust_fun(att_fun(Type)) end,
Units = [{dx,att_range(Type)}],
Flags = [{initial,[Initial]}],
wings_drag:general(DF, Units, Flags, St);
_ ->
wings_u:error_msg(?__(1,"Not a point light or spotlight."))
end.
att_initial(linear, #light{lin_att=LinAtt}) -> LinAtt;
att_initial(quadratic, #light{quad_att=QuadAtt}) -> QuadAtt.
att_fun(linear) -> fun([V|_], L) -> L#light{lin_att=V} end;
att_fun(quadratic) -> fun([V|_], L) -> L#light{quad_att=V} end.
att_range(linear) -> {0.0,1.0};
att_range(quadratic) -> {0.0,0.5}.
selected_light(St) ->
MF = fun(_, #we{light=L}=We) when ?IS_LIGHT(We) ->
[L];
(_, #we{}) ->
[]
end,
RF = fun erlang:'++'/2,
case wings_sel:dfold(MF, RF, [], St) of
[Selected] ->
Selected;
[_|_] ->
wings_u:error_msg(?__(1,"Select only one light."))
end.
adjust_fun(AdjFun) ->
fun({finish,Ds}, D) -> adjust_fun_1(AdjFun, Ds, D);
(Ds, D) -> adjust_fun_1(AdjFun, Ds, D)
end.
adjust_fun_1(AdjFun, Ds, #dlo{src_we=#we{light=L0}=We0}=D) ->
L = AdjFun(Ds, L0),
We = We0#we{light=L},
update(D#dlo{work=none,src_we=We}).
%%
%% The Edit Properties command.
%%
edit(St) ->
case wings_sel:selected_ids(St) of
[Id] ->
edit(Id, St);
[_|_] ->
wings_u:error_msg(?__(1,"Select only one light."))
end.
edit(Id, St) ->
Obj = wings_obj:get(Id, St),
case Obj of
#{light:=#light{type=ambient}} ->
{_, Prop} = get_light(Obj, false, St),
{dialog,Qs,Fun} = edit_ambient_dialog(Obj, Prop, St),
wings_dialog:dialog(?__(2,"Ambient Light Properties"), Qs, Fun);
#{light:=#light{}} ->
{_, Prop} = get_light(Obj, false, St),
{dialog,Qs,Fun} = edit_dialog(Obj, Prop, St),
wings_dialog:dialog(?__(3,"Light Properties"), Qs, Fun);
_ ->
wings_u:error_msg(?__(4,"Select one area light."))
end.
edit_ambient_dialog(Obj, Prop0, St) ->
#{name:=Name,light:=L0} = Obj,
#light{ambient=Amb0} = L0,
Qs0 = {vframe,
[{hframe,
[{label_column,
[{?__(1,"Ambient"),{color,Amb0}}]}],
[{title,?__(2,"Color")}]}|qs_specific(L0)]},
Qs1 = wings_plugin:dialog({light_editor_setup,Name,Prop0}, [{"Wings 3D", Qs0}]),
Qs = {vframe_dialog,
[{oframe, Qs1, 1, [{style, buttons}]}],
[{buttons, [ok, cancel]}, {key, result}]},
Fun = fun([Amb|Res]) ->
{ok,Prop} = plugin_results(Name, Prop0, Res),
L = L0#light{ambient=Amb,prop=Prop},
wings_obj:put(Obj#{light:=L}, St)
end,
{dialog,Qs,Fun}.
edit_dialog(Obj, Prop0, St) ->
#{name:=Name,light:=L0} = Obj,
#light{diffuse=Diff0,specular=Spec0} = L0,
Qs0 = {vframe,
[{hframe,
[{label_column,
[{?__(1,"Diffuse"),{color,Diff0}},
{?__(3,"Specular"),{color,Spec0}}]}],
[{title,?__(4,"Colors")}]}|qs_specific(L0)]},
Qs1 = wings_plugin:dialog({light_editor_setup,Name,Prop0}, [{"Wings 3D", Qs0}]),
Qs = {vframe_dialog,
[{oframe, Qs1, 1, [{style, buttons}]}],
[{buttons, [ok, cancel]}, {key, result}]},
Fun = fun([Diff,Spec|More0]) ->
L1 = L0#light{diffuse=Diff,specular=Spec},
{L2,More} = edit_specific(More0, L1),
case plugin_results(Name, Prop0, More) of
{ok,Prop} ->
L = L2#light{prop=Prop},
wings_obj:put(Obj#{light:=L}, St)
end
end,
{dialog,Qs,Fun}.
plugin_results(Name, Prop0, Res0) ->
case wings_plugin:dialog_result({light_editor_result,Name,Prop0}, Res0) of
{Prop,[{result, ok}]} ->
{ok,keydelete(opengl, 1, Prop)};
{_,Res} ->
io:format(?__(1,
"Light editor plugin(s) left garbage:~n ~P~n"),
[Res,20]),
wings_u:error_msg(?__(2,"Plugin(s) left garbage"))
end.
qs_specific(#light{type=spot,spot_angle=Angle,spot_exp=SpotExp}=L) ->
Spot = [{vframe,
[{label_column,
[{?__(1, "Angle"), {slider, {text, Angle, [{range, {0.0, 89.9}}]}}},
{?__(2, "Falloff"), {slider, {text, SpotExp, [{range, {0.0, 128.0}}]}}}]
}],
[{title,?__(3,"Spot Parameters")}]}],
qs_att(L, Spot);
qs_specific(#light{type=point}=L) -> qs_att(L, []);
qs_specific(#light{type=area}=L) -> qs_att(L, []);
qs_specific(_) -> [].
qs_att(#light{lin_att=Lin,quad_att=Quad}, Tail) ->
[{vframe,
[{label_column,
[{?__(1,"Linear"),{slider,{text,Lin,[{range,{0.0,1.0}}]}}},
{?__(2,"Quadratic"),{slider,{text,Quad,[{range,{0.0,0.5}}]}}}]
}],
[{title,?__(3,"Attenuation")}]}|Tail].
edit_specific([LinAtt,QuadAtt,Angle,SpotExp|More], #light{type=spot}=L) ->
{L#light{spot_angle=Angle,spot_exp=SpotExp,lin_att=LinAtt,quad_att=QuadAtt},More};
edit_specific([LinAtt,QuadAtt|More], #light{type=point}=L) ->
{L#light{lin_att=LinAtt,quad_att=QuadAtt},More};
edit_specific([LinAtt,QuadAtt|More], #light{type=area}=L) ->
{L#light{lin_att=LinAtt,quad_att=QuadAtt},More};
edit_specific(More, L) -> {L,More}.
%%%
%%% The Delete command.
%%%
delete(St) ->
wings_sel:map_update_sel(
fun(_, _) ->
{#we{},gb_sets:empty()}
end, St).
%%%
%%% The Duplicate command.
%%%
duplicate(Dir, St0) ->
CF = fun(Items, We) ->
Empty = gb_sets:empty(),
New = [{We,Items,copy}],
{We,Empty,New}
end,
St = wings_sel:clone(CF, St0),
case Dir of
none -> St;
_ -> wings_move:setup(Dir, St)
end.
%%%
%%% Creating lights.
%%%
create(Type, #st{onext=Oid}=St) ->
Prefix = atom_to_list(Type),
Name = Prefix++integer_to_list(Oid),
import([{Name,[{opengl,[{type,Type}]}]}], St).
%%%
%%% Updating, drawing and rendering lights.
%%%
update_dynamic(#dlo{src_we=We0}=D, Vtab0) ->
Vtab = array:from_orddict(sort(Vtab0)),
We = We0#we{vp=Vtab},
update_1(We, D#dlo{src_we=We}).
update_matrix(#dlo{src_we=We0}=D, Matrix) ->
We = wings_we:transform_vs(Matrix, We0),
update_1(We, D#dlo{transparent=We}).
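
%% Decide whether the draw data for a light must be rebuilt.  A rebuild
%% is needed when there is no draw fun yet, or when the selection state
%% no longer matches the kind of draw data stored in the #dlo{} (a list
%% when selected, a plain fun when unselected).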
update(#dlo{work=W,src_sel=Sel,src_we=#we{light=#light{}}=We}=D) ->
IsSel = Sel =/= none,
HaveW = W =/= none andalso not is_list(W),
HaveS = is_list(W),
if
W =:= none -> update_1(We, D);
IsSel andalso HaveS -> D;
(not IsSel) andalso HaveW -> D;
true -> update_1(We, D)
end;
update(D) -> D.
update_1(#we{light=#light{type=Type}}=We, #dlo{src_sel=Sel}=D) ->
IsSel = Sel =/= none,
SelColor = case IsSel of
false -> {0.0,0.0,1.0,1.0};
true -> {R,G,B} = wings_pref:get_value(selected_color),
{R,G,B,1.0}
end,
Draw = update_fun(Type, SelColor, We),
case IsSel of
true ->
%% Use a list of ops to indicate selected color
D#dlo{work=[Draw], sel=Draw};
false ->
D#dlo{work=Draw, sel=none}
end.
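
%% Build the draw fun for a light.  The vertex data handed to
%% wings_vbo:new/2 contains both the line segments that visualize the
%% light's direction/axes and the triangles of a small sphere marking
%% its position, so the returned fun only issues gl:drawArrays/3 calls
%% with the appropriate offsets.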
update_fun(infinite, SelColor, #we{light=#light{aim=Aim}}=We) ->
LightPos = light_pos(We),
LightCol = get_light_col(We),
Vec = e3d_vec:norm_sub(Aim, LightPos),
Data = [e3d_vec:mul(Vec, 0.2),e3d_vec:mul(Vec, 0.6)],
#{size:=Len, tris:=Tris} = wings_shapes:tri_sphere(#{subd=>3, scale=>0.08}),
D = fun(RS) ->
gl:lineWidth(1.5),
gl:pushMatrix(),
{X,Y,Z} = LightPos,
gl:translatef(X, Y, Z),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(LightCol), RS),
gl:drawArrays(?GL_TRIANGLES, 2, Len*3),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(SelColor), RS),
gl:drawArrays(?GL_LINES, 0, 2),
gl:popMatrix(),
RS
end,
wings_vbo:new(D, Data++Tris);
update_fun(point, SelColor, We) ->
LightPos = light_pos(We),
LightCol = get_light_col(We),
Data0 = [{1.0,0.0,0.0},
{0.0,1.0,0.0},
{0.0,0.0,1.0},
{0.71,0.71,0.0},
{0.71,0.0,0.71},
{0.0,0.71,0.71}],
N = length(Data0) * 4,
Data = lines(Data0),
#{size:=Len, tris:=Tris} = wings_shapes:tri_sphere(#{subd=>3, scale=>0.08}),
D = fun(RS) ->
gl:lineWidth(1.0),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(LightCol), RS),
gl:pushMatrix(),
{X,Y,Z} = LightPos,
gl:translatef(X, Y, Z),
gl:drawArrays(?GL_TRIANGLES, N, Len*3),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(SelColor), RS),
gl:drawArrays(?GL_LINES, 0, N),
gl:popMatrix(),
RS
end,
wings_vbo:new(D, Data++Tris);
update_fun(spot, SelColor, #we{light=#light{aim=Aim,spot_angle=Angle}}=We) ->
Top = light_pos(We),
LightCol = get_light_col(We),
SpotDir0 = e3d_vec:norm_sub(Aim, Top),
SpotDir = case e3d_vec:is_zero(SpotDir0) of
false -> SpotDir0;
true -> {0.0,1.0,0.0}
end,
Rad = Angle*math:pi()/180,
R = math:sin(Rad),
H = math:cos(Rad),
Translate = e3d_vec:mul(SpotDir, H),
Rot = e3d_mat:rotate_s_to_t({0.0,0.0,1.0}, e3d_vec:neg(SpotDir)),
#{size:=Len, tris:=Tris} = wings_shapes:tri_sphere(#{subd=>3, scale=>0.08}),
CylLines = cylinder_lines(R, 0.08, H, 3),
N = length(CylLines),
D = fun(RS) ->
gl:lineWidth(1.0),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(LightCol), RS),
gl:pushMatrix(),
{Tx,Ty,Tz} = Top,
gl:translatef(Tx, Ty, Tz),
gl:drawArrays(?GL_TRIANGLES, 0, Len*3),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(SelColor), RS),
{Dx,Dy,Dz} = Translate,
gl:translatef(Dx, Dy, Dz),
gl:multMatrixd(Rot),
gl:drawArrays(?GL_LINES, Len*3, N),
gl:popMatrix(),
RS
end,
wings_vbo:new(D, Tris ++ CylLines);
update_fun(ambient, _, _) ->
fun(RS) -> RS end.
lines([Vec|Vecs]) ->
[e3d_vec:mul(Vec, 0.2),
e3d_vec:mul(Vec, 0.6),
e3d_vec:mul(Vec, -0.2),
e3d_vec:mul(Vec, -0.6)|lines(Vecs)];
lines([]) -> [].
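
%% Approximate the spotlight cone with line segments: a subdivided
%% circle of radius BaseR at the origin, another of radius TopR at
%% height H, and lines connecting the two.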
cylinder_lines(BaseR, TopR, H, Levels) ->
Quad = [{0.0,1.0,0.0},{-1.0,0.0,0.0},{0.0,-1.0,0.0},{1.0,0.0,0.0}],
Subd = subd_cyl(Quad, Levels),
Orig = mk_lines(Subd, hd(Subd)),
Base = [e3d_vec:mul(V, BaseR) || V <- Orig],
Top = [e3d_vec:add_prod({0.0, 0.0, H}, V, TopR) || V <- Orig],
Connect = lists:foldl(fun({A,B}, Acc) -> [A,B|Acc] end, [], lists:zip(Base,Top)),
Base ++ Top ++ Connect.
subd_cyl(List, Level) when Level > 1 ->
New = subd_cyl(List, hd(List), []),
subd_cyl(New, Level-1);
subd_cyl(List, _) ->
List.
subd_cyl([V1|[V2|_]=Rest], First, Acc) ->
M = e3d_vec:norm(e3d_vec:average(V1, V2)),
subd_cyl(Rest, First, [M, V1|Acc]);
subd_cyl([V1], V2, Acc) ->
M = e3d_vec:norm(e3d_vec:average(V1, V2)),
[M, V1|Acc].
mk_lines([V1|[V2|_]=Rest],First) ->
[V1,V2|mk_lines(Rest,First)];
mk_lines([V1], V2) ->
[V1,V2].
get_light_col(#we{light=#light{diffuse=Diff}}) ->
Diff.
%%%
%%% Exporting lights.
%%%
%% For exporters.
export(St) ->
export(St, false).
%% For saving in .wings files.
export_bc(St) ->
export(St, true).
export(St, BackwardsCompatible) ->
F = fun(#{light:=_}=Obj, A) ->
[get_light(Obj, BackwardsCompatible, St)|A];
(_, A) ->
A
end,
L = wings_obj:fold(F, [], St),
reverse(L).
export_camera_lights() ->
Amb = {?__(1,"Ambient"), camera_ambient()},
Ls = case wings_pref:get_value(number_of_lights) of
1 ->
[{?__(2,"Infinite"),camera_infinite_1_0()}];
2 ->
[{?__(3,"Infinite1"),camera_infinite_2_0()},
{?__(4,"Infinite2"),camera_infinite_2_1()}]
end,
#view{origin=Aim} = wings_view:current(),
CameraPos = wings_view:eye_point(),
GL = fun({Name,Li = #light{aim=Diff}}) ->
LPos = e3d_vec:add(CameraPos,Diff),
We = #we{name = Name,
vp = array:from_orddict([{1, LPos}]),
light = Li#light{aim=Aim}},
get_light(We, false)
end,
[GL(Light) || Light <- [Amb|Ls]].
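
%% The exported representation of a light is {Name,Props}, where Props
%% is a property list along these lines (illustrative values only):
%%
%%   {"point1",
%%    [{visible,true},{locked,false},
%%     {opengl,[{type,point},{position,{0.0,3.0,0.0}},
%%              {aim_point,{0.0,0.0,0.0}},
%%              {diffuse,{1.0,1.0,1.0,1.0}},
%%              {specular,{1.0,1.0,1.0,1.0}},
%%              {linear_attenuation,0.0},
%%              {quadratic_attenuation,0.0}]}]}
%%
%% import/2 (a list of {Name,Props}) and import/1 (just Props) accept
%% the same keys.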
get_light(#{id:=Id,name:=Name,perm:=P,light:=Light}, BC, St) ->
F = fun(We) -> get_light_1(Light, We, BC) end,
Ps0 = wings_obj:with_we(F, Id, St),
Ps = export_perm(P, Ps0),
{Name,Ps}.
get_light(#we{name=Name,perm=P,light=Light}=We, BC) ->
Ps0 = get_light_1(Light, We, BC),
Ps = export_perm(P, Ps0),
{Name,Ps}.
get_light_1(#light{type=ambient,ambient=Amb,prop=Prop}, #we{pst=Pst}=We, _) ->
P = light_pos(We),
OpenGL = [{type,ambient},{ambient,Amb},{position,P},{pst,Pst}],
[{opengl,OpenGL}|Prop];
get_light_1(L, #we{pst=Pst}=We, BC) ->
#light{type=Type,diffuse=Diff,ambient=Amb,specular=Spec,
aim=Aim,spot_angle=Angle,spot_exp=SpotExp,
lin_att=LinAtt,quad_att=QuadAtt,prop=Prop} = L,
P = light_pos(We),
Common = [{type,Type},{position,P},{aim_point,Aim},
{diffuse,Diff},{ambient,Amb},{specular,Spec},{pst,Pst}],
OpenGL0 = case Type of
spot ->
[{cone_angle,Angle},{spot_exponent,SpotExp}|Common];
_ ->
Common
end,
OpenGL1 = if
Type =:= point; Type =:= spot; Type =:= area ->
[{linear_attenuation,LinAtt},
{quadratic_attenuation,QuadAtt}|OpenGL0];
true -> OpenGL0
end,
OpenGL = case Type of
area -> [{mesh,export_mesh(We, BC)}|OpenGL1];
_ -> OpenGL1
end,
[{opengl,OpenGL}|Prop].
export_perm({_,_}, Ps) ->
[{visible,false},{locked,false}|Ps];
export_perm(P, Ps) when is_integer(P) ->
[{visible,P < 2},{locked,(P band 1) =/= 0}|Ps].
%% This is the classic definition of e3d_face{}, as it was defined
%% before 1.1.9.
-record(classic_e3d_face,
{vs=[], %List of vertex indices.
	 vc=[],		%Vertex color indices.
tx=[], %List of texture indices.
ns=[], %List of normal indices.
mat=[], %Materials for face.
vis=-1}). %Visible edges (as in 3DS).
export_mesh(We, BC) ->
#e3d_mesh{fs=Fs0} = Mesh = wings_export:make_mesh(We, []),
Fs = case BC of
false ->
Fs0;
true ->
[export_fix_face(F) || F <- Fs0]
end,
Mesh#e3d_mesh{fs=Fs}.
export_fix_face(#e3d_face{vs=Vs,mat=Mat}) ->
%% Fix the face record so that it looks like the classic
%% definition of #e3d_face{} (before 1.1.9).
FaceRec = #classic_e3d_face{vs=Vs,mat=Mat},
%% Patch the record type.
setelement(1, FaceRec, e3d_face).
%%%
%%% Importing lights.
%%%
import(Lights, St) ->
foldl(fun import_fun/2, St, Lights).
import_fun({Name,Ps}, St) ->
wings_obj:new(Name, import(Ps), St).
import(Ps) ->
Visible = proplists:get_value(visible, Ps, []),
Locked = proplists:get_value(locked, Ps, []),
Prop1 = proplists:delete(visible, Ps),
Prop0 = proplists:delete(locked, Prop1),
OpenGL = proplists:get_value(opengl, Prop0, []),
Type = proplists:get_value(type, OpenGL, point),
Pos = proplists:get_value(position, OpenGL, ?DEF_POS),
Diff = proplists:get_value(diffuse, OpenGL, {1.0,1.0,1.0,1.0}),
Amb = import_ambient(Type, OpenGL),
Spec = proplists:get_value(specular, OpenGL, {1.0,1.0,1.0,1.0}),
Aim = proplists:get_value(aim_point, OpenGL, {0.0,0.0,0.0}),
LinAtt = proplists:get_value(linear_attenuation, OpenGL, 0.0),
QuadAtt = proplists:get_value(quadratic_attenuation, OpenGL, 0.0),
Angle = proplists:get_value(cone_angle, OpenGL, 30.0),
SpotExp = proplists:get_value(spot_exponent, OpenGL, 0.0),
Prop = proplists:delete(opengl, Prop0),
Light = #light{type=Type,diffuse=Diff,ambient=Amb,specular=Spec,
aim=Aim,lin_att=LinAtt,quad_att=QuadAtt,
spot_angle=Angle,spot_exp=SpotExp,prop=Prop},
We=import_we(Light, OpenGL, Pos),
We#we{perm=import_perm(Visible, Locked)}.
import_ambient(ambient, OpenGL) ->
proplists:get_value(ambient, OpenGL, {0.1,0.1,0.1,1.0});
import_ambient(_, OpenGL) ->
proplists:get_value(ambient, OpenGL, {0.0,0.0,0.0,1.0}).
import_we(#light{type=area}=Light, OpenGL, {X,Y,Z}) ->
Mesh =
case proplists:lookup(mesh, OpenGL) of
none ->
#e3d_mesh{type=polygon,
fs=[#e3d_face{vs=[0,1,2,3],
mat=[default]}],
vs=[{X+1.0,Y,Z+1.0},{X-1.0,Y,Z+1.0},
{X-1.0,Y,Z-1.0},{X+1.0,Y,Z-1.0}]};
{mesh,M} -> import_fix_mesh(M)
end,
We = wings_import:import_mesh(material, Mesh),
Pst = proplists:get_value(pst, OpenGL, gb_trees:empty()),
We#we{light=Light,pst=Pst};
import_we(#light{}=Light, OpenGL, {X,Y,Z}) ->
%% We used to put all vertices at the same position, but with
%% the rewritten pick handling we need a vertex array for picking.
%% The cube will be slightly larger than the sphere that is shown
%% for the light. The position of the light will be the centroid
%% of the cube.
Fs = [[0,3,2,1],[2,3,7,6],[0,4,7,3],[1,2,6,5],[4,5,6,7],[0,1,5,4]],
S = 0.07,
Vs = [{X-S,Y-S,Z+S},{X-S,Y+S,Z+S},{X+S,Y+S,Z+S},{X+S,Y-S,Z+S},
{X-S,Y-S,Z-S},{X-S,Y+S,Z-S},{X+S,Y+S,Z-S},{X+S,Y-S,Z-S}],
We = wings_we:build(Fs, Vs),
Pst = proplists:get_value(pst, OpenGL, gb_trees:empty()),
We#we{light=Light,pst=Pst}.
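
%% Translate the exported visible/locked flags back into the internal
%% permission bits (see the ?PERM_* macros in wings.hrl).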
import_perm([]=_Visible,[]=_Locked) -> % matches when a new light is added to the project
import_perm(true,false);
import_perm(false,false) ->
?PERM_HIDDEN_BIT;
import_perm(true,false) ->
0;
import_perm(true,true) ->
?PERM_LOCKED_BIT;
import_perm(false,true) ->
?PERM_HIDDEN_BIT bor ?PERM_LOCKED_BIT.
import_fix_mesh(#e3d_mesh{fs=Fs0}=Mesh0) ->
Fs = [import_fix_face(F) || F <- Fs0],
Mesh1 = Mesh0#e3d_mesh{fs=Fs},
Mesh = e3d_mesh:clean_faces(Mesh1),
e3d_mesh:transform(Mesh).
import_fix_face(FaceRec) when is_tuple(FaceRec) ->
    %% Different versions of Wings can have #e3d_face{}
%% records of different size (for example, in Wings 1.1.9
%% a new 'sg' field was added to #e3d_face{}). We know
%% that the fields we are interested in are in the same
%% place, so we can retrieve them using element/2.
%%
e3d_face = element(1, FaceRec), %Crash on unknown record type.
Vs = element(#e3d_face.vs, FaceRec),
Mat = element(#e3d_face.mat, FaceRec),
#e3d_face{vs=Vs,mat=Mat}.
%%%
%%% Setting up lights.
%%%
global_lights(Lights0) ->
Lights = lists:map(fun scene_lights_fun/1, Lights0),
IsAL = fun(#{light:=#light{type=Type}}) -> Type =:= ambient end,
lists:partition(IsAL, Lights).
camera_ambient() ->
#light{type = ambient,
aim = {0.0,0.0,0.0},
ambient = {0.1,0.1,0.1,1.0}}.
camera_infinite_1_0() ->
#light{type = infinite,
diffuse = {0.7,0.7,0.7,1},
specular = {0.2,0.2,0.2,1},
ambient = {0,0,0,1.0},
aim = {0.110,0.0,0.994}
}.
camera_infinite_2_0() ->
#light{type = infinite,
diffuse = {1,1,1,1},
specular = {0.3,0.3,0.3,1},
ambient = {0,0,0,1.0},
aim = {0.71,0.71,0.0}
}.
camera_infinite_2_1() ->
#light{type = infinite,
diffuse = {0.5,0.5,0.5,0.5},
specular = {0.3,0.3,0.3,1},
ambient = {0,0,0,1.0},
aim = {-0.71,-0.71,0.0}
}.
scene_lights_fun(#dlo{transparent=#we{light=L}=We}) ->
%% This happens when dragging a light in Body selection mode.
%% (Not area light.)
prepare_light(L, We, none);
scene_lights_fun(#dlo{drag=Drag,src_we=We0}=D) ->
%% Area lights handled here in all selection modes +
%% other lights in vertex/edge/face modes.
We = case We0 of
#we{light=#light{type=area}} ->
%% For an area light it looks better in vertex/edge/face
%% modes to emulate with the static non-split shape
%% during drag. It would be more correct if the area light
%% updating would use the resulting #we{}, but it does not
%% exist until the drag is done.
wings_draw:original_we(D);
_ ->
%% Non-area lights drag the whole shape so they can use
%% the dynamic part of the split shape
%% (which is the whole shape).
We0
end,
M = case Drag of
{matrix,_Tr,_M0,M1} -> M1;
_ -> none
end,
prepare_light(We#we.light, We, M).
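
%% Convert a light into the map passed on to setup_light/2.  As in
%% classic OpenGL, the position gets w=0.0 for directional (infinite)
%% lights and w=1.0 for positional ones; an area light is passed as its
%% four corner points, or reduced to a point/spot light when its shape
%% is not a simple quad (see arealight_props/1).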
prepare_light(#light{type=ambient}=L, _We, _M) ->
#{light=>L};
prepare_light(#light{type=infinite,aim=Aim}=L, We, _M) ->
{X,Y,Z} = e3d_vec:norm_sub(light_pos(We), Aim),
#{light=>L, pos=>{X,Y,Z,0.0}};
prepare_light(#light{type=point}=L, We, _M) ->
{X,Y,Z} = light_pos(We),
#{light=>L, pos=>{X,Y,Z,1.0}};
prepare_light(#light{type=spot,aim=Aim}=L, We, _M) ->
Pos = {X,Y,Z} = light_pos(We),
Dir = e3d_vec:norm_sub(Aim, Pos),
#{light=>L, pos=>{X,Y,Z,1.0}, dir=>Dir};
prepare_light(#light{type=area}=L, We, M) ->
case arealight_props(We) of
{area, Corners} -> #{light=>L, points=>[mul_point(M,P)||P<-Corners]};
{point, C} ->
{X,Y,Z} = mul_point(M, C),
#{light=>L#light{type=point}, pos=>{X,Y,Z,1.0}};
{spot, Dir0, C} ->
{X,Y,Z} = mul_point(M, C),
Dir = mul_point(M, Dir0),
#{light=>L#light{type=spot}, pos=>{X,Y,Z,1.0}, dir=>Dir}
end.
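
%% Select the shader program for the light type and upload its
%% uniforms.  Colors are converted from sRGB to linear space before
%% being handed to the shaders.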
setup_light(#{light:=#light{type=ambient,ambient=Amb}}, RS0) ->
RS = wings_shaders:use_prog(ambient_light, RS0),
wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Amb), RS);
setup_light(#{light:=#light{type=infinite, diffuse=Diff, specular=Spec},
pos:=Pos}, RS0) ->
RS1 = wings_shaders:use_prog(infinite_light, RS0),
RS2 = wings_shaders:set_uloc(ws_lightpos, Pos, RS1),
RS3 = wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Diff), RS2),
wings_shaders:set_uloc(light_specular, wings_color:srgb_to_linear(Spec), RS3);
setup_light(#{light:=#light{type=point, diffuse=Diff,specular=Spec,
lin_att=Lin,quad_att=Quad},
pos:=Pos}, RS0) ->
RS1 = wings_shaders:use_prog(point_light, RS0),
RS2 = wings_shaders:set_uloc(ws_lightpos, Pos, RS1),
RS3 = wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Diff), RS2),
RS4 = wings_shaders:set_uloc(light_specular, wings_color:srgb_to_linear(Spec), RS3),
wings_shaders:set_uloc(light_att, {0.8, Lin, Quad}, RS4);
setup_light(#{light:=#light{type=spot, diffuse=Diff,specular=Spec,
lin_att=Lin,quad_att=Quad,
spot_angle=Angle,spot_exp=Exp},
pos:=Pos, dir:=Dir}, RS0) ->
RS1 = wings_shaders:use_prog(spot_light, RS0),
RS2 = wings_shaders:set_uloc(ws_lightpos, Pos, RS1),
RS3 = wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Diff), RS2),
RS4 = wings_shaders:set_uloc(light_att, {0.8, Lin, Quad}, RS3),
RS5 = wings_shaders:set_uloc(light_dir, Dir, RS4),
RS6 = wings_shaders:set_uloc(light_angle, math:cos(Angle*math:pi()/180.0), RS5),
RS7 = wings_shaders:set_uloc(light_exp, Exp, RS6),
wings_shaders:set_uloc(light_specular, wings_color:srgb_to_linear(Spec), RS7);
setup_light(#{light:=#light{type=area, diffuse=Diff, specular=Spec,
lin_att=Lin,quad_att=Quad},
points:=Points}, RS0) ->
RS1 = wings_shaders:use_prog(area_light, RS0),
RS2 = wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Diff), RS1),
RS3 = wings_shaders:set_uloc(light_specular, wings_color:srgb_to_linear(Spec), RS2),
RS4 = wings_shaders:set_uloc(light_att, {0.8, Lin, Quad}, RS3),
wings_shaders:set_uloc(light_points, Points, RS4).
light_pos(We) ->
wings_vertex:center(We).
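
%% Classify an area light for rendering: a single visible face with up
%% to five vertices is treated as a quad (dropping any extra vertex);
%% faces with more vertices, or multi-face lights, are approximated by
%% a spot light along the (average) face normal, or by a point light
%% when no usable normal can be computed.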
arealight_props(#we{light=#light{type=area}}=We) ->
case wings_we:visible(We) of
[Face] ->
Vs0 = wings_face:vertex_positions(Face, We),
case lists:reverse(Vs0) of
[_,_,_,_] = Vs -> {area, Vs};
[A,B,C] -> {area, [A,B,C,C]};
[A,B,C,D,_] -> {area, [A,B,C,D]}; %% Could do better here
_ ->
N = wings_face:normal(Face, We),
C = wings_face:center(Face, We),
{spot, N, C}
end;
Fs ->
C = wings_vertex:center(We),
Ns = [wings_face:normal(F, We) || F <- Fs],
N = e3d_vec:average(Ns),
case e3d_vec:len(N) > 0.5 of
true -> {spot, e3d_vec:norm(N), C};
false -> {point, C}
end
end.
move_light(Pos, #we{vp=Vtab0}=We) ->
Vtab = array:sparse_map(fun(_, _) -> Pos end, Vtab0),
We#we{vp=Vtab}.
shape_materials(#we{id=Id, light=#light{diffuse=Front}}, #st{mat=Mtab}=St) ->
St#st{mat=gb_trees:insert({'_area_light_',Id},[Front],Mtab)}.
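
%% Transform a point with the matrix from an ongoing drag.  'none' and
%% a pure translation matrix are special-cased to avoid a full matrix
%% multiplication.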
mul_point(none, Pos) -> Pos;
mul_point({1.0,0.0,0.0, 0.0,1.0,0.0, 0.0,0.0,1.0, Tx,Ty,Tz}, {X,Y,Z}) ->
{X+Tx,Y+Ty,Z+Tz};
mul_point(M, P) -> e3d_mat:mul_point(M, P).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
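
%% Load the precomputed 64x64 RGBA32F lookup table used for area light
%% shading (an LTC matrix table, presumably "linearly transformed
%% cosines") and register it as a hidden image; it is bound to its
%% texture unit in init_opengl/0.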
load_area_light_tab() ->
Path = filename:join(wings_util:lib_dir(wings), "textures"),
LTCmatFile = "areal_ltcmat.bin",
{ok, LTCmat} = file:read_file(filename:join(Path, LTCmatFile)),
64*64*4*4 = byte_size(LTCmat),
Opts = [{wrap, {clamp,clamp}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(area_mat,
#e3d_image{type=r32g32b32a32f, bytes_pp=16,
width=64,height=64,
image=LTCmat,
extra=Opts
}),
?CHECK_ERROR(),
{areamatrix_tex, ImId}.
fake_envmap(EnvImgRec) ->
    %% Poor man's version with blurred images
Path = filename:join(wings_util:lib_dir(wings), "textures"),
SpecBG = wings_image:e3d_to_wxImage(EnvImgRec),
wxImage:rescale(SpecBG, 512, 256, [{quality, ?wxIMAGE_QUALITY_HIGH}]),
tone_map(SpecBG),
SBG0 = wings_image:wxImage_to_e3d(SpecBG),
SpecBG1 = wxImage:copy(SpecBG),
MMs = make_mipmaps(SpecBG1, 1, 256, 128),
Opts = [{wrap, {repeat,repeat}}, {filter, {mipmap, linear}}, {mipmaps, MMs}],
SBG = SBG0#e3d_image{name="Fake Spec", extra=Opts},
SpecId = wings_image:new_hidden(env_spec_tex, SBG),
wxImage:rescale(SpecBG, 64, 32, [{quality, ?wxIMAGE_QUALITY_HIGH}]),
DiffBG = wxImage:blur(SpecBG, 10),
blur_edges(DiffBG),
DBG0 = wings_image:wxImage_to_e3d(DiffBG),
DBG = DBG0#e3d_image{name="Fake diffuse", extra=[{wrap, {repeat,repeat}},
{filter, {linear,linear}}]},
wxImage:destroy(SpecBG),
wxImage:destroy(DiffBG),
{ok, BrdfBin0} = file:read_file(filename:join(Path,"brdf_tab.bin")),
128*128*2 = byte_size(BrdfBin0),
BrdfBin = << << R,G,0 >> || << R,G >> <= BrdfBin0 >>,
OptsB = [{wrap, {clamp,clamp}}, {filter, {linear, linear}}],
Brdf = #e3d_image{width=128,height=128,image=BrdfBin,extra=OptsB},
    %% wings_image:debug_display(brdf, Brdf),
    %% wings_image:debug_display(spec, SBG),
    %% wings_image:debug_display(diff, DBG),
[{env_spec_tex, SpecId},
{env_diffuse_tex, wings_image:new_hidden(env_diffuse_tex, DBG)},
{brdf_tex, wings_image:new_hidden(brdf_tex, Brdf)}].
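
%% Crude tone mapping for the fallback environment map: every pixel is
%% scaled by a factor derived from its own luminance, brightening the
%% image before it is used as a fake specular map.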
tone_map(Image) ->
RGB0 = wxImage:getData(Image),
RGB = << << (pixel_tonemap(R/256,G/256,B/256)):24 >> || <<R:8,G:8,B:8>> <= RGB0 >>,
wxImage:setData(Image, RGB).
pixel_tonemap(R0,G0,B0) ->
Lum = (1.0+(R0 * 0.2126 + G0 * 0.72152 + B0 * 0.0722)),
R = min(255, trunc(Lum * R0 * 255.0)),
G = min(255, trunc(Lum * G0 * 255.0)),
B = min(255, trunc(Lum * B0 * 255.0)),
(R bsl 16) bor (G bsl 8) bor B.
blur_edges(Image) ->
RGB0 = wxImage:getData(Image),
RowSz = wxImage:getWidth(Image)*3,
BlobSz = (wxImage:getHeight(Image)-2)*RowSz,
<<First0:RowSz/binary, Center:BlobSz/binary, Last0:RowSz/binary>> = RGB0,
First = blur_row(First0),
Last = blur_row(Last0),
RGB1 = <<First:RowSz/binary, Center:BlobSz/binary, Last:RowSz/binary>>,
RGB = << << (blur_edge(Row, RowSz))/binary >> || <<Row:RowSz/binary>> <= RGB1 >>,
wxImage:setData(Image, RGB).
-define(A(C0,C1,C2), (round((C0+C1+C2)/3))).
blur_row(Bin) ->
List = binary_to_list(Bin),
{R0,G0,B0} = average(List, 0,0,0,0),
blur_pixel(List, R0,G0,B0, <<>>).
average([R,G,B|Rest], R0,G0,B0,N) ->
average(Rest, R0+R,G0+G,B0+B,N+1);
average([], R0,G0,B0,N) ->
{R0 div N, G0 div N, B0 div N}.
blur_pixel([R,G,B|Rest], R0,G0,B0, Bin) ->
Acc = <<Bin/binary, ((R+R0) div 2):8, ((G+G0) div 2):8, ((B+B0) div 2):8>>,
blur_pixel(Rest, R0,G0,B0, Acc);
blur_pixel([], _R0,_G0,_B0, Bin) ->
Bin.
blur_edge(Row0, Bytes) ->
Skip = Bytes-18,
<<R0:8,G0:8,B0:8, R1:8,G1:8,B1:8, R2:8,G2:8,B2:8,
Bin:Skip/bytes,
R7:8,G7:8,B7:8, R8:8,G8:8,B8:8, R9:8,G9:8,B9:8>> = Row0,
R00 = ?A(R0,R1,R9), G00=?A(G0,G1,G9), B00=?A(B0,B1,B9),
R90 = ?A(R0,R8,R9), G90=?A(G0,G8,G9), B90=?A(B0,B8,B9),
R10 = ?A(R00,R1,R2), G10=?A(G00,G1,G2), B10=?A(B00,B1,B2),
R80 = ?A(R90,R8,R7), G80=?A(G90,G8,G7), B80=?A(B90,B8,B7),
R01 = ?A(R00,R10,R90), G01=?A(G00,G10,G90), B01=?A(B00,B10,B90),
R91 = ?A(R00,R80,R90), G91=?A(G00,G80,G90), B91=?A(B00,B80,B90),
<<R01:8,G01:8,B01:8,
R10:8,G10:8,B10:8,
R2:8,G2:8,B2:8,
Bin:Skip/bytes,
R7:8,G7:8,B7:8,
R80:8,G80:8,B80:8,
R91:8,G91:8,B91:8
>>.
make_mipmaps(Img0, Level, W, H) when Level < 6 ->
wxImage:rescale(Img0, W, H),
Img = wxImage:blur(Img0, 4),
wxImage:destroy(Img0),
Bin = wxImage:getData(Img),
%% wings_image:debug_display(1000-Level,
    %%                           #e3d_image{width=W, height=H, image=Bin, order=upper_left,
    %%                                      name="Fake Spec: " ++ integer_to_list(Level)}),
[{Bin, W, H, Level} | make_mipmaps(Img, Level+1, W div 2, H div 2)];
make_mipmaps(Img, _, _, _) ->
wxImage:destroy(Img),
[].
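
%% Build the BRDF, diffuse and specular environment maps for the given
%% panorama image with the OpenCL kernels in img_lib.cl, or fetch a
%% previously computed set from the cache in the user cache directory.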
make_envmap(CL, #e3d_image{filename=FileName}=EnvImgRec) ->
EnvIds =
case load_cached_envmap(FileName) of
[] ->
Cached = make_envmap_1(CL, EnvImgRec),
save_cached_envmap(FileName, Cached),
[TagId || {TagId,_} <- Cached];
Cached ->
Cached
end,
wings_cl:working(),
EnvIds.
make_envmap_1(CL, EnvImgRec0) ->
wings_pb:start(?__(1, "Building envmaps")),
EnvImgRec = e3d_image:convert(EnvImgRec0, r8g8b8a8, 1, lower_left),
wings_pb:update(0.1),
W = 512, H = 256, %% Sizes for result images
OrigImg = wings_cl:image(EnvImgRec, CL),
Buff0 = wings_cl:buff(2048*1024*4*4, [read_write], CL),
Buff1 = wings_cl:buff(2048*1024*4*4, [read_write], CL),
BrdfId = make_brdf(Buff0, 512, 512, CL),
wings_pb:update(0.5),
DiffId = make_diffuse(OrigImg, Buff0, Buff1, W, H, CL),
wings_pb:update(0.9),
SpecId = make_spec(OrigImg, Buff0, Buff1, 2048, 1024, CL),
wings_pb:done(),
cl:release_mem_object(OrigImg),
cl:release_mem_object(Buff0),
cl:release_mem_object(Buff1),
[DiffId,SpecId,BrdfId].
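
%% The helpers below run the OpenCL kernels and convert the float
%% results into 8-bit RGB images: a Schlick BRDF lookup table, a
%% diffuse (irradiance) map, and a chain of specular maps stored as
%% mipmap levels (the per-level Step parameter presumably corresponds
%% to increasing roughness).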
make_brdf(Buff, W, H, CL) ->
CC = wings_cl:cast(schlick_brdf, [Buff, W, H], [W,H], [], CL),
Read = wings_cl:read(Buff, W*H*4*2, [CC], CL),
{ok, BrdfData} = cl:wait(Read),
Img = << << (round(X*255)), (round(Y*255)), 0 >>
|| <<X:32/float-native, Y:32/float-native>> <= BrdfData >>,
    %% wings_image:debug_display(brdf, #e3d_image{width=W, height=H, image=Img, name="BRDF"}),
Opts = [{wrap, {clamp,clamp}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(brdf_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{{brdf_tex, ImId}, Img}.
make_diffuse(OrigImg, Buff0, Buff1, W, H, CL) ->
Fill0 = wings_cl:fill(Buff0, <<0:(32*4)>>, W*H*4*4, CL),
Fill1 = wings_cl:fill(Buff1, <<0:(32*4)>>, W*H*4*4, CL),
{B0,B1,Pre} = cl_multipass(make_diffuse, [OrigImg, W, H], Buff0, Buff1, 0, 10,
[W,H], [Fill0, Fill1], CL),
CC = wings_cl:cast(color_convert, [B0,B1,W,H], [W,H], Pre, CL),
Read = wings_cl:read(B1, W*H*4*4, [CC], CL),
{ok, DiffData} = cl:wait(Read),
Img = << << (round(R*255)), (round(G*255)), (round(B*255)) >> ||
<<R:32/float-native, G:32/float-native, B:32/float-native, _:32>> <= DiffData >>,
    %% wings_image:debug_display(1000+W, #e3d_image{width=W, height=H, image=Img, name="Diffuse"}),
Opts = [{wrap, {repeat,repeat}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(env_diffuse_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{{env_diffuse_tex, ImId}, Img}.
make_spec(OrigImg, Buff0, Buff1, W0, H0, CL) ->
NoMipMaps = trunc(math:log2(min(W0,H0))),
[{Img,W0,H0,0}|MMs] = make_spec(0, NoMipMaps, OrigImg, Buff0, Buff1, W0, H0, CL),
Opts = [{wrap, {repeat,repeat}}, {filter, {mipmap, linear}}, {mipmaps, MMs}],
    %% ?dbg("Spec: ~p ~p => ~w mipmaps~n", [W0,H0,length(MMs)]),
ImId = wings_image:new_hidden(env_spec_tex, #e3d_image{width=W0,height=H0,image=Img,extra=Opts}),
{{env_spec_tex, ImId}, {Img,MMs}}.
make_spec(Level, Max, OrigImg, Buff0, Buff1, W, H, CL) when Level =< Max ->
Step = Level/Max,
Fill0 = wings_cl:fill(Buff0, <<0:(32*4)>>, W*H*4*4, CL),
Fill1 = wings_cl:fill(Buff1, <<0:(32*4)>>, W*H*4*4, CL),
{B0,B1,Pre} = cl_multipass(make_specular, [OrigImg, W, H, Step],
Buff0, Buff1, 0, 10, [W,H], [Fill0, Fill1], CL),
CC = wings_cl:cast(color_convert, [B0,B1,W,H], [W,H], Pre, CL),
Read = wings_cl:read(B1, W*H*4*4, [CC], CL),
{ok, SpecData} = cl:wait(Read),
Img = << << (round(R*255)), (round(G*255)), (round(B*255)) >> ||
<<R:32/float-native, G:32/float-native, B:32/float-native, _:32>> <= SpecData >>,
%% io:format("~p: ~p ~p ~.3f~n", [Level, W, H, Step]),
    %% Level < 3 andalso
    %%     wings_image:debug_display(900-Level, #e3d_image{width=W, height=H, image=Img,
%% name="Spec: " ++ integer_to_list(Level)}),
[{Img,W,H,Level} | make_spec(Level+1, Max, OrigImg, Buff0, Buff1, W div 2, H div 2, CL)];
make_spec(_Level, _Max, _OrigImg, _B0, _B1, _W, _H, _CL) ->
[].
save_cached_envmap(FileName0, Cached0) ->
FileName = env_map_cache_name(FileName0),
case file:open(FileName, [write, raw, binary]) of
{ok,File} ->
Cached = [{Tag,Img} || {{Tag,_},Img} <- Cached0],
Bin = term_to_binary(Cached),
file:write(File,Bin),
file:close(File);
_ ->
ok
end,
ok.
%% If the cache file cannot be loaded we return an empty list,
%% signaling that a new environment map needs to be computed.
load_cached_envmap(FileName0) ->
FileName = env_map_cache_name(FileName0),
case filelib:is_file(FileName) of
true ->
case file:read_file(FileName) of
{ok,Bin} ->
Cached = binary_to_term(Bin),
[rebuild_cached_img(Buf) || Buf <- Cached];
_ ->
file:delete(FileName),
[]
end;
false -> []
end.
env_map_cache_name(FileName) ->
CacheName = filename:rootname(filename:basename(FileName))++".emc",
filename:join(wings_u:basedir(user_cache), CacheName).
rebuild_cached_img({Tag,Img}) ->
case Tag of
brdf_tex -> cached_brdf(Img, 512, 512);
env_diffuse_tex -> cached_diffuse(Img, 512, 256);
env_spec_tex -> cached_spec(Img, 2048, 1024)
end.
cached_brdf(Img, W, H) ->
Opts = [{wrap, {clamp,clamp}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(brdf_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{brdf_tex, ImId}.
cached_diffuse(Img, W, H) ->
Opts = [{wrap, {repeat,repeat}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(env_diffuse_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{env_diffuse_tex, ImId}.
cached_spec({Img,MMs}, W, H) ->
Opts = [{wrap, {repeat,repeat}}, {filter, {mipmap, linear}}, {mipmaps, MMs}],
ImId = wings_image:new_hidden(env_spec_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{env_spec_tex, ImId}.
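
%% Run an OpenCL kernel Tot times, ping-ponging between the two
%% buffers.  Returns the buffers in their final order together with the
%% events to wait for.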
cl_multipass(Kernel, Args, Buff0, Buff1, N, Tot, No, Wait, CL) when N < Tot ->
Next = wings_cl:cast(Kernel, Args ++ [Buff0, Buff1, N, Tot], No, Wait, CL),
cl_multipass(Kernel, Args, Buff1, Buff0, N+1, Tot, No, [Next], CL);
cl_multipass(_Kernel, _Args, Buff0, Buff1, _N, _Tot, _No, Wait, _CL) ->
{Buff0, Buff1, Wait}.
cl_setup(Recompile) ->
case ?GET(opencl) of
undefined ->
case wings_cl:is_available(true) of
true ->
try cl_setup_1()
catch _:Reason:ST ->
io:format("CL setup error: ~p ~p~n",
[Reason, ST]),
{error, no_openCL}
end;
false -> {error, no_openCL}
end;
CL0 when Recompile ->
try
CL = wings_cl:compile("img_lib.cl", CL0),
?SET(opencl, CL),
CL
catch _:Reason:ST ->
io:format("CL compile error: ~p ~p~n",
[Reason, ST]),
CL0
end;
CL ->
CL
end.
cl_setup_1() ->
CL0 = wings_cl:setup(),
case wings_cl:have_image_support(CL0) of
true ->
CL = wings_cl:compile("img_lib.cl", CL0),
?SET(opencl, CL),
CL;
false ->
?SET(opencl, CL0),
{error, no_openCL_image}
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
light_types() ->
[{?__(1,"Infinite"),infinite,
?__(2,"Create a far-away, directional light (like the sun)")},
{?__(3,"Point"),point,
?__(4,"Create a light that radiates light in every direction")},
{?__(5,"Spot"),spot,
?__(6,"Create a spotlight")},
{?__(7,"Ambient"),ambient,
?__(8,"Create an ambient light source")},
{?__(9,"Area"),area,
?__(10,"Create an area that radiates light")}].
menu(X, Y, St) ->
SpotOnly = {iff,[spot]},
NotAmb = {iff,[spot,infinite,point,area]},
One = one_light,
Dir = wings_menu_util:directions(St#st{selmode=body}),
Menu0 = [{?__(2,"Move"),{move_light,Dir}},
{NotAmb,separator},
{NotAmb,{?__(3,"Position Highlight"),
{'VALUE',{position_highlight,{'ASK',{[point],[]}}}},
?__(4,"Position the aim point or location of light")}},
{NotAmb,{?__(5,"Color"),color,
?__(6,"Interactively adjust hue, value, and saturation")}},
{NotAmb,
{?__(7,"Attenuation"),
{attenuation,
[{?__(8,"Linear"),linear,
?__(9,"Interactively adjust how much light weakens as it travels away from its source (linear factor)")},
{?__(10,"Quadratic"),quadratic,
?__(11,"Interactively adjust how much light weakens as it travels away from its source (quadratic factor)")}]}}},
{SpotOnly,separator},
{SpotOnly,{?__(12,"Spot Angle"),spot_angle,
?__(13,"Interactively adjust the angle of the spotlight cone")}},
{SpotOnly,{?__(14,"Spot Falloff"),spot_falloff,
?__(15,"Interactively adjust how much light weakens farther away from the center of the spotlight cone")}},
{One,separator},
{One,{?__(16,"Edit Properties..."),edit,
?__(17,"Edit light properties")}}|body_menu(Dir, St)],
Menu = filter_menu(Menu0, St),
wings_menu:popup_menu(X, Y, light, Menu).
body_menu(Dir, #st{selmode=body}) ->
[separator,
{?STR(menu,18,"Duplicate"),{duplicate,Dir},
?STR(menu,19,"Duplicate and move selected lights")},
{?STR(menu,20,"Delete"),delete,
?STR(menu,21,"Delete selected lights")}];
body_menu(_, _) -> [].
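
%% Filter the light menu according to the current selection: {iff,Types}
%% entries are kept only when every selected light has one of the listed
%% types, and one_light entries are dropped when the selection mixes
%% light types.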
filter_menu(Menu, St) ->
MF = fun(_, #we{light=#light{type=Type}}) -> Type;
(_, #we{}) -> not_light
end,
RF = fun(Type, []) -> Type;
(Type, Type) -> Type;
(_, _) -> mixed
end,
T = wings_sel:dfold(MF, RF, [], St),
foldr(fun({one_light,_}, A) when T =:= mixed -> A;
({one_light,Entry}, A) -> [Entry|A];
({{iff,[_|_]=Types},Entry}, A) ->
case member(T, Types) of
true -> [Entry|A];
false -> A
end;
(Entry, A) -> [Entry|A]
end, [], Menu).
| null | https://raw.githubusercontent.com/bjorng/wings/0ebe43abfcb094344c015589d8cd072b643d80ed/src/wings_light.erl | erlang |
wings_light.erl --
Implementation of lights.
See the file "license.terms" for information on usage and redistribution
of this file, and for a DISCLAIMER OF ALL WARRANTIES.
$Id$
Light record in We.
the light testing macros in wings.hrl.
Type. (DO NOT MOVE.)
Aim point for spot/infinite.
Linear attenuation.
Quadratic attenuation.
Spot exponent.
Extra properties.
Debug
Bind textures to units
Light Commands.
The Delete command.
Creating lights.
Updating, drawing and rendering lights.
Use a list of ops to indicate selected color
Exporting lights.
For exporters.
For saving in .wings files.
This is the classic definition of e3d_face{}, as it was defined
before 1.1.9.
List of vertex indices.
List of texture indices.
List of normal indices.
Materials for face.
Visible edges (as in 3DS).
Fix the face record so that it looks like the classic
definition of #e3d_face{} (before 1.1.9).
Patch the record type.
Importing lights.
We used to put all vertices at the same position, but with
the rewritten pick handling we need a vertex array for picking.
The cube will be slightly larger than the sphere that is shown
for the light. The position of the light will be the centroid
of the cube.
matches when a new light is added to the project
records of different size (for example, in Wings 1.1.9
a new 'sg' field was added to #e3d_face{}). We know
that the fields we are interested in are in the same
place, so we can retrieve them using element/2.
Crash on unknown record type.
Setting up lights.
This happens when dragging a light in Body selection mode.
(Not area light.)
Area lights handled here in all selection modes +
other lights in vertex/edge/face modes.
For an area light it looks better in vertex/edge/face
modes to emulate with the static non-split shape
during drag. It would be more correct if the area light
updating would use the resulting #we{}, but it does not
exist until the drag is done.
Non-area lights drag the whole shape so they can use
the dynamic part of the split shape
(which is the whole shape).
Could do better here
Poor man's version with blurred images
wings_image:debug_display(1000-Level,
Sizes for result images
io:format("~p: ~p ~p ~.3f~n", [Level, W, H, Step]),
name="Spec: " ++ integer_to_list(Level)}),
If the cache file cannot be loaded we return an empty list,
signaling that a new environment map needs to be computed
 | %% Copyright (c) 2002-2011
-module(wings_light).
-export([init/0, init/1, init_opengl/0, load_env_image/1,
light_types/0,menu/3,command/2,is_any_light_selected/1,
any_enabled_lights/0,info/1,setup_light/2,
create/2,update_dynamic/2,update_matrix/2,update/1,
global_lights/1,
export/1,export_bc/1,export_camera_lights/0,
import/2,import/1,shape_materials/2,
light_pos/1]).
-define(NEED_OPENGL, 1).
-include("wings.hrl").
-include_lib("wings/e3d/e3d.hrl").
-include_lib("wings/e3d/e3d_image.hrl").
-import(lists, [reverse/1,foldl/3,foldr/3,member/2,keydelete/3,sort/1]).
-define(DEF_X, 0.0).
-define(DEF_Y, 3.0).
-define(DEF_Z, 0.0).
-define(DEF_POS, {?DEF_X,?DEF_Y,?DEF_Z}).
%% The type field must be the first field, since it is used by
%% the light testing macros in wings.hrl.
-record(light,
	{type,
	 diffuse={1.0,1.0,1.0,1.0},
	 ambient={0.0,0.0,0.0,1.0},
	 specular={1.0,1.0,1.0,1.0},
	 aim,
	 lin_att,
	 quad_att,
	 spot_angle,
	 spot_exp,
	 prop=[]
	}).
def_envmap() ->
DefEnvMap = "grandcanyon.png",
DefPath = filename:join(wings_util:lib_dir(wings), "textures"),
filename:join(DefPath, DefEnvMap).
init() ->
wings_pref:set_default(show_bg, false),
wings_pref:set_default(show_bg_blur, 0.5),
wings_pref:set_default(bg_image, def_envmap()),
EnvImgRec = load_env_file(wings_pref:get_value(bg_image)),
init(false, EnvImgRec).
init(Recompile) ->
    EnvImgRec = load_env_file(wings_pref:get_value(bg_image)),
init(Recompile, EnvImgRec).
init(Recompile, EnvImgRec) ->
AreaMatTagId = load_area_light_tab(),
EnvIds = case wings:is_fast_start() orelse cl_setup(Recompile) of
true ->
fake_envmap(load_env_file(def_envmap()));
{error, _} ->
ErrorStr = ?__(1, "Could not initialize OpenCL: env lighting limited ~n"),
io:format(ErrorStr,[]),
wings_status:message(geom, ErrorStr),
fake_envmap(load_env_file(def_envmap()));
CL ->
make_envmap(CL, EnvImgRec)
end,
[?SET(Tag, Id) || {Tag,Id} <- [AreaMatTagId|EnvIds]],
init_opengl(),
wings_develop:gl_error_check({?MODULE,?FUNCTION_NAME}),
ok.
-spec load_env_image(FileName::string()) -> ok | {file_error, {error, term()}} | {cl_error, {error, term()}}.
load_env_image(FileName) ->
try load_env_image_1(FileName)
catch throw:Error ->
Error
end.
load_env_image_1(FileName) ->
EnvImgRec = wings_image:image_read([{filename, FileName}]),
is_record(EnvImgRec, e3d_image) orelse throw({file_error, EnvImgRec}),
CL = case cl_setup(false) of
{error, _} = Error ->
throw({cl_error, Error});
CL0 -> CL0
end,
EnvIds = make_envmap(CL, EnvImgRec),
[?SET(Tag, Id) || {Tag,Id} <- EnvIds],
init_opengl(),
wings_develop:gl_error_check({?MODULE,?FUNCTION_NAME}),
ok.
init_opengl() ->
Ids = [{areamatrix_tex, ?AREA_LTC_MAT_UNIT},
{brdf_tex, ?ENV_BRDF_MAP_UNIT},
{env_diffuse_tex, ?ENV_DIFF_MAP_UNIT},
{env_spec_tex, ?ENV_SPEC_MAP_UNIT}],
SetupUnit = fun({Tag, Unit}) ->
case ?GET(Tag) of
undefined -> ignore;
ImId ->
TxId = wings_image:txid(ImId),
gl:activeTexture(?GL_TEXTURE0 + Unit),
is_integer(TxId) andalso gl:bindTexture(?GL_TEXTURE_2D, TxId),
gl:activeTexture(?GL_TEXTURE0)
end
end,
_ = [SetupUnit(Id) || Id <- Ids],
ok.
load_env_file(FileName) ->
case wings_image:image_read([{filename, FileName}]) of
#e3d_image{} = Img ->
Img;
_Error ->
?dbg("Could not load env image: ~p~n", [FileName]),
wings_image:image_read([{filename, def_envmap()}])
end.
command({move_light,Type}, St) ->
wings_move:setup(Type, St);
command(color, St) ->
color(St);
command({position_highlight,Data}, St) ->
position_highlight(Data, St);
command({attenuation,Type}, St) ->
attenuation(Type, St);
command(spot_angle, St) ->
spot_angle(St);
command(spot_falloff, St) ->
spot_falloff(St);
command(edit, St) ->
edit(St);
command({edit,Id}, St) ->
edit(Id, St);
command(delete, St) ->
{save_state,delete(St)};
command({duplicate,Dir}, St) ->
duplicate(Dir, St).
-spec is_any_light_selected(#st{}) -> boolean().
is_any_light_selected(St) ->
MF = fun(_, We) -> ?IS_LIGHT(We) end,
RF = fun erlang:'or'/2,
wings_sel:dfold(MF, RF, false, St).
any_enabled_lights() ->
wings_dl:fold(fun(#dlo{src_we=We}, Bool) ->
Bool orelse ?IS_ANY_LIGHT(We)
end, false).
-spec info(#we{}) -> iolist().
info(#we{name=Name,light=#light{type=Type}=L}=We) ->
Info0 = io_lib:format(?__(1,"Light ~ts"), [Name]),
case Type of
ambient -> Info0;
_ ->
Pos = light_pos(We),
Info = [Info0|io_lib:format(?__(2,": Pos ~s"),
[wings_util:nice_vector(Pos)])],
[Info|info_1(Type, Pos, L)]
end.
info_1(point, _, _) -> [];
info_1(Type, Pos, #light{aim=Aim,spot_angle=A}) ->
Dir = e3d_vec:norm_sub(Aim, Pos),
Info = io_lib:format(?__(1,". Aim ~s. Dir ~s"),
[wings_util:nice_vector(Aim),
wings_util:nice_vector(Dir)]),
[Info|case Type of
spot -> io_lib:format(?__(2,". Angle ~s~c"),
[wings_util:nice_float(A),?DEGREE]);
_ -> []
end].
color(St0) ->
{St,Flags} =
wings_sel:mapfold(
fun(_, #we{light=L}=We, []) when ?IS_LIGHT(We) ->
{R,G,B,A} = get_light_color(L),
{H,S,V} = wings_color:rgb_to_hsv(R, G, B),
ColorFun = fun({finish,C}, D) -> color(C, D, A);
(C, D) -> color(C, D, A)
end,
Flags = [{initial,[H,V,S]}],
{We#we{temp=ColorFun},Flags};
(_, We, _) when ?IS_LIGHT(We) ->
wings_u:error_msg(?__(1,"Select only one light."));
(_, _, A) -> A
end, [], St0),
Units = [{angle,{0.0,359.9999}},
{percent,{0.0,1.0}},
{percent,{0.0,1.0}}],
DF = fun(#we{temp=General}) -> General end,
wings_drag:general(DF, Units, Flags, St).
color([H,V,S], #dlo{src_we=#we{light=L0}=We0}=D, A) ->
{R,G,B} = wings_color:hsv_to_rgb(H, S, V),
Col = {R,G,B,A},
L = update_color(L0, Col),
We = We0#we{light=L},
update(D#dlo{work=none,src_we=We}).
get_light_color(#light{type=ambient,ambient=Col}) -> Col;
get_light_color(#light{diffuse=Diff}) -> Diff.
update_color(#light{type=ambient}=L, Col) -> L#light{ambient=Col};
update_color(L, Col) -> L#light{diffuse=Col}.
position_highlight({'ASK',Ask}, St) ->
wings:ask(Ask, St, fun position_highlight/2);
position_highlight(Center, St) ->
{save_state,
wings_sel:map(fun(_, We) when ?IS_LIGHT(We) ->
position_highlight_1(Center, We);
(_, We) -> We
end, St)}.
position_highlight_1(Center, #we{light=L0}=We) ->
case L0 of
#light{type=point} ->
move_light(Center, We);
_ ->
L = L0#light{aim=Center},
We#we{light=L}
end.
spot_angle(St) ->
case selected_light(St) of
#light{type=spot,spot_angle=SpotAngle} ->
SpotFun0 = fun([Angle|_], L) -> L#light{spot_angle=Angle} end,
DF = fun(_) -> adjust_fun(SpotFun0) end,
Units = [{angle,{0.1,89.9}}],
Flags = [{initial,[SpotAngle]}],
wings_drag:general(DF, Units, Flags, St);
_ ->
wings_u:error_msg(?__(1,"Not a spotlight."))
end.
spot_falloff(St) ->
case selected_light(St) of
#light{type=spot,spot_exp=SpotExp} ->
SpotFun0 = fun([Exp|_], L) -> L#light{spot_exp=Exp} end,
DF = fun(_) -> adjust_fun(SpotFun0) end,
Units = [{number,{0.0,128.0}}],
Flags = [{initial,[SpotExp]}],
wings_drag:general(DF, Units, Flags, St);
_ ->
wings_u:error_msg(?__(1,"Not a spotlight."))
end.
attenuation(Type, St) ->
case selected_light(St) of
#light{type=Ltype}=L when Ltype =:= point; Ltype =:= spot ->
Initial = att_initial(Type, L),
DF = fun(_) -> adjust_fun(att_fun(Type)) end,
Units = [{dx,att_range(Type)}],
Flags = [{initial,[Initial]}],
wings_drag:general(DF, Units, Flags, St);
_ ->
wings_u:error_msg(?__(1,"Not a point light or spotlight."))
end.
att_initial(linear, #light{lin_att=LinAtt}) -> LinAtt;
att_initial(quadratic, #light{quad_att=QuadAtt}) -> QuadAtt.
att_fun(linear) -> fun([V|_], L) -> L#light{lin_att=V} end;
att_fun(quadratic) -> fun([V|_], L) -> L#light{quad_att=V} end.
att_range(linear) -> {0.0,1.0};
att_range(quadratic) -> {0.0,0.5}.
selected_light(St) ->
MF = fun(_, #we{light=L}=We) when ?IS_LIGHT(We) ->
[L];
(_, #we{}) ->
[]
end,
RF = fun erlang:'++'/2,
case wings_sel:dfold(MF, RF, [], St) of
[Selected] ->
Selected;
[_|_] ->
wings_u:error_msg(?__(1,"Select only one light."))
end.
adjust_fun(AdjFun) ->
fun({finish,Ds}, D) -> adjust_fun_1(AdjFun, Ds, D);
(Ds, D) -> adjust_fun_1(AdjFun, Ds, D)
end.
adjust_fun_1(AdjFun, Ds, #dlo{src_we=#we{light=L0}=We0}=D) ->
L = AdjFun(Ds, L0),
We = We0#we{light=L},
update(D#dlo{work=none,src_we=We}).
%% The Edit Properties command.
edit(St) ->
case wings_sel:selected_ids(St) of
[Id] ->
edit(Id, St);
[_|_] ->
wings_u:error_msg(?__(1,"Select only one light."))
end.
edit(Id, St) ->
Obj = wings_obj:get(Id, St),
case Obj of
#{light:=#light{type=ambient}} ->
{_, Prop} = get_light(Obj, false, St),
{dialog,Qs,Fun} = edit_ambient_dialog(Obj, Prop, St),
wings_dialog:dialog(?__(2,"Ambient Light Properties"), Qs, Fun);
#{light:=#light{}} ->
{_, Prop} = get_light(Obj, false, St),
{dialog,Qs,Fun} = edit_dialog(Obj, Prop, St),
wings_dialog:dialog(?__(3,"Light Properties"), Qs, Fun);
_ ->
wings_u:error_msg(?__(4,"Select one area light."))
end.
edit_ambient_dialog(Obj, Prop0, St) ->
#{name:=Name,light:=L0} = Obj,
#light{ambient=Amb0} = L0,
Qs0 = {vframe,
[{hframe,
[{label_column,
[{?__(1,"Ambient"),{color,Amb0}}]}],
[{title,?__(2,"Color")}]}|qs_specific(L0)]},
Qs1 = wings_plugin:dialog({light_editor_setup,Name,Prop0}, [{"Wings 3D", Qs0}]),
Qs = {vframe_dialog,
[{oframe, Qs1, 1, [{style, buttons}]}],
[{buttons, [ok, cancel]}, {key, result}]},
Fun = fun([Amb|Res]) ->
{ok,Prop} = plugin_results(Name, Prop0, Res),
L = L0#light{ambient=Amb,prop=Prop},
wings_obj:put(Obj#{light:=L}, St)
end,
{dialog,Qs,Fun}.
edit_dialog(Obj, Prop0, St) ->
#{name:=Name,light:=L0} = Obj,
#light{diffuse=Diff0,specular=Spec0} = L0,
Qs0 = {vframe,
[{hframe,
[{label_column,
[{?__(1,"Diffuse"),{color,Diff0}},
{?__(3,"Specular"),{color,Spec0}}]}],
[{title,?__(4,"Colors")}]}|qs_specific(L0)]},
Qs1 = wings_plugin:dialog({light_editor_setup,Name,Prop0}, [{"Wings 3D", Qs0}]),
Qs = {vframe_dialog,
[{oframe, Qs1, 1, [{style, buttons}]}],
[{buttons, [ok, cancel]}, {key, result}]},
Fun = fun([Diff,Spec|More0]) ->
L1 = L0#light{diffuse=Diff,specular=Spec},
{L2,More} = edit_specific(More0, L1),
case plugin_results(Name, Prop0, More) of
{ok,Prop} ->
L = L2#light{prop=Prop},
wings_obj:put(Obj#{light:=L}, St)
end
end,
{dialog,Qs,Fun}.
plugin_results(Name, Prop0, Res0) ->
case wings_plugin:dialog_result({light_editor_result,Name,Prop0}, Res0) of
{Prop,[{result, ok}]} ->
{ok,keydelete(opengl, 1, Prop)};
{_,Res} ->
io:format(?__(1,
"Light editor plugin(s) left garbage:~n ~P~n"),
[Res,20]),
wings_u:error_msg(?__(2,"Plugin(s) left garbage"))
end.
qs_specific(#light{type=spot,spot_angle=Angle,spot_exp=SpotExp}=L) ->
Spot = [{vframe,
[{label_column,
[{?__(1, "Angle"), {slider, {text, Angle, [{range, {0.0, 89.9}}]}}},
{?__(2, "Falloff"), {slider, {text, SpotExp, [{range, {0.0, 128.0}}]}}}]
}],
[{title,?__(3,"Spot Parameters")}]}],
qs_att(L, Spot);
qs_specific(#light{type=point}=L) -> qs_att(L, []);
qs_specific(#light{type=area}=L) -> qs_att(L, []);
qs_specific(_) -> [].
qs_att(#light{lin_att=Lin,quad_att=Quad}, Tail) ->
[{vframe,
[{label_column,
[{?__(1,"Linear"),{slider,{text,Lin,[{range,{0.0,1.0}}]}}},
{?__(2,"Quadratic"),{slider,{text,Quad,[{range,{0.0,0.5}}]}}}]
}],
[{title,?__(3,"Attenuation")}]}|Tail].
edit_specific([LinAtt,QuadAtt,Angle,SpotExp|More], #light{type=spot}=L) ->
{L#light{spot_angle=Angle,spot_exp=SpotExp,lin_att=LinAtt,quad_att=QuadAtt},More};
edit_specific([LinAtt,QuadAtt|More], #light{type=point}=L) ->
{L#light{lin_att=LinAtt,quad_att=QuadAtt},More};
edit_specific([LinAtt,QuadAtt|More], #light{type=area}=L) ->
{L#light{lin_att=LinAtt,quad_att=QuadAtt},More};
edit_specific(More, L) -> {L,More}.
delete(St) ->
wings_sel:map_update_sel(
fun(_, _) ->
{#we{},gb_sets:empty()}
end, St).
%% The Duplicate command.
duplicate(Dir, St0) ->
CF = fun(Items, We) ->
Empty = gb_sets:empty(),
New = [{We,Items,copy}],
{We,Empty,New}
end,
St = wings_sel:clone(CF, St0),
case Dir of
none -> St;
_ -> wings_move:setup(Dir, St)
end.
create(Type, #st{onext=Oid}=St) ->
Prefix = atom_to_list(Type),
Name = Prefix++integer_to_list(Oid),
import([{Name,[{opengl,[{type,Type}]}]}], St).
update_dynamic(#dlo{src_we=We0}=D, Vtab0) ->
Vtab = array:from_orddict(sort(Vtab0)),
We = We0#we{vp=Vtab},
update_1(We, D#dlo{src_we=We}).
update_matrix(#dlo{src_we=We0}=D, Matrix) ->
We = wings_we:transform_vs(Matrix, We0),
update_1(We, D#dlo{transparent=We}).
update(#dlo{work=W,src_sel=Sel,src_we=#we{light=#light{}}=We}=D) ->
IsSel = Sel =/= none,
HaveW = W =/= none andalso not is_list(W),
HaveS = is_list(W),
if
W =:= none -> update_1(We, D);
IsSel andalso HaveS -> D;
(not IsSel) andalso HaveW -> D;
true -> update_1(We, D)
end;
update(D) -> D.
update_1(#we{light=#light{type=Type}}=We, #dlo{src_sel=Sel}=D) ->
IsSel = Sel =/= none,
SelColor = case IsSel of
false -> {0.0,0.0,1.0,1.0};
true -> {R,G,B} = wings_pref:get_value(selected_color),
{R,G,B,1.0}
end,
Draw = update_fun(Type, SelColor, We),
case IsSel of
true ->
D#dlo{work=[Draw], sel=Draw};
false ->
D#dlo{work=Draw, sel=none}
end.
update_fun(infinite, SelColor, #we{light=#light{aim=Aim}}=We) ->
LightPos = light_pos(We),
LightCol = get_light_col(We),
Vec = e3d_vec:norm_sub(Aim, LightPos),
Data = [e3d_vec:mul(Vec, 0.2),e3d_vec:mul(Vec, 0.6)],
#{size:=Len, tris:=Tris} = wings_shapes:tri_sphere(#{subd=>3, scale=>0.08}),
D = fun(RS) ->
gl:lineWidth(1.5),
gl:pushMatrix(),
{X,Y,Z} = LightPos,
gl:translatef(X, Y, Z),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(LightCol), RS),
gl:drawArrays(?GL_TRIANGLES, 2, Len*3),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(SelColor), RS),
gl:drawArrays(?GL_LINES, 0, 2),
gl:popMatrix(),
RS
end,
wings_vbo:new(D, Data++Tris);
update_fun(point, SelColor, We) ->
LightPos = light_pos(We),
LightCol = get_light_col(We),
Data0 = [{1.0,0.0,0.0},
{0.0,1.0,0.0},
{0.0,0.0,1.0},
{0.71,0.71,0.0},
{0.71,0.0,0.71},
{0.0,0.71,0.71}],
N = length(Data0) * 4,
Data = lines(Data0),
#{size:=Len, tris:=Tris} = wings_shapes:tri_sphere(#{subd=>3, scale=>0.08}),
D = fun(RS) ->
gl:lineWidth(1.0),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(LightCol), RS),
gl:pushMatrix(),
{X,Y,Z} = LightPos,
gl:translatef(X, Y, Z),
gl:drawArrays(?GL_TRIANGLES, N, Len*3),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(SelColor), RS),
gl:drawArrays(?GL_LINES, 0, N),
gl:popMatrix(),
RS
end,
wings_vbo:new(D, Data++Tris);
update_fun(spot, SelColor, #we{light=#light{aim=Aim,spot_angle=Angle}}=We) ->
Top = light_pos(We),
LightCol = get_light_col(We),
SpotDir0 = e3d_vec:norm_sub(Aim, Top),
SpotDir = case e3d_vec:is_zero(SpotDir0) of
false -> SpotDir0;
true -> {0.0,1.0,0.0}
end,
Rad = Angle*math:pi()/180,
R = math:sin(Rad),
H = math:cos(Rad),
Translate = e3d_vec:mul(SpotDir, H),
Rot = e3d_mat:rotate_s_to_t({0.0,0.0,1.0}, e3d_vec:neg(SpotDir)),
#{size:=Len, tris:=Tris} = wings_shapes:tri_sphere(#{subd=>3, scale=>0.08}),
CylLines = cylinder_lines(R, 0.08, H, 3),
N = length(CylLines),
D = fun(RS) ->
gl:lineWidth(1.0),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(LightCol), RS),
gl:pushMatrix(),
{Tx,Ty,Tz} = Top,
gl:translatef(Tx, Ty, Tz),
gl:drawArrays(?GL_TRIANGLES, 0, Len*3),
wings_shaders:set_uloc(light_color, wings_color:srgb_to_linear(SelColor), RS),
{Dx,Dy,Dz} = Translate,
gl:translatef(Dx, Dy, Dz),
gl:multMatrixd(Rot),
gl:drawArrays(?GL_LINES, Len*3, N),
gl:popMatrix(),
RS
end,
wings_vbo:new(D, Tris ++ CylLines);
update_fun(ambient, _, _) ->
fun(RS) -> RS end.
lines([Vec|Vecs]) ->
[e3d_vec:mul(Vec, 0.2),
e3d_vec:mul(Vec, 0.6),
e3d_vec:mul(Vec, -0.2),
e3d_vec:mul(Vec, -0.6)|lines(Vecs)];
lines([]) -> [].
cylinder_lines(BaseR, TopR, H, Levels) ->
Quad = [{0.0,1.0,0.0},{-1.0,0.0,0.0},{0.0,-1.0,0.0},{1.0,0.0,0.0}],
Subd = subd_cyl(Quad, Levels),
Orig = mk_lines(Subd, hd(Subd)),
Base = [e3d_vec:mul(V, BaseR) || V <- Orig],
Top = [e3d_vec:add_prod({0.0, 0.0, H}, V, TopR) || V <- Orig],
Connect = lists:foldl(fun({A,B}, Acc) -> [A,B|Acc] end, [], lists:zip(Base,Top)),
Base ++ Top ++ Connect.
subd_cyl(List, Level) when Level > 1 ->
New = subd_cyl(List, hd(List), []),
subd_cyl(New, Level-1);
subd_cyl(List, _) ->
List.
subd_cyl([V1|[V2|_]=Rest], First, Acc) ->
M = e3d_vec:norm(e3d_vec:average(V1, V2)),
subd_cyl(Rest, First, [M, V1|Acc]);
subd_cyl([V1], V2, Acc) ->
M = e3d_vec:norm(e3d_vec:average(V1, V2)),
[M, V1|Acc].
mk_lines([V1|[V2|_]=Rest],First) ->
[V1,V2|mk_lines(Rest,First)];
mk_lines([V1], V2) ->
[V1,V2].
get_light_col(#we{light=#light{diffuse=Diff}}) ->
Diff.
export(St) ->
export(St, false).
export_bc(St) ->
export(St, true).
export(St, BackwardsCompatible) ->
F = fun(#{light:=_}=Obj, A) ->
[get_light(Obj, BackwardsCompatible, St)|A];
(_, A) ->
A
end,
L = wings_obj:fold(F, [], St),
reverse(L).
export_camera_lights() ->
Amb = {?__(1,"Ambient"), camera_ambient()},
Ls = case wings_pref:get_value(number_of_lights) of
1 ->
[{?__(2,"Infinite"),camera_infinite_1_0()}];
2 ->
[{?__(3,"Infinite1"),camera_infinite_2_0()},
{?__(4,"Infinite2"),camera_infinite_2_1()}]
end,
#view{origin=Aim} = wings_view:current(),
CameraPos = wings_view:eye_point(),
GL = fun({Name,Li = #light{aim=Diff}}) ->
LPos = e3d_vec:add(CameraPos,Diff),
We = #we{name = Name,
vp = array:from_orddict([{1, LPos}]),
light = Li#light{aim=Aim}},
get_light(We, false)
end,
[GL(Light) || Light <- [Amb|Ls]].
get_light(#{id:=Id,name:=Name,perm:=P,light:=Light}, BC, St) ->
F = fun(We) -> get_light_1(Light, We, BC) end,
Ps0 = wings_obj:with_we(F, Id, St),
Ps = export_perm(P, Ps0),
{Name,Ps}.
get_light(#we{name=Name,perm=P,light=Light}=We, BC) ->
Ps0 = get_light_1(Light, We, BC),
Ps = export_perm(P, Ps0),
{Name,Ps}.
get_light_1(#light{type=ambient,ambient=Amb,prop=Prop}, #we{pst=Pst}=We, _) ->
P = light_pos(We),
OpenGL = [{type,ambient},{ambient,Amb},{position,P},{pst,Pst}],
[{opengl,OpenGL}|Prop];
get_light_1(L, #we{pst=Pst}=We, BC) ->
#light{type=Type,diffuse=Diff,ambient=Amb,specular=Spec,
aim=Aim,spot_angle=Angle,spot_exp=SpotExp,
lin_att=LinAtt,quad_att=QuadAtt,prop=Prop} = L,
P = light_pos(We),
Common = [{type,Type},{position,P},{aim_point,Aim},
{diffuse,Diff},{ambient,Amb},{specular,Spec},{pst,Pst}],
OpenGL0 = case Type of
spot ->
[{cone_angle,Angle},{spot_exponent,SpotExp}|Common];
_ ->
Common
end,
OpenGL1 = if
Type =:= point; Type =:= spot; Type =:= area ->
[{linear_attenuation,LinAtt},
{quadratic_attenuation,QuadAtt}|OpenGL0];
true -> OpenGL0
end,
OpenGL = case Type of
area -> [{mesh,export_mesh(We, BC)}|OpenGL1];
_ -> OpenGL1
end,
[{opengl,OpenGL}|Prop].
export_perm({_,_}, Ps) ->
[{visible,false},{locked,false}|Ps];
export_perm(P, Ps) when is_integer(P) ->
[{visible,P < 2},{locked,(P band 1) =/= 0}|Ps].
%% Classic #e3d_face{} layout (vs, vc, tx, ns, mat, vis), kept so lights can be
%% exported in the old, backwards compatible format; only vs and mat are filled in.
-record(classic_e3d_face,
        {vs=[], vc=[], tx=[], ns=[], mat=[], vis=-1}).
export_mesh(We, BC) ->
#e3d_mesh{fs=Fs0} = Mesh = wings_export:make_mesh(We, []),
Fs = case BC of
false ->
Fs0;
true ->
[export_fix_face(F) || F <- Fs0]
end,
Mesh#e3d_mesh{fs=Fs}.
export_fix_face(#e3d_face{vs=Vs,mat=Mat}) ->
FaceRec = #classic_e3d_face{vs=Vs,mat=Mat},
setelement(1, FaceRec, e3d_face).
import(Lights, St) ->
foldl(fun import_fun/2, St, Lights).
import_fun({Name,Ps}, St) ->
wings_obj:new(Name, import(Ps), St).
import(Ps) ->
Visible = proplists:get_value(visible, Ps, []),
Locked = proplists:get_value(locked, Ps, []),
Prop1 = proplists:delete(visible, Ps),
Prop0 = proplists:delete(locked, Prop1),
OpenGL = proplists:get_value(opengl, Prop0, []),
Type = proplists:get_value(type, OpenGL, point),
Pos = proplists:get_value(position, OpenGL, ?DEF_POS),
Diff = proplists:get_value(diffuse, OpenGL, {1.0,1.0,1.0,1.0}),
Amb = import_ambient(Type, OpenGL),
Spec = proplists:get_value(specular, OpenGL, {1.0,1.0,1.0,1.0}),
Aim = proplists:get_value(aim_point, OpenGL, {0.0,0.0,0.0}),
LinAtt = proplists:get_value(linear_attenuation, OpenGL, 0.0),
QuadAtt = proplists:get_value(quadratic_attenuation, OpenGL, 0.0),
Angle = proplists:get_value(cone_angle, OpenGL, 30.0),
SpotExp = proplists:get_value(spot_exponent, OpenGL, 0.0),
Prop = proplists:delete(opengl, Prop0),
Light = #light{type=Type,diffuse=Diff,ambient=Amb,specular=Spec,
aim=Aim,lin_att=LinAtt,quad_att=QuadAtt,
spot_angle=Angle,spot_exp=SpotExp,prop=Prop},
We=import_we(Light, OpenGL, Pos),
We#we{perm=import_perm(Visible, Locked)}.
import_ambient(ambient, OpenGL) ->
proplists:get_value(ambient, OpenGL, {0.1,0.1,0.1,1.0});
import_ambient(_, OpenGL) ->
proplists:get_value(ambient, OpenGL, {0.0,0.0,0.0,1.0}).
import_we(#light{type=area}=Light, OpenGL, {X,Y,Z}) ->
Mesh =
case proplists:lookup(mesh, OpenGL) of
none ->
#e3d_mesh{type=polygon,
fs=[#e3d_face{vs=[0,1,2,3],
mat=[default]}],
vs=[{X+1.0,Y,Z+1.0},{X-1.0,Y,Z+1.0},
{X-1.0,Y,Z-1.0},{X+1.0,Y,Z-1.0}]};
{mesh,M} -> import_fix_mesh(M)
end,
We = wings_import:import_mesh(material, Mesh),
Pst = proplists:get_value(pst, OpenGL, gb_trees:empty()),
We#we{light=Light,pst=Pst};
import_we(#light{}=Light, OpenGL, {X,Y,Z}) ->
Fs = [[0,3,2,1],[2,3,7,6],[0,4,7,3],[1,2,6,5],[4,5,6,7],[0,1,5,4]],
S = 0.07,
Vs = [{X-S,Y-S,Z+S},{X-S,Y+S,Z+S},{X+S,Y+S,Z+S},{X+S,Y-S,Z+S},
{X-S,Y-S,Z-S},{X-S,Y+S,Z-S},{X+S,Y+S,Z-S},{X+S,Y-S,Z-S}],
We = wings_we:build(Fs, Vs),
Pst = proplists:get_value(pst, OpenGL, gb_trees:empty()),
We#we{light=Light,pst=Pst}.
import_perm([], []) ->                          %No visibility/lock info: assume visible and unlocked.
    import_perm(true, false);
import_perm(false,false) ->
?PERM_HIDDEN_BIT;
import_perm(true,false) ->
0;
import_perm(true,true) ->
?PERM_LOCKED_BIT;
import_perm(false,true) ->
?PERM_HIDDEN_BIT bor ?PERM_LOCKED_BIT.
import_fix_mesh(#e3d_mesh{fs=Fs0}=Mesh0) ->
Fs = [import_fix_face(F) || F <- Fs0],
Mesh1 = Mesh0#e3d_mesh{fs=Fs},
Mesh = e3d_mesh:clean_faces(Mesh1),
e3d_mesh:transform(Mesh).
import_fix_face(FaceRec) when is_tuple(FaceRec) ->
    %% Different versions of Wings can have #e3d_face{} records of different
    %% shapes, so rebuild a canonical #e3d_face{} from the vs and mat fields.
Vs = element(#e3d_face.vs, FaceRec),
Mat = element(#e3d_face.mat, FaceRec),
#e3d_face{vs=Vs,mat=Mat}.
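%% Prepare the scene lights and split them into {AmbientLights, OtherLights}.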
global_lights(Lights0) ->
Lights = lists:map(fun scene_lights_fun/1, Lights0),
IsAL = fun(#{light:=#light{type=Type}}) -> Type =:= ambient end,
lists:partition(IsAL, Lights).
camera_ambient() ->
#light{type = ambient,
aim = {0.0,0.0,0.0},
ambient = {0.1,0.1,0.1,1.0}}.
camera_infinite_1_0() ->
#light{type = infinite,
diffuse = {0.7,0.7,0.7,1},
specular = {0.2,0.2,0.2,1},
ambient = {0,0,0,1.0},
aim = {0.110,0.0,0.994}
}.
camera_infinite_2_0() ->
#light{type = infinite,
diffuse = {1,1,1,1},
specular = {0.3,0.3,0.3,1},
ambient = {0,0,0,1.0},
aim = {0.71,0.71,0.0}
}.
camera_infinite_2_1() ->
#light{type = infinite,
diffuse = {0.5,0.5,0.5,0.5},
specular = {0.3,0.3,0.3,1},
ambient = {0,0,0,1.0},
aim = {-0.71,-0.71,0.0}
}.
scene_lights_fun(#dlo{transparent=#we{light=L}=We}) ->
prepare_light(L, We, none);
scene_lights_fun(#dlo{drag=Drag,src_we=We0}=D) ->
We = case We0 of
#we{light=#light{type=area}} ->
wings_draw:original_we(D);
_ ->
We0
end,
M = case Drag of
{matrix,_Tr,_M0,M1} -> M1;
_ -> none
end,
prepare_light(We#we.light, We, M).
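%% Turn a light #we{} into the map consumed by setup_light/2: the #light{} record
%% plus a homogeneous position (w = 0.0 for directional, 1.0 for positional) and,
%% for spot and area lights, a direction or the corner points.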
prepare_light(#light{type=ambient}=L, _We, _M) ->
#{light=>L};
prepare_light(#light{type=infinite,aim=Aim}=L, We, _M) ->
{X,Y,Z} = e3d_vec:norm_sub(light_pos(We), Aim),
#{light=>L, pos=>{X,Y,Z,0.0}};
prepare_light(#light{type=point}=L, We, _M) ->
{X,Y,Z} = light_pos(We),
#{light=>L, pos=>{X,Y,Z,1.0}};
prepare_light(#light{type=spot,aim=Aim}=L, We, _M) ->
Pos = {X,Y,Z} = light_pos(We),
Dir = e3d_vec:norm_sub(Aim, Pos),
#{light=>L, pos=>{X,Y,Z,1.0}, dir=>Dir};
prepare_light(#light{type=area}=L, We, M) ->
case arealight_props(We) of
{area, Corners} -> #{light=>L, points=>[mul_point(M,P)||P<-Corners]};
{point, C} ->
{X,Y,Z} = mul_point(M, C),
#{light=>L#light{type=point}, pos=>{X,Y,Z,1.0}};
{spot, Dir0, C} ->
{X,Y,Z} = mul_point(M, C),
Dir = mul_point(M, Dir0),
#{light=>L#light{type=spot}, pos=>{X,Y,Z,1.0}, dir=>Dir}
end.
setup_light(#{light:=#light{type=ambient,ambient=Amb}}, RS0) ->
RS = wings_shaders:use_prog(ambient_light, RS0),
wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Amb), RS);
setup_light(#{light:=#light{type=infinite, diffuse=Diff, specular=Spec},
pos:=Pos}, RS0) ->
RS1 = wings_shaders:use_prog(infinite_light, RS0),
RS2 = wings_shaders:set_uloc(ws_lightpos, Pos, RS1),
RS3 = wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Diff), RS2),
wings_shaders:set_uloc(light_specular, wings_color:srgb_to_linear(Spec), RS3);
setup_light(#{light:=#light{type=point, diffuse=Diff,specular=Spec,
lin_att=Lin,quad_att=Quad},
pos:=Pos}, RS0) ->
RS1 = wings_shaders:use_prog(point_light, RS0),
RS2 = wings_shaders:set_uloc(ws_lightpos, Pos, RS1),
RS3 = wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Diff), RS2),
RS4 = wings_shaders:set_uloc(light_specular, wings_color:srgb_to_linear(Spec), RS3),
wings_shaders:set_uloc(light_att, {0.8, Lin, Quad}, RS4);
setup_light(#{light:=#light{type=spot, diffuse=Diff,specular=Spec,
lin_att=Lin,quad_att=Quad,
spot_angle=Angle,spot_exp=Exp},
pos:=Pos, dir:=Dir}, RS0) ->
RS1 = wings_shaders:use_prog(spot_light, RS0),
RS2 = wings_shaders:set_uloc(ws_lightpos, Pos, RS1),
RS3 = wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Diff), RS2),
RS4 = wings_shaders:set_uloc(light_att, {0.8, Lin, Quad}, RS3),
RS5 = wings_shaders:set_uloc(light_dir, Dir, RS4),
RS6 = wings_shaders:set_uloc(light_angle, math:cos(Angle*math:pi()/180.0), RS5),
RS7 = wings_shaders:set_uloc(light_exp, Exp, RS6),
wings_shaders:set_uloc(light_specular, wings_color:srgb_to_linear(Spec), RS7);
setup_light(#{light:=#light{type=area, diffuse=Diff, specular=Spec,
lin_att=Lin,quad_att=Quad},
points:=Points}, RS0) ->
RS1 = wings_shaders:use_prog(area_light, RS0),
RS2 = wings_shaders:set_uloc(light_diffuse, wings_color:srgb_to_linear(Diff), RS1),
RS3 = wings_shaders:set_uloc(light_specular, wings_color:srgb_to_linear(Spec), RS2),
RS4 = wings_shaders:set_uloc(light_att, {0.8, Lin, Quad}, RS3),
wings_shaders:set_uloc(light_points, Points, RS4).
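%% A light's position is simply the center of its (small box or area) mesh.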
light_pos(We) ->
wings_vertex:center(We).
arealight_props(#we{light=#light{type=area}}=We) ->
case wings_we:visible(We) of
[Face] ->
Vs0 = wings_face:vertex_positions(Face, We),
case lists:reverse(Vs0) of
[_,_,_,_] = Vs -> {area, Vs};
[A,B,C] -> {area, [A,B,C,C]};
_ ->
N = wings_face:normal(Face, We),
C = wings_face:center(Face, We),
{spot, N, C}
end;
Fs ->
C = wings_vertex:center(We),
Ns = [wings_face:normal(F, We) || F <- Fs],
N = e3d_vec:average(Ns),
case e3d_vec:len(N) > 0.5 of
true -> {spot, e3d_vec:norm(N), C};
false -> {point, C}
end
end.
move_light(Pos, #we{vp=Vtab0}=We) ->
Vtab = array:sparse_map(fun(_, _) -> Pos end, Vtab0),
We#we{vp=Vtab}.
shape_materials(#we{id=Id, light=#light{diffuse=Front}}, #st{mat=Mtab}=St) ->
St#st{mat=gb_trees:insert({'_area_light_',Id},[Front],Mtab)}.
mul_point(none, Pos) -> Pos;
mul_point({1.0,0.0,0.0, 0.0,1.0,0.0, 0.0,0.0,1.0, Tx,Ty,Tz}, {X,Y,Z}) ->
{X+Tx,Y+Ty,Z+Tz};
mul_point(M, P) -> e3d_mat:mul_point(M, P).
load_area_light_tab() ->
Path = filename:join(wings_util:lib_dir(wings), "textures"),
LTCmatFile = "areal_ltcmat.bin",
{ok, LTCmat} = file:read_file(filename:join(Path, LTCmatFile)),
64*64*4*4 = byte_size(LTCmat),
Opts = [{wrap, {clamp,clamp}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(area_mat,
#e3d_image{type=r32g32b32a32f, bytes_pp=16,
width=64,height=64,
image=LTCmat,
extra=Opts
}),
?CHECK_ERROR(),
{areamatrix_tex, ImId}.
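%% CPU-only approximation of the environment maps, built with wxImage scaling and
%% blurring plus a precomputed BRDF table; make_envmap/2 below builds the maps
%% with OpenCL instead.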
fake_envmap(EnvImgRec) ->
Path = filename:join(wings_util:lib_dir(wings), "textures"),
SpecBG = wings_image:e3d_to_wxImage(EnvImgRec),
wxImage:rescale(SpecBG, 512, 256, [{quality, ?wxIMAGE_QUALITY_HIGH}]),
tone_map(SpecBG),
SBG0 = wings_image:wxImage_to_e3d(SpecBG),
SpecBG1 = wxImage:copy(SpecBG),
MMs = make_mipmaps(SpecBG1, 1, 256, 128),
Opts = [{wrap, {repeat,repeat}}, {filter, {mipmap, linear}}, {mipmaps, MMs}],
SBG = SBG0#e3d_image{name="Fake Spec", extra=Opts},
SpecId = wings_image:new_hidden(env_spec_tex, SBG),
wxImage:rescale(SpecBG, 64, 32, [{quality, ?wxIMAGE_QUALITY_HIGH}]),
DiffBG = wxImage:blur(SpecBG, 10),
blur_edges(DiffBG),
DBG0 = wings_image:wxImage_to_e3d(DiffBG),
DBG = DBG0#e3d_image{name="Fake diffuse", extra=[{wrap, {repeat,repeat}},
{filter, {linear,linear}}]},
wxImage:destroy(SpecBG),
wxImage:destroy(DiffBG),
{ok, BrdfBin0} = file:read_file(filename:join(Path,"brdf_tab.bin")),
128*128*2 = byte_size(BrdfBin0),
BrdfBin = << << R,G,0 >> || << R,G >> <= BrdfBin0 >>,
OptsB = [{wrap, {clamp,clamp}}, {filter, {linear, linear}}],
Brdf = #e3d_image{width=128,height=128,image=BrdfBin,extra=OptsB},
    %% wings_image:debug_display(brdf, Brdf),
    %% wings_image:debug_display(spec, SBG),
    %% wings_image:debug_display(diff, DBG),
[{env_spec_tex, SpecId},
{env_diffuse_tex, wings_image:new_hidden(env_diffuse_tex, DBG)},
{brdf_tex, wings_image:new_hidden(brdf_tex, Brdf)}].
tone_map(Image) ->
RGB0 = wxImage:getData(Image),
RGB = << << (pixel_tonemap(R/256,G/256,B/256)):24 >> || <<R:8,G:8,B:8>> <= RGB0 >>,
wxImage:setData(Image, RGB).
pixel_tonemap(R0,G0,B0) ->
Lum = (1.0+(R0 * 0.2126 + G0 * 0.72152 + B0 * 0.0722)),
R = min(255, trunc(Lum * R0 * 255.0)),
G = min(255, trunc(Lum * G0 * 255.0)),
B = min(255, trunc(Lum * B0 * 255.0)),
(R bsl 16) bor (G bsl 8) bor B.
blur_edges(Image) ->
RGB0 = wxImage:getData(Image),
RowSz = wxImage:getWidth(Image)*3,
BlobSz = (wxImage:getHeight(Image)-2)*RowSz,
<<First0:RowSz/binary, Center:BlobSz/binary, Last0:RowSz/binary>> = RGB0,
First = blur_row(First0),
Last = blur_row(Last0),
RGB1 = <<First:RowSz/binary, Center:BlobSz/binary, Last:RowSz/binary>>,
RGB = << << (blur_edge(Row, RowSz))/binary >> || <<Row:RowSz/binary>> <= RGB1 >>,
wxImage:setData(Image, RGB).
-define(A(C0,C1,C2), (round((C0+C1+C2)/3))).
blur_row(Bin) ->
List = binary_to_list(Bin),
{R0,G0,B0} = average(List, 0,0,0,0),
blur_pixel(List, R0,G0,B0, <<>>).
average([R,G,B|Rest], R0,G0,B0,N) ->
average(Rest, R0+R,G0+G,B0+B,N+1);
average([], R0,G0,B0,N) ->
{R0 div N, G0 div N, B0 div N}.
blur_pixel([R,G,B|Rest], R0,G0,B0, Bin) ->
Acc = <<Bin/binary, ((R+R0) div 2):8, ((G+G0) div 2):8, ((B+B0) div 2):8>>,
blur_pixel(Rest, R0,G0,B0, Acc);
blur_pixel([], _R0,_G0,_B0, Bin) ->
Bin.
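%% Blend the first and last pixels of each row across the wrap seam so the
%% horizontal join of the repeated lat-long map is less visible.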
blur_edge(Row0, Bytes) ->
Skip = Bytes-18,
<<R0:8,G0:8,B0:8, R1:8,G1:8,B1:8, R2:8,G2:8,B2:8,
Bin:Skip/bytes,
R7:8,G7:8,B7:8, R8:8,G8:8,B8:8, R9:8,G9:8,B9:8>> = Row0,
R00 = ?A(R0,R1,R9), G00=?A(G0,G1,G9), B00=?A(B0,B1,B9),
R90 = ?A(R0,R8,R9), G90=?A(G0,G8,G9), B90=?A(B0,B8,B9),
R10 = ?A(R00,R1,R2), G10=?A(G00,G1,G2), B10=?A(B00,B1,B2),
R80 = ?A(R90,R8,R7), G80=?A(G90,G8,G7), B80=?A(B90,B8,B7),
R01 = ?A(R00,R10,R90), G01=?A(G00,G10,G90), B01=?A(B00,B10,B90),
R91 = ?A(R00,R80,R90), G91=?A(G00,G80,G90), B91=?A(B00,B80,B90),
<<R01:8,G01:8,B01:8,
R10:8,G10:8,B10:8,
R2:8,G2:8,B2:8,
Bin:Skip/bytes,
R7:8,G7:8,B7:8,
R80:8,G80:8,B80:8,
R91:8,G91:8,B91:8
>>.
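%% Produce progressively smaller, blurred copies of the image to use as mipmap
%% levels 1..5.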
make_mipmaps(Img0, Level, W, H) when Level < 6 ->
wxImage:rescale(Img0, W, H),
Img = wxImage:blur(Img0, 4),
wxImage:destroy(Img0),
Bin = wxImage:getData(Img),
    %% wings_image:debug_display(#e3d_image{width=W, height=H, image=Bin, order=upper_left,
    %%                                      name="Fake Spec: " ++ integer_to_list(Level)}),
[{Bin, W, H, Level} | make_mipmaps(Img, Level+1, W div 2, H div 2)];
make_mipmaps(Img, _, _, _) ->
wxImage:destroy(Img),
[].
make_envmap(CL, #e3d_image{filename=FileName}=EnvImgRec) ->
EnvIds =
case load_cached_envmap(FileName) of
[] ->
Cached = make_envmap_1(CL, EnvImgRec),
save_cached_envmap(FileName, Cached),
[TagId || {TagId,_} <- Cached];
Cached ->
Cached
end,
wings_cl:working(),
EnvIds.
make_envmap_1(CL, EnvImgRec0) ->
wings_pb:start(?__(1, "Building envmaps")),
EnvImgRec = e3d_image:convert(EnvImgRec0, r8g8b8a8, 1, lower_left),
wings_pb:update(0.1),
OrigImg = wings_cl:image(EnvImgRec, CL),
Buff0 = wings_cl:buff(2048*1024*4*4, [read_write], CL),
Buff1 = wings_cl:buff(2048*1024*4*4, [read_write], CL),
BrdfId = make_brdf(Buff0, 512, 512, CL),
wings_pb:update(0.5),
    {W, H} = {512, 256},                        %Diffuse env map size; matches cached_diffuse/3.
    DiffId = make_diffuse(OrigImg, Buff0, Buff1, W, H, CL),
wings_pb:update(0.9),
SpecId = make_spec(OrigImg, Buff0, Buff1, 2048, 1024, CL),
wings_pb:done(),
cl:release_mem_object(OrigImg),
cl:release_mem_object(Buff0),
cl:release_mem_object(Buff1),
[DiffId,SpecId,BrdfId].
make_brdf(Buff, W, H, CL) ->
CC = wings_cl:cast(schlick_brdf, [Buff, W, H], [W,H], [], CL),
Read = wings_cl:read(Buff, W*H*4*2, [CC], CL),
{ok, BrdfData} = cl:wait(Read),
Img = << << (round(X*255)), (round(Y*255)), 0 >>
|| <<X:32/float-native, Y:32/float-native>> <= BrdfData >>,
    %% wings_image:debug_display(brdf, #e3d_image{width=W, height=H, image=Img, name="BRDF"}),
Opts = [{wrap, {clamp,clamp}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(brdf_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{{brdf_tex, ImId}, Img}.
make_diffuse(OrigImg, Buff0, Buff1, W, H, CL) ->
Fill0 = wings_cl:fill(Buff0, <<0:(32*4)>>, W*H*4*4, CL),
Fill1 = wings_cl:fill(Buff1, <<0:(32*4)>>, W*H*4*4, CL),
{B0,B1,Pre} = cl_multipass(make_diffuse, [OrigImg, W, H], Buff0, Buff1, 0, 10,
[W,H], [Fill0, Fill1], CL),
CC = wings_cl:cast(color_convert, [B0,B1,W,H], [W,H], Pre, CL),
Read = wings_cl:read(B1, W*H*4*4, [CC], CL),
{ok, DiffData} = cl:wait(Read),
Img = << << (round(R*255)), (round(G*255)), (round(B*255)) >> ||
<<R:32/float-native, G:32/float-native, B:32/float-native, _:32>> <= DiffData >>,
    %% wings_image:debug_display(1000+W, #e3d_image{width=W, height=H, image=Img, name="Diffuse"}),
Opts = [{wrap, {repeat,repeat}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(env_diffuse_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{{env_diffuse_tex, ImId}, Img}.
make_spec(OrigImg, Buff0, Buff1, W0, H0, CL) ->
NoMipMaps = trunc(math:log2(min(W0,H0))),
[{Img,W0,H0,0}|MMs] = make_spec(0, NoMipMaps, OrigImg, Buff0, Buff1, W0, H0, CL),
Opts = [{wrap, {repeat,repeat}}, {filter, {mipmap, linear}}, {mipmaps, MMs}],
    %% ?dbg("~p ~p => ~w mipmaps~n", [W0, H0, length(MMs)]),
ImId = wings_image:new_hidden(env_spec_tex, #e3d_image{width=W0,height=H0,image=Img,extra=Opts}),
{{env_spec_tex, ImId}, {Img,MMs}}.
make_spec(Level, Max, OrigImg, Buff0, Buff1, W, H, CL) when Level =< Max ->
Step = Level/Max,
Fill0 = wings_cl:fill(Buff0, <<0:(32*4)>>, W*H*4*4, CL),
Fill1 = wings_cl:fill(Buff1, <<0:(32*4)>>, W*H*4*4, CL),
{B0,B1,Pre} = cl_multipass(make_specular, [OrigImg, W, H, Step],
Buff0, Buff1, 0, 10, [W,H], [Fill0, Fill1], CL),
CC = wings_cl:cast(color_convert, [B0,B1,W,H], [W,H], Pre, CL),
Read = wings_cl:read(B1, W*H*4*4, [CC], CL),
{ok, SpecData} = cl:wait(Read),
Img = << << (round(R*255)), (round(G*255)), (round(B*255)) >> ||
<<R:32/float-native, G:32/float-native, B:32/float-native, _:32>> <= SpecData >>,
    %% Level < 3 andalso
    %%     wings_image:debug_display(900-Level, #e3d_image{width=W, height=H, image=Img, ...}),
[{Img,W,H,Level} | make_spec(Level+1, Max, OrigImg, Buff0, Buff1, W div 2, H div 2, CL)];
make_spec(_Level, _Max, _OrigImg, _B0, _B1, _W, _H, _CL) ->
[].
save_cached_envmap(FileName0, Cached0) ->
FileName = env_map_cache_name(FileName0),
case file:open(FileName, [write, raw, binary]) of
{ok,File} ->
Cached = [{Tag,Img} || {{Tag,_},Img} <- Cached0],
Bin = term_to_binary(Cached),
file:write(File,Bin),
file:close(File);
_ ->
ok
end,
ok.
load_cached_envmap(FileName0) ->
FileName = env_map_cache_name(FileName0),
case filelib:is_file(FileName) of
true ->
case file:read_file(FileName) of
{ok,Bin} ->
Cached = binary_to_term(Bin),
[rebuild_cached_img(Buf) || Buf <- Cached];
_ ->
file:delete(FileName),
[]
end;
false -> []
end.
env_map_cache_name(FileName) ->
CacheName = filename:rootname(filename:basename(FileName))++".emc",
filename:join(wings_u:basedir(user_cache), CacheName).
rebuild_cached_img({Tag,Img}) ->
case Tag of
brdf_tex -> cached_brdf(Img, 512, 512);
env_diffuse_tex -> cached_diffuse(Img, 512, 256);
env_spec_tex -> cached_spec(Img, 2048, 1024)
end.
cached_brdf(Img, W, H) ->
Opts = [{wrap, {clamp,clamp}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(brdf_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{brdf_tex, ImId}.
cached_diffuse(Img, W, H) ->
Opts = [{wrap, {repeat,repeat}}, {filter, {linear, linear}}],
ImId = wings_image:new_hidden(env_diffuse_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{env_diffuse_tex, ImId}.
cached_spec({Img,MMs}, W, H) ->
Opts = [{wrap, {repeat,repeat}}, {filter, {mipmap, linear}}, {mipmaps, MMs}],
ImId = wings_image:new_hidden(env_spec_tex, #e3d_image{width=W,height=H,image=Img,extra=Opts}),
{env_spec_tex, ImId}.
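%% Run Kernel Tot times, ping-ponging between Buff0 and Buff1; returns the last
%% source/destination buffers and the events to wait for.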
cl_multipass(Kernel, Args, Buff0, Buff1, N, Tot, No, Wait, CL) when N < Tot ->
Next = wings_cl:cast(Kernel, Args ++ [Buff0, Buff1, N, Tot], No, Wait, CL),
cl_multipass(Kernel, Args, Buff1, Buff0, N+1, Tot, No, [Next], CL);
cl_multipass(_Kernel, _Args, Buff0, Buff1, _N, _Tot, _No, Wait, _CL) ->
{Buff0, Buff1, Wait}.
cl_setup(Recompile) ->
case ?GET(opencl) of
undefined ->
case wings_cl:is_available(true) of
true ->
try cl_setup_1()
catch _:Reason:ST ->
io:format("CL setup error: ~p ~p~n",
[Reason, ST]),
{error, no_openCL}
end;
false -> {error, no_openCL}
end;
CL0 when Recompile ->
try
CL = wings_cl:compile("img_lib.cl", CL0),
?SET(opencl, CL),
CL
catch _:Reason:ST ->
io:format("CL compile error: ~p ~p~n",
[Reason, ST]),
CL0
end;
CL ->
CL
end.
cl_setup_1() ->
CL0 = wings_cl:setup(),
case wings_cl:have_image_support(CL0) of
true ->
CL = wings_cl:compile("img_lib.cl", CL0),
?SET(opencl, CL),
CL;
false ->
?SET(opencl, CL0),
{error, no_openCL_image}
end.
light_types() ->
[{?__(1,"Infinite"),infinite,
?__(2,"Create a far-away, directional light (like the sun)")},
{?__(3,"Point"),point,
?__(4,"Create a light that radiates light in every direction")},
{?__(5,"Spot"),spot,
?__(6,"Create a spotlight")},
{?__(7,"Ambient"),ambient,
?__(8,"Create an ambient light source")},
{?__(9,"Area"),area,
?__(10,"Create an area that radiates light")}].
menu(X, Y, St) ->
SpotOnly = {iff,[spot]},
NotAmb = {iff,[spot,infinite,point,area]},
One = one_light,
Dir = wings_menu_util:directions(St#st{selmode=body}),
Menu0 = [{?__(2,"Move"),{move_light,Dir}},
{NotAmb,separator},
{NotAmb,{?__(3,"Position Highlight"),
{'VALUE',{position_highlight,{'ASK',{[point],[]}}}},
?__(4,"Position the aim point or location of light")}},
{NotAmb,{?__(5,"Color"),color,
?__(6,"Interactively adjust hue, value, and saturation")}},
{NotAmb,
{?__(7,"Attenuation"),
{attenuation,
[{?__(8,"Linear"),linear,
?__(9,"Interactively adjust how much light weakens as it travels away from its source (linear factor)")},
{?__(10,"Quadratic"),quadratic,
?__(11,"Interactively adjust how much light weakens as it travels away from its source (quadratic factor)")}]}}},
{SpotOnly,separator},
{SpotOnly,{?__(12,"Spot Angle"),spot_angle,
?__(13,"Interactively adjust the angle of the spotlight cone")}},
{SpotOnly,{?__(14,"Spot Falloff"),spot_falloff,
?__(15,"Interactively adjust how much light weakens farther away from the center of the spotlight cone")}},
{One,separator},
{One,{?__(16,"Edit Properties..."),edit,
?__(17,"Edit light properties")}}|body_menu(Dir, St)],
Menu = filter_menu(Menu0, St),
wings_menu:popup_menu(X, Y, light, Menu).
body_menu(Dir, #st{selmode=body}) ->
[separator,
{?STR(menu,18,"Duplicate"),{duplicate,Dir},
?STR(menu,19,"Duplicate and move selected lights")},
{?STR(menu,20,"Delete"),delete,
?STR(menu,21,"Delete selected lights")}];
body_menu(_, _) -> [].
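%% Keep only the menu entries that apply to the type(s) of the selected lights.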
filter_menu(Menu, St) ->
MF = fun(_, #we{light=#light{type=Type}}) -> Type;
(_, #we{}) -> not_light
end,
RF = fun(Type, []) -> Type;
(Type, Type) -> Type;
(_, _) -> mixed
end,
T = wings_sel:dfold(MF, RF, [], St),
foldr(fun({one_light,_}, A) when T =:= mixed -> A;
({one_light,Entry}, A) -> [Entry|A];
({{iff,[_|_]=Types},Entry}, A) ->
case member(T, Types) of
true -> [Entry|A];
false -> A
end;
(Entry, A) -> [Entry|A]
end, [], Menu).
|
775bb869626e9ef6d905d1edae97b1399cd7001d1a5c9335c2c5d7ee2ff19ac7 | hzafar/pcf-interpreter | pcf-macro-tests.rkt | #lang racket
(require
"../../pcf-dynamics.rkt"
"../pcf-macro-forms.rkt"
rackunit
rackunit/log)
(check-equal? (val? z) #t)
(check-equal? (val? (succ (succ (succ z)))) #t)
(check-equal? (val? (lam x (succ (succ x)))) #t)
(check-equal? (val? (ap (lam x (succ x)) (succ z))) #f)
(check-equal? (step z) 'z)
(check-equal? (step (succ (succ z))) (succ (succ z)))
(check-equal? (step (ifz z z (lam x (succ x)))) z)
(check-equal? (step (ifz (succ z) z (lam x (succ x)))) (succ z))
(check-equal? (step (ap (lam x x) z)) z)
;; Simple recursion test
(define recursive (fix y (lam x (ifz x z (lam w (succ (ap y w)))))))
(step (ap recursive z))
(step (step (ap recursive z)))
(step (step (step (ap recursive z))))
(check-equal? (evaluate (ap recursive z)) z)
(step (ap recursive (succ z)))
(step (step (ap recursive (succ z))))
(step (step (step (ap recursive (succ z)))))
(step (step (step (step (ap recursive (succ z))))))
(step (step (step (step (step (ap recursive (succ z)))))))
(step (step (step (step (step (step (ap recursive (succ z))))))))
(check-equal? (evaluate (ap recursive (succ z))) (succ z))
(test-log #:display? #t #:exit? #t) | null | https://raw.githubusercontent.com/hzafar/pcf-interpreter/313587b2a9b7582c894f6d9953f53cf9cc7cdd67/impl2/tests/pcf-macro-tests.rkt | racket | Simple recursion test | #lang racket
(require
"../../pcf-dynamics.rkt"
"../pcf-macro-forms.rkt"
rackunit
rackunit/log)
(check-equal? (val? z) #t)
(check-equal? (val? (succ (succ (succ z)))) #t)
(check-equal? (val? (lam x (succ (succ x)))) #t)
(check-equal? (val? (ap (lam x (succ x)) (succ z))) #f)
(check-equal? (step z) 'z)
(check-equal? (step (succ (succ z))) (succ (succ z)))
(check-equal? (step (ifz z z (lam x (succ x)))) z)
(check-equal? (step (ifz (succ z) z (lam x (succ x)))) (succ z))
(check-equal? (step (ap (lam x x) z)) z)
(define recursive (fix y (lam x (ifz x z (lam w (succ (ap y w)))))))
(step (ap recursive z))
(step (step (ap recursive z)))
(step (step (step (ap recursive z))))
(check-equal? (evaluate (ap recursive z)) z)
(step (ap recursive (succ z)))
(step (step (ap recursive (succ z))))
(step (step (step (ap recursive (succ z)))))
(step (step (step (step (ap recursive (succ z))))))
(step (step (step (step (step (ap recursive (succ z)))))))
(step (step (step (step (step (step (ap recursive (succ z))))))))
(check-equal? (evaluate (ap recursive (succ z))) (succ z))
(test-log #:display? #t #:exit? #t) |
109cebd075b79cbf910c1bb1564b9518bcaf96c6cb138d0f09d13af87b860899 | mlabs-haskell/ogmios-datum-cache | Babbage.hs | module Block.Types.Babbage (
TxOut (..),
Transaction (..),
RawTransaction (..),
BlockHeader (..),
Block (..),
datumsInTxOut,
datumsInTransaction,
) where
import Data.Aeson (FromJSON (parseJSON), withObject, (.:), (.:?))
import Data.ByteString (ByteString)
import Data.Int (Int64)
import Data.Map (Map)
import Data.Map qualified as Map
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8)
import DataHash (DataHash (DataHash))
data TxOut = TxOut
{ address :: Text
, datumHash :: Maybe DataHash
, datum :: Maybe Text
}
deriving stock (Eq, Show)
instance FromJSON TxOut where
parseJSON = withObject "TxOut" $ \o -> do
TxOut
<$> o .: "address"
<*> ((DataHash <$>) <$> o .:? "datumHash")
<*> o .:? "datum"
data Transaction = Transaction
{ datums :: Map DataHash Text
, outputs :: [TxOut]
}
deriving stock (Eq, Show)
instance FromJSON Transaction where
parseJSON = withObject "Transaction" $ \o -> do
witness <- o .: "witness"
datums <- witness .: "datums"
body <- o .: "body"
outputs <- body .: "outputs"
pure $ Transaction datums outputs
data RawTransaction = RawTransaction
{ txId :: Text
, rawTx :: ByteString
}
deriving stock (Eq, Show)
instance FromJSON RawTransaction where
parseJSON = withObject "RawTransaction" $ \v -> do
RawTransaction
<$> v .: "id"
<*> (encodeUtf8 <$> v .: "raw")
data BlockHeader = BlockHeader
{ slot :: Int64
, blockHash :: Text
}
deriving stock (Eq, Show)
instance FromJSON BlockHeader where
parseJSON = withObject "BlockHeader" $ \o ->
BlockHeader
<$> o .: "slot"
<*> o .: "blockHash"
data Block = Block
{ body :: [Transaction]
, rawTransactions :: [RawTransaction]
, header :: BlockHeader
, headerHash :: Text
}
deriving stock (Eq, Show)
instance FromJSON Block where
parseJSON = withObject "Block" $ \v ->
Block
<$> v .: "body"
<*> v .: "body"
<*> v .: "header"
<*> v .: "headerHash"
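-- | The inline datum carried by an output, keyed by its hash (empty when either
-- the hash or the datum is missing).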
datumsInTxOut :: TxOut -> Map DataHash Text
datumsInTxOut txOut
| Just dh <- txOut.datumHash
, Just d <- txOut.datum =
Map.singleton dh d
| otherwise = mempty
datumsInTransaction :: Transaction -> Map DataHash Text
datumsInTransaction tx = tx.datums <> foldMap datumsInTxOut tx.outputs
| null | https://raw.githubusercontent.com/mlabs-haskell/ogmios-datum-cache/201891d9843c951f95bb3ae3c812b8799384bc71/src/Block/Types/Babbage.hs | haskell | module Block.Types.Babbage (
TxOut (..),
Transaction (..),
RawTransaction (..),
BlockHeader (..),
Block (..),
datumsInTxOut,
datumsInTransaction,
) where
import Data.Aeson (FromJSON (parseJSON), withObject, (.:), (.:?))
import Data.ByteString (ByteString)
import Data.Int (Int64)
import Data.Map (Map)
import Data.Map qualified as Map
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8)
import DataHash (DataHash (DataHash))
data TxOut = TxOut
{ address :: Text
, datumHash :: Maybe DataHash
, datum :: Maybe Text
}
deriving stock (Eq, Show)
instance FromJSON TxOut where
parseJSON = withObject "TxOut" $ \o -> do
TxOut
<$> o .: "address"
<*> ((DataHash <$>) <$> o .:? "datumHash")
<*> o .:? "datum"
data Transaction = Transaction
{ datums :: Map DataHash Text
, outputs :: [TxOut]
}
deriving stock (Eq, Show)
instance FromJSON Transaction where
parseJSON = withObject "Transaction" $ \o -> do
witness <- o .: "witness"
datums <- witness .: "datums"
body <- o .: "body"
outputs <- body .: "outputs"
pure $ Transaction datums outputs
data RawTransaction = RawTransaction
{ txId :: Text
, rawTx :: ByteString
}
deriving stock (Eq, Show)
instance FromJSON RawTransaction where
parseJSON = withObject "RawTransaction" $ \v -> do
RawTransaction
<$> v .: "id"
<*> (encodeUtf8 <$> v .: "raw")
data BlockHeader = BlockHeader
{ slot :: Int64
, blockHash :: Text
}
deriving stock (Eq, Show)
instance FromJSON BlockHeader where
parseJSON = withObject "BlockHeader" $ \o ->
BlockHeader
<$> o .: "slot"
<*> o .: "blockHash"
data Block = Block
{ body :: [Transaction]
, rawTransactions :: [RawTransaction]
, header :: BlockHeader
, headerHash :: Text
}
deriving stock (Eq, Show)
instance FromJSON Block where
parseJSON = withObject "Block" $ \v ->
Block
<$> v .: "body"
<*> v .: "body"
<*> v .: "header"
<*> v .: "headerHash"
datumsInTxOut :: TxOut -> Map DataHash Text
datumsInTxOut txOut
| Just dh <- txOut.datumHash
, Just d <- txOut.datum =
Map.singleton dh d
| otherwise = mempty
datumsInTransaction :: Transaction -> Map DataHash Text
datumsInTransaction tx = tx.datums <> foldMap datumsInTxOut tx.outputs
|
|
9e21978f97d02da5cf419451710fcf35d4ebf94b32b984b3a182439c6f5c2381 | rjray/advent-2020-clojure | day14_test.clj | (ns advent-of-code.day14-test
(:require [clojure.test :refer [deftest testing is]]
[advent-of-code.day14 :refer [part-1 part-2]]
[clojure.java.io :refer [resource]]))
(deftest part1
(let [expected 165]
(is (= expected (part-1 (slurp (resource "day14-example.txt")))))))
(deftest part2
(let [expected nil]
(is (= expected (part-2 (slurp (resource "day14-example.txt")))))))
| null | https://raw.githubusercontent.com/rjray/advent-2020-clojure/631b36545ae1efdebd11ca3dd4dca032346e8601/test/advent_of_code/day14_test.clj | clojure | (ns advent-of-code.day14-test
(:require [clojure.test :refer [deftest testing is]]
[advent-of-code.day14 :refer [part-1 part-2]]
[clojure.java.io :refer [resource]]))
(deftest part1
(let [expected 165]
(is (= expected (part-1 (slurp (resource "day14-example.txt")))))))
(deftest part2
(let [expected nil]
(is (= expected (part-2 (slurp (resource "day14-example.txt")))))))
|
|
e9fb81fb5d58b08e89db2a83d86c4c80cc7a89041f310719ec3340545c36c8f3 | holyjak/minimalist-fulcro-template-backendless | mock_server.cljs | (ns com.example.mock-server
"A mock remote for Fulcro that talks to the in-browser Pathom parser
Inspired heavily by -developer-guide/blob/master/src/book/book/pathom.cljs"
(:require
[com.example.pathom :as pathom]
[com.fulcrologic.fulcro.algorithms.tx-processing :as txn]
[com.fulcrologic.fulcro.networking.mock-server-remote :refer [mock-http-server]]))
(defn mock-remote
"A remote in Fulcro is just a map with a `:transmit!` key"
([env]
(let [parser (pathom/new-parser)
transmit! (:transmit! (mock-http-server {:parser (fn [req] (parser env req))}))]
{:transmit! (fn [this send-node]
(js/setTimeout ; simulate some network delay, for fun
#(transmit! this send-node
(update send-node
::txn/result-handler
(fn [handler]
(fn logging-wrapper [res] (println "MOCK SERVER RESULT>" res)
(handler res)))))
100))}))
([]
(mock-remote {})))
| null | https://raw.githubusercontent.com/holyjak/minimalist-fulcro-template-backendless/be1630623191d847e14e8854649d58604ca95f5c/src/com/example/mock_server.cljs | clojure | simulate some network delay, for fun | (ns com.example.mock-server
"A mock remote for Fulcro that talks to the in-browser Pathom parser
Inspired heavily by -developer-guide/blob/master/src/book/book/pathom.cljs"
(:require
[com.example.pathom :as pathom]
[com.fulcrologic.fulcro.algorithms.tx-processing :as txn]
[com.fulcrologic.fulcro.networking.mock-server-remote :refer [mock-http-server]]))
(defn mock-remote
"A remote in Fulcro is just a map with a `:transmit!` key"
([env]
(let [parser (pathom/new-parser)
transmit! (:transmit! (mock-http-server {:parser (fn [req] (parser env req))}))]
{:transmit! (fn [this send-node]
#(transmit! this send-node
(update send-node
::txn/result-handler
(fn [handler]
(fn logging-wrapper [res] (println "MOCK SERVER RESULT>" res)
(handler res)))))
100))}))
([]
(mock-remote {})))
|
456fcd6458cf0d2c14d22ae9c4916c3e3fc46e2f324fc7a2bd3128e259aa4366 | uwiger/unsplit | unsplit_SUITE.erl | %%% @doc
%%% Test suite for unsplit
%%% B.V.
%%% @author < >
-module(unsplit_SUITE).
%%% @end
-include_lib("eunit/include/eunit.hrl").
%%% Include files
-include_lib("common_test/include/ct.hrl").
%%% External exports
-compile(export_all).
%% -define(ERL_FLAGS, "-kernel dist_auto_connect once -pa ../../ -pa ../../../ebin/").
-define(ERL_FLAGS, "-kernel dist_auto_connect once").
-define(TABLE, test1).
-define(NODES, ['mn1@localhost', 'mn2@localhost']).
-define(DISCONNECT_TIME, 4000).
-define(UNSPLIT_TIMEOUT, 5000).
-record(?TABLE,{key,modified=erlang:now(),value}).
%%% Macros
all() ->
[split1].
init_per_suite(Conf) ->
Nodes = ct:get_config(nodes, ?NODES),
DisconnectTime = ct:get_config(disconnect_time, ?DISCONNECT_TIME),
UnsplitTimeout = ct:get_config(unsplit_timeout, ?UNSPLIT_TIMEOUT),
Host = get_host(),
ErlFlags = lists:flatten([?ERL_FLAGS,
get_path_flags(),
" -pa ", filename:absname(
filename:dirname(code:which(?MODULE)))]),
ct:print("ErlFlags = ~p~n", [ErlFlags]),
StartNode = fun(Node)->
ct:print("starting node ~p, on host ~p ~n",[Node, Host]),
{ok, NodeName} = ct_slave:start(Host, Node,
[{erl_flags, ErlFlags}]),
NodeName
end,
NodeNames = lists:map(StartNode, Nodes),
[{disconnect_time, DisconnectTime},
{unsplit_timeout, UnsplitTimeout},
{nodes, NodeNames}|Conf].
end_per_suite(_Conf) ->
Nodes = ct:get_config(nodes,?NODES),
Host = get_host(),
StopNode = fun(Node)->
{ok, _NodeName} = ct_slave:stop(Host, Node)
end,
lists:map(StopNode, Nodes),
ok.
init_per_testcase(Case, Conf) ->
ct:print("Test case ~p started", [Case]),
init_nodes(get_conf(nodes, Conf)),
Conf.
end_per_testcase(Case, Conf) ->
ct:print("Test case ~p finished", [Case]),
terminate_nodes(get_conf(nodes, Conf)),
Conf.
split1()->
[{userdata, [{doc, "Tests split network of 2 nodes"}]}].
split1(Conf)->
DisconnectTime = get_conf(disconnect_time, Conf),
UnsplitTimeout = get_conf(unsplit_timeout, Conf),
Nodes = [M, S|_Rest] = get_conf(nodes, Conf),
ct:print("Initial table size~n"),
print_table_size(Nodes, ?TABLE),
ct:print("inserting records~n"),
{atomic, ok} = write(M, [#?TABLE{key=1, value=a}, #?TABLE{key=2, value=a}]),
print_table_size(Nodes, ?TABLE),
ct:print("disconnecting nodes~n"),
disconnect(M, S),
ct:print("inserting records on one node, while the other one is disconnected~n"),
{atomic, ok} = write(M, [#?TABLE{key=3, value=b}, #?TABLE{key=4, value=b}]),
print_table_size(Nodes, ?TABLE),
timer:sleep(DisconnectTime),
ct:print("reconnecting nodes~n"),
connect(S, M),
timer:sleep(UnsplitTimeout),
print_table_size(Nodes, ?TABLE),
true = compare_table_size(Nodes, ?TABLE).
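%% Helpers: the test passes when both nodes end up with the same number of rows.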
compare_table_size([Node1, Node2|_], Table)->
table_size(Node1, Table) == table_size(Node2, Table).
table_size(Node, Table)->
rpc:call(Node, mnesia, table_info,[Table, size]).
print_table_size([M,S|_], Table)->
ct:print("master size = ~p~n",[table_size(M, Table)]),
ct:print("slave size = ~p~n",[table_size(S, Table)]).
get_conf(Key, Conf)->
proplists:get_value(Key, Conf).
terminate_nodes(Nodes)->
Terminate = fun(Node)->
rpc:call(Node, application, stop, [unsplit]),
rpc:call(Node, mnesia, stop,[]),
rpc:call(Node, application, stop, [sasl])
end,
lists:foreach(Terminate, Nodes).
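%% Start sasl, mnesia and unsplit on every node and create ?TABLE with
%% unsplit_lib:last_modified as its merge (unsplit) method.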
init_nodes(Nodes)->
Init = fun(Node)->
rpc:call(Node, mnesia, delete_schema,[Node]),
rpc:call(Node, application, start, [sasl]),
rpc:call(Node, mnesia, start,[]),
rpc:call(Node, application, start, [unsplit]),
rpc:call(Node, mnesia, create_schema, [Nodes]),
rpc:call(Node, mnesia, change_config, [extra_db_nodes, Nodes--[Node]]),
rpc:call(Node, mnesia, delete_table, [?TABLE]),
rpc:call(Node, mnesia, create_table, [?TABLE,
[{ram_copies,Nodes},
{attributes,[key,modified,value]},
{user_properties,
[{unsplit_method,{unsplit_lib,last_modified,[]}}]}]])
end,
lists:foreach(Init, Nodes).
disconnect(Master, Slave)->
rpc:call(Master, erlang, disconnect_node, [Slave]).
connect(Master, Slave)->
rpc:call(Master, net_kernel, connect_node, [Slave]).
write(Node, Records)->
rpc:call(Node, ?MODULE, write, [Records]).
write(Records)->
Trans = fun()->
lists:foreach(fun(Record)->
mnesia:write(Record)
end, Records)
end,
mnesia:transaction(Trans).
get_host()->
[_, H] = re:split(atom_to_list(node()),"@",[{return,list}]),
list_to_atom(H).
%% {ok, HostS} = inet:gethostname(),
%% list_to_atom(HostS).
get_path_flags() ->
[ [[" -",atom_to_list(K)," ",D] || D <- V]
|| {K,V} <- init:get_arguments(),
K == pa orelse K == pz].
| null | https://raw.githubusercontent.com/uwiger/unsplit/43febfcdb56c5ad5d7a3cfa0c260d9fc25026909/test/unsplit_SUITE.erl | erlang | @doc
Test suite for unsplit
@end
Include files
External exports
-define(ERL_FLAGS, "-kernel dist_auto_connect once -pa ../../ -pa ../../../ebin/").
{ok, HostS} = inet:gethostname(),
list_to_atom(HostS). | B.V.
%%% @author < >
-module(unsplit_SUITE).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-compile(export_all).
-define(ERL_FLAGS, "-kernel dist_auto_connect once").
-define(TABLE, test1).
-define(NODES, ['mn1@localhost', 'mn2@localhost']).
-define(DISCONNECT_TIME, 4000).
-define(UNSPLIT_TIMEOUT, 5000).
-record(?TABLE,{key,modified=erlang:now(),value}).
%%% Macros
all() ->
[split1].
init_per_suite(Conf) ->
Nodes = ct:get_config(nodes, ?NODES),
DisconnectTime = ct:get_config(disconnect_time, ?DISCONNECT_TIME),
UnsplitTimeout = ct:get_config(unsplit_timeout, ?UNSPLIT_TIMEOUT),
Host = get_host(),
ErlFlags = lists:flatten([?ERL_FLAGS,
get_path_flags(),
" -pa ", filename:absname(
filename:dirname(code:which(?MODULE)))]),
ct:print("ErlFlags = ~p~n", [ErlFlags]),
StartNode = fun(Node)->
ct:print("starting node ~p, on host ~p ~n",[Node, Host]),
{ok, NodeName} = ct_slave:start(Host, Node,
[{erl_flags, ErlFlags}]),
NodeName
end,
NodeNames = lists:map(StartNode, Nodes),
[{disconnect_time, DisconnectTime},
{unsplit_timeout, UnsplitTimeout},
{nodes, NodeNames}|Conf].
end_per_suite(_Conf) ->
Nodes = ct:get_config(nodes,?NODES),
Host = get_host(),
StopNode = fun(Node)->
{ok, _NodeName} = ct_slave:stop(Host, Node)
end,
lists:map(StopNode, Nodes),
ok.
init_per_testcase(Case, Conf) ->
ct:print("Test case ~p started", [Case]),
init_nodes(get_conf(nodes, Conf)),
Conf.
end_per_testcase(Case, Conf) ->
ct:print("Test case ~p finished", [Case]),
terminate_nodes(get_conf(nodes, Conf)),
Conf.
split1()->
[{userdata, [{doc, "Tests split network of 2 nodes"}]}].
split1(Conf)->
DisconnectTime = get_conf(disconnect_time, Conf),
UnsplitTimeout = get_conf(unsplit_timeout, Conf),
Nodes = [M, S|_Rest] = get_conf(nodes, Conf),
ct:print("Initial table size~n"),
print_table_size(Nodes, ?TABLE),
ct:print("inserting records~n"),
{atomic, ok} = write(M, [#?TABLE{key=1, value=a}, #?TABLE{key=2, value=a}]),
print_table_size(Nodes, ?TABLE),
ct:print("disconnecting nodes~n"),
disconnect(M, S),
ct:print("inserting records on one node, while the other one is disconnected~n"),
{atomic, ok} = write(M, [#?TABLE{key=3, value=b}, #?TABLE{key=4, value=b}]),
print_table_size(Nodes, ?TABLE),
timer:sleep(DisconnectTime),
ct:print("reconnecting nodes~n"),
connect(S, M),
timer:sleep(UnsplitTimeout),
print_table_size(Nodes, ?TABLE),
true = compare_table_size(Nodes, ?TABLE).
compare_table_size([Node1, Node2|_], Table)->
table_size(Node1, Table) == table_size(Node2, Table).
table_size(Node, Table)->
rpc:call(Node, mnesia, table_info,[Table, size]).
print_table_size([M,S|_], Table)->
ct:print("master size = ~p~n",[table_size(M, Table)]),
ct:print("slave size = ~p~n",[table_size(S, Table)]).
get_conf(Key, Conf)->
proplists:get_value(Key, Conf).
terminate_nodes(Nodes)->
Terminate = fun(Node)->
rpc:call(Node, application, stop, [unsplit]),
rpc:call(Node, mnesia, stop,[]),
rpc:call(Node, application, stop, [sasl])
end,
lists:foreach(Terminate, Nodes).
init_nodes(Nodes)->
Init = fun(Node)->
rpc:call(Node, mnesia, delete_schema,[Node]),
rpc:call(Node, application, start, [sasl]),
rpc:call(Node, mnesia, start,[]),
rpc:call(Node, application, start, [unsplit]),
rpc:call(Node, mnesia, create_schema, [Nodes]),
rpc:call(Node, mnesia, change_config, [extra_db_nodes, Nodes--[Node]]),
rpc:call(Node, mnesia, delete_table, [?TABLE]),
rpc:call(Node, mnesia, create_table, [?TABLE,
[{ram_copies,Nodes},
{attributes,[key,modified,value]},
{user_properties,
[{unsplit_method,{unsplit_lib,last_modified,[]}}]}]])
end,
lists:foreach(Init, Nodes).
disconnect(Master, Slave)->
rpc:call(Master, erlang, disconnect_node, [Slave]).
connect(Master, Slave)->
rpc:call(Master, net_kernel, connect_node, [Slave]).
write(Node, Records)->
rpc:call(Node, ?MODULE, write, [Records]).
write(Records)->
Trans = fun()->
lists:foreach(fun(Record)->
mnesia:write(Record)
end, Records)
end,
mnesia:transaction(Trans).
get_host()->
[_, H] = re:split(atom_to_list(node()),"@",[{return,list}]),
list_to_atom(H).
get_path_flags() ->
[ [[" -",atom_to_list(K)," ",D] || D <- V]
|| {K,V} <- init:get_arguments(),
K == pa orelse K == pz].
|
e5b653bb223ae46cdb2b6b729adc5a73f6159f7499e62c591aeebc6502a896a0 | expipiplus1/vulkan | Extends.hs | {-# language CPP #-}
-- No documentation found for Chapter "Extends"
module OpenXR.CStruct.Extends ( BaseInStructure(..)
, BaseOutStructure(..)
, Extends
, PeekChain(..)
, PokeChain(..)
, Chain
, Extendss
, SomeStruct(..)
, extendSomeStruct
, withSomeStruct
, withSomeCStruct
, peekSomeCStruct
, pokeSomeCStruct
, forgetExtensions
, Extensible(..)
, pattern (::&)
, pattern (:&)
, SomeChild(..)
, withSomeChild
, lowerChildPointer
, Inherits
, Inheritable(..)
) where
import Data.Maybe (fromMaybe)
import Type.Reflection (typeRep)
import Foreign.Marshal.Alloc (allocaBytes)
import GHC.Base (join)
import GHC.IO (throwIO)
import GHC.Ptr (castPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import OpenXR.CStruct (FromCStruct)
import OpenXR.CStruct (FromCStruct(..))
import OpenXR.CStruct (ToCStruct)
import OpenXR.CStruct (ToCStruct(..))
import OpenXR.Zero (Zero(..))
import Data.Proxy (Proxy(Proxy))
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (Ptr)
import GHC.TypeLits (ErrorMessage(..))
import GHC.TypeLits (TypeError)
import Data.Kind (Constraint)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import {-# SOURCE #-} OpenXR.Core10.Input (ActionCreateInfo)
import {-# SOURCE #-} OpenXR.Core10.Input (ActionSetCreateInfo)
import {-# SOURCE #-} OpenXR.Core10.Space (ActionSpaceCreateInfo)
import {-# SOURCE #-} OpenXR.Core10.Input (ActionStateBoolean)
import {-# SOURCE #-} OpenXR.Core10.Input (ActionStateFloat)
import {-# SOURCE #-} OpenXR.Core10.Input (ActionStateGetInfo)
import {-# SOURCE #-} OpenXR.Core10.Input (ActionStatePose)
import {-# SOURCE #-} OpenXR.Core10.Input (ActionStateVector2f)
import {-# SOURCE #-} OpenXR.Core10.Input (ActionSuggestedBinding)
import {-# SOURCE #-} OpenXR.Core10.Input (ActionsSyncInfo)
import {-# SOURCE #-} OpenXR.Core10.Input (ActiveActionSet)
import {-# SOURCE #-} OpenXR.Core10.Instance (ApiLayerProperties)
import {-# SOURCE #-} OpenXR.Core10.Instance (ApplicationInfo)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_binding_modification (BindingModificationBaseHeaderKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_binding_modification (BindingModificationsKHR)
import {-# SOURCE #-} OpenXR.Core10.Input (BoundSourcesForActionEnumerateInfo)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (Color4f)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (CompositionLayerBaseHeader)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_composition_layer_color_scale_bias (CompositionLayerColorScaleBiasKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_composition_layer_cube (CompositionLayerCubeKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_composition_layer_cylinder (CompositionLayerCylinderKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_composition_layer_depth (CompositionLayerDepthInfoKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_composition_layer_equirect2 (CompositionLayerEquirect2KHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_composition_layer_equirect (CompositionLayerEquirectKHR)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (CompositionLayerProjection)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (CompositionLayerProjectionView)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (CompositionLayerQuad)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_controller_model (ControllerModelKeyStateMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_controller_model (ControllerModelNodePropertiesMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_controller_model (ControllerModelNodeStateMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_controller_model (ControllerModelPropertiesMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_controller_model (ControllerModelStateMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_debug_utils (DebugUtilsLabelEXT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_debug_utils (DebugUtilsMessengerCallbackDataEXT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_debug_utils (DebugUtilsMessengerCreateInfoEXT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_debug_utils (DebugUtilsObjectNameInfoEXT)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (EventDataBaseHeader)
import {-# SOURCE #-} OpenXR.Core10.Instance (EventDataBuffer)
import {-# SOURCE #-} OpenXR.Extensions.XR_FB_display_refresh_rate (EventDataDisplayRefreshRateChangedFB)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (EventDataEventsLost)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (EventDataInstanceLossPending)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (EventDataInteractionProfileChanged)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXTX_overlay (EventDataMainSessionVisibilityChangedEXTX)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_performance_settings (EventDataPerfSettingsEXT)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (EventDataReferenceSpaceChangePending)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (EventDataSessionStateChanged)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_visibility_mask (EventDataVisibilityMaskChangedKHR)
import {-# SOURCE #-} OpenXR.Core10.Instance (ExtensionProperties)
import {-# SOURCE #-} OpenXR.Core10.FundamentalTypes (Extent2Df)
import {-# SOURCE #-} OpenXR.Core10.FundamentalTypes (Extent2Di)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_eye_gaze_interaction (EyeGazeSampleTimeEXT)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (Fovf)
import {-# SOURCE #-} OpenXR.Core10.DisplayTiming (FrameBeginInfo)
import {-# SOURCE #-} OpenXR.Core10.DisplayTiming (FrameEndInfo)
import {-# SOURCE #-} OpenXR.Core10.DisplayTiming (FrameState)
import {-# SOURCE #-} OpenXR.Core10.DisplayTiming (FrameWaitInfo)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_D3D11_enable (GraphicsBindingD3D11KHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_D3D12_enable (GraphicsBindingD3D12KHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_MNDX_egl_enable (GraphicsBindingEGLMNDX)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_es_enable (GraphicsBindingOpenGLESAndroidKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_enable (GraphicsBindingOpenGLWaylandKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_enable (GraphicsBindingOpenGLWin32KHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_enable (GraphicsBindingOpenGLXcbKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_enable (GraphicsBindingOpenGLXlibKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_vulkan_enable (GraphicsBindingVulkanKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_D3D11_enable (GraphicsRequirementsD3D11KHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_D3D12_enable (GraphicsRequirementsD3D12KHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_es_enable (GraphicsRequirementsOpenGLESKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_enable (GraphicsRequirementsOpenGLKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_vulkan_enable (GraphicsRequirementsVulkanKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_hand_tracking (HandJointLocationEXT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_hand_tracking (HandJointLocationsEXT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_hand_tracking (HandJointVelocitiesEXT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_hand_tracking (HandJointVelocityEXT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_hand_tracking (HandJointsLocateInfoEXT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_hand_tracking_mesh (HandMeshIndexBufferMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_hand_tracking_mesh (HandMeshMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_hand_tracking_mesh (HandMeshSpaceCreateInfoMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_hand_tracking_mesh (HandMeshUpdateInfoMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_hand_tracking_mesh (HandMeshVertexBufferMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_hand_tracking_mesh (HandMeshVertexMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_hand_tracking_mesh (HandPoseTypeInfoMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_hand_tracking (HandTrackerCreateInfoEXT)
import {-# SOURCE #-} OpenXR.Core10.Haptics (HapticActionInfo)
import {-# SOURCE #-} OpenXR.Core10.Haptics (HapticBaseHeader)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (HapticVibration)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_holographic_window_attachment (HolographicWindowAttachmentMSFT)
import {-# SOURCE #-} OpenXR.Core10.Input (InputSourceLocalizedNameGetInfo)
import {-# SOURCE #-} OpenXR.Core10.Instance (InstanceCreateInfo)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_android_create_instance (InstanceCreateInfoAndroidKHR)
import {-# SOURCE #-} OpenXR.Core10.Instance (InstanceProperties)
import {-# SOURCE #-} OpenXR.Extensions.XR_VALVE_analog_threshold (InteractionProfileAnalogThresholdVALVE)
import {-# SOURCE #-} OpenXR.Core10.Input (InteractionProfileState)
import {-# SOURCE #-} OpenXR.Core10.Input (InteractionProfileSuggestedBinding)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_loader_init_android (LoaderInitInfoAndroidKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_loader_init (LoaderInitInfoBaseHeaderKHR)
import {-# SOURCE #-} OpenXR.Core10.FundamentalTypes (Offset2Df)
import {-# SOURCE #-} OpenXR.Core10.FundamentalTypes (Offset2Di)
import {-# SOURCE #-} OpenXR.Core10.Space (Posef)
import {-# SOURCE #-} OpenXR.Core10.Space (Quaternionf)
import {-# SOURCE #-} OpenXR.Core10.FundamentalTypes (Rect2Df)
import {-# SOURCE #-} OpenXR.Core10.FundamentalTypes (Rect2Di)
import {-# SOURCE #-} OpenXR.Core10.Space (ReferenceSpaceCreateInfo)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_secondary_view_configuration (SecondaryViewConfigurationFrameEndInfoMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_secondary_view_configuration (SecondaryViewConfigurationFrameStateMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_secondary_view_configuration (SecondaryViewConfigurationLayerInfoMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_secondary_view_configuration (SecondaryViewConfigurationSessionBeginInfoMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_secondary_view_configuration (SecondaryViewConfigurationStateMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_secondary_view_configuration (SecondaryViewConfigurationSwapchainCreateInfoMSFT)
import {-# SOURCE #-} OpenXR.Core10.Input (SessionActionSetsAttachInfo)
import {-# SOURCE #-} OpenXR.Core10.Session (SessionBeginInfo)
import {-# SOURCE #-} OpenXR.Core10.Device (SessionCreateInfo)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXTX_overlay (SessionCreateInfoOverlayEXTX)
import {-# SOURCE #-} OpenXR.Core10.Space (SpaceLocation)
import {-# SOURCE #-} OpenXR.Core10.Space (SpaceVelocity)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_spatial_anchor (SpatialAnchorCreateInfoMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_spatial_anchor (SpatialAnchorSpaceCreateInfoMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_spatial_graph_bridge (SpatialGraphNodeSpaceCreateInfoMSFT)
import OpenXR.Core10.Enums.StructureType (StructureType)
import OpenXR.Core10.Enums.StructureType (StructureType(..))
import {-# SOURCE #-} OpenXR.Core10.Image (SwapchainCreateInfo)
import {-# SOURCE #-} OpenXR.Core10.Image (SwapchainImageAcquireInfo)
import {-# SOURCE #-} OpenXR.Core10.Image (SwapchainImageBaseHeader)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_D3D11_enable (SwapchainImageD3D11KHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_D3D12_enable (SwapchainImageD3D12KHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_es_enable (SwapchainImageOpenGLESKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_opengl_enable (SwapchainImageOpenGLKHR)
import {-# SOURCE #-} OpenXR.Core10.Image (SwapchainImageReleaseInfo)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_vulkan_enable (SwapchainImageVulkanKHR)
import {-# SOURCE #-} OpenXR.Core10.Image (SwapchainImageWaitInfo)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (SwapchainSubImage)
import {-# SOURCE #-} OpenXR.Extensions.XR_FB_color_space (SystemColorSpacePropertiesFB)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_eye_gaze_interaction (SystemEyeGazeInteractionPropertiesEXT)
import {-# SOURCE #-} OpenXR.Core10.Device (SystemGetInfo)
import {-# SOURCE #-} OpenXR.Core10.Device (SystemGraphicsProperties)
import {-# SOURCE #-} OpenXR.Extensions.XR_MSFT_hand_tracking_mesh (SystemHandTrackingMeshPropertiesMSFT)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_hand_tracking (SystemHandTrackingPropertiesEXT)
import {-# SOURCE #-} OpenXR.Core10.Device (SystemProperties)
import {-# SOURCE #-} OpenXR.Core10.Device (SystemTrackingProperties)
import {-# SOURCE #-} OpenXR.Core10.Input (Vector2f)
import {-# SOURCE #-} OpenXR.Core10.Space (Vector3f)
import {-# SOURCE #-} OpenXR.Core10.OtherTypes (Vector4f)
import {-# SOURCE #-} OpenXR.Core10.DisplayTiming (View)
import {-# SOURCE #-} OpenXR.Extensions.XR_EXT_view_configuration_depth_range (ViewConfigurationDepthRangeEXT)
import {-# SOURCE #-} OpenXR.Core10.ViewConfigurations (ViewConfigurationProperties)
import {-# SOURCE #-} OpenXR.Core10.ViewConfigurations (ViewConfigurationView)
import {-# SOURCE #-} OpenXR.Extensions.XR_EPIC_view_configuration_fov (ViewConfigurationViewFovEPIC)
import {-# SOURCE #-} OpenXR.Core10.DisplayTiming (ViewLocateInfo)
import {-# SOURCE #-} OpenXR.Core10.DisplayTiming (ViewState)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_visibility_mask (VisibilityMaskKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_vulkan_enable2 (VulkanDeviceCreateInfoKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_vulkan_enable2 (VulkanGraphicsDeviceGetInfoKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_vulkan_enable2 (VulkanInstanceCreateInfoKHR)
import {-# SOURCE #-} OpenXR.Extensions.XR_KHR_vulkan_swapchain_format_list (VulkanSwapchainFormatListCreateInfoKHR)
-- | XrBaseInStructure - Convenience type for iterating (read only)
--
-- == Member Descriptions
--
-- = Description
--
-- 'BaseInStructure' can be used to facilitate iterating through a
-- read-only structure pointer chain.
--
-- = See Also
--
-- 'BaseInStructure', 'BaseOutStructure',
' OpenXR.Core10.Enums . StructureType . StructureType '
data BaseInStructure = BaseInStructure
| @type@ is the ' OpenXR.Core10.Enums . StructureType . StructureType ' of this
-- structure. This base structure itself has no associated
' OpenXR.Core10.Enums . StructureType . StructureType ' value .
type' :: StructureType
, -- | @next@ is @NULL@ or a pointer to the next structure in a structure
chain . No such structures are defined in core OpenXR .
next :: Ptr BaseInStructure
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (BaseInStructure)
#endif
deriving instance Show BaseInStructure
instance ToCStruct BaseInStructure where
withCStruct x f = allocaBytes 16 $ \p -> pokeCStruct p x (f p)
pokeCStruct p BaseInStructure{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (type')
poke ((p `plusPtr` 8 :: Ptr (Ptr BaseInStructure))) (next)
f
cStructSize = 16
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (zero)
poke ((p `plusPtr` 8 :: Ptr (Ptr BaseInStructure))) (zero)
f
instance FromCStruct BaseInStructure where
peekCStruct p = do
type' <- peek @StructureType ((p `plusPtr` 0 :: Ptr StructureType))
next <- peek @(Ptr BaseInStructure) ((p `plusPtr` 8 :: Ptr (Ptr BaseInStructure)))
pure $ BaseInStructure
type' next
instance Storable BaseInStructure where
sizeOf ~_ = 16
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero BaseInStructure where
zero = BaseInStructure
zero
zero
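-- Editor's note (illustrative sketch, not part of the generated binding): the
-- read-only chain described above can be walked by peeking a 'BaseInStructure'
-- and following @next@ until it is null, collecting each structure's type tag;
-- @walkInChain@ is a hypothetical helper, not an OpenXR API:
--
-- > walkInChain :: Ptr BaseInStructure -> IO [StructureType]
-- > walkInChain p
-- >   | p == nullPtr = pure []
-- >   | otherwise = do
-- >       BaseInStructure{type' = ty, next = rest} <- peek p
-- >       tys <- walkInChain rest
-- >       pure (ty : tys)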
-- | XrBaseOutStructure - Convenience type for iterating (mutable)
--
-- == Member Descriptions
--
-- = Description
--
-- 'BaseOutStructure' can be used to facilitate iterating through a
-- structure pointer chain that returns data back to the application.
--
-- = See Also
--
-- 'BaseInStructure', 'BaseOutStructure',
-- 'OpenXR.Core10.Enums.StructureType.StructureType'
data BaseOutStructure = BaseOutStructure
{ -- | @type@ is the 'OpenXR.Core10.Enums.StructureType.StructureType' of this
-- structure. This base structure itself has no associated
-- 'OpenXR.Core10.Enums.StructureType.StructureType' value.
type' :: StructureType
, -- | @next@ is @NULL@ or a pointer to the next structure in a structure
-- chain. No such structures are defined in core OpenXR.
next :: Ptr BaseOutStructure
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (BaseOutStructure)
#endif
deriving instance Show BaseOutStructure
instance ToCStruct BaseOutStructure where
withCStruct x f = allocaBytes 16 $ \p -> pokeCStruct p x (f p)
pokeCStruct p BaseOutStructure{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (type')
poke ((p `plusPtr` 8 :: Ptr (Ptr BaseOutStructure))) (next)
f
cStructSize = 16
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (zero)
poke ((p `plusPtr` 8 :: Ptr (Ptr BaseOutStructure))) (zero)
f
instance FromCStruct BaseOutStructure where
peekCStruct p = do
type' <- peek @StructureType ((p `plusPtr` 0 :: Ptr StructureType))
next <- peek @(Ptr BaseOutStructure) ((p `plusPtr` 8 :: Ptr (Ptr BaseOutStructure)))
pure $ BaseOutStructure
type' next
instance Storable BaseOutStructure where
sizeOf ~_ = 16
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero BaseOutStructure where
zero = BaseOutStructure
zero
zero
type family Extends (a :: [Type] -> Type) (b :: Type) :: Constraint where
Extends CompositionLayerBaseHeader CompositionLayerColorScaleBiasKHR = ()
Extends CompositionLayerProjectionView CompositionLayerDepthInfoKHR = ()
Extends FrameEndInfo SecondaryViewConfigurationFrameEndInfoMSFT = ()
Extends FrameState SecondaryViewConfigurationFrameStateMSFT = ()
Extends HandJointLocationsEXT HandJointVelocitiesEXT = ()
Extends HandTrackerCreateInfoEXT HandPoseTypeInfoMSFT = ()
Extends InstanceCreateInfo InstanceCreateInfoAndroidKHR = ()
Extends InstanceCreateInfo DebugUtilsMessengerCreateInfoEXT = ()
Extends InteractionProfileSuggestedBinding InteractionProfileAnalogThresholdVALVE = ()
Extends InteractionProfileSuggestedBinding BindingModificationsKHR = ()
Extends SessionBeginInfo SecondaryViewConfigurationSessionBeginInfoMSFT = ()
Extends SessionCreateInfo GraphicsBindingOpenGLWin32KHR = ()
Extends SessionCreateInfo GraphicsBindingOpenGLXlibKHR = ()
Extends SessionCreateInfo GraphicsBindingOpenGLXcbKHR = ()
Extends SessionCreateInfo GraphicsBindingOpenGLWaylandKHR = ()
Extends SessionCreateInfo GraphicsBindingD3D11KHR = ()
Extends SessionCreateInfo GraphicsBindingD3D12KHR = ()
Extends SessionCreateInfo GraphicsBindingOpenGLESAndroidKHR = ()
Extends SessionCreateInfo GraphicsBindingVulkanKHR = ()
Extends SessionCreateInfo SessionCreateInfoOverlayEXTX = ()
Extends SessionCreateInfo GraphicsBindingEGLMNDX = ()
Extends SessionCreateInfo HolographicWindowAttachmentMSFT = ()
Extends SpaceLocation SpaceVelocity = ()
Extends SpaceLocation EyeGazeSampleTimeEXT = ()
Extends SwapchainCreateInfo SecondaryViewConfigurationSwapchainCreateInfoMSFT = ()
Extends SystemProperties SystemEyeGazeInteractionPropertiesEXT = ()
Extends SystemProperties SystemHandTrackingPropertiesEXT = ()
Extends SystemProperties SystemHandTrackingMeshPropertiesMSFT = ()
Extends ViewConfigurationView ViewConfigurationDepthRangeEXT = ()
Extends ViewConfigurationView ViewConfigurationViewFovEPIC = ()
Extends a b = TypeError (ShowType a :<>: Text " is not extended by " :<>: ShowType b)
data SomeStruct (a :: [Type] -> Type) where
SomeStruct
:: forall a es
. (Extendss a es, PokeChain es, Show (Chain es))
=> a es
-> SomeStruct a
deriving instance (forall es. Show (Chain es) => Show (a es)) => Show (SomeStruct a)
-- | The constraint is so on this instance to encourage type inference
instance Zero (a '[]) => Zero (SomeStruct a) where
zero = SomeStruct (zero :: a '[])
-- | Forget which extensions a pointed-to struct has by casting the pointer
forgetExtensions :: Ptr (a es) -> Ptr (SomeStruct a)
forgetExtensions = castPtr
-- | Add an extension to the beginning of the struct chain
--
-- This can be used to optionally extend structs based on some condition (for
-- example, an extension or layer being available)
extendSomeStruct
:: (Extensible a, Extends a e, ToCStruct e, Show e)
=> e
-> SomeStruct a
-> SomeStruct a
extendSomeStruct e (SomeStruct a) = SomeStruct (setNext a (e, getNext a))
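-- Editor's note (illustrative sketch, not part of the generated binding): a
-- chain can be extended only when some condition holds, for example attaching
-- a debug messenger to an 'InstanceCreateInfo' chain when validation is
-- wanted. The helper name below is an assumption for the example, not OpenXR
-- API:
--
-- > withValidation :: DebugUtilsMessengerCreateInfoEXT
-- >                -> Bool
-- >                -> SomeStruct InstanceCreateInfo
-- >                -> SomeStruct InstanceCreateInfo
-- > withValidation debugInfo wanted ici
-- >   | wanted    = extendSomeStruct debugInfo ici
-- >   | otherwise = ici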
-- | Consume a 'SomeStruct' value
withSomeStruct
:: forall a b
. SomeStruct a
-> (forall es . (Extendss a es, PokeChain es, Show (Chain es)) => a es -> b)
-> b
withSomeStruct (SomeStruct s) f = f s
-- | Write the C representation of some extended @a@ and use the pointer,
-- the pointer must not be returned from the continuation.
withSomeCStruct
:: forall a b
. (forall es . (Extendss a es, PokeChain es) => ToCStruct (a es))
=> SomeStruct a
-> (forall es . (Extendss a es, PokeChain es) => Ptr (a es) -> IO b)
-> IO b
withSomeCStruct s f = withSomeStruct s (`withCStruct` f)
-- | Given some memory for the head of the chain, allocate and poke the
-- tail and run an action.
pokeSomeCStruct
:: (forall es . (Extendss a es, PokeChain es) => ToCStruct (a es))
=> Ptr (SomeStruct a)
-- ^ Pointer to some memory at least the size of the head of the struct
-- chain.
-> SomeStruct a
-- ^ The struct to poke
-> IO b
-- ^ Computation to run while the poked tail is valid
-> IO b
pokeSomeCStruct p (SomeStruct s) = pokeCStruct (castPtr p) s
-- | Given a pointer to a struct with an unknown chain, peek the struct and
-- its chain.
peekSomeCStruct
:: forall a
. (Extensible a, forall es . (Extendss a es, PeekChain es) => FromCStruct (a es))
=> Ptr (SomeStruct a)
-> IO (SomeStruct a)
peekSomeCStruct p = do
head' <- peekCStruct (castPtr @_ @(a '[]) p)
pNext <- peek @(Ptr BaseOutStructure) (p `plusPtr` 8)
peekSomeChain @a pNext $ \tail' -> SomeStruct (setNext head' tail')
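-- Editor's note (illustrative sketch, not part of the generated binding): a
-- typical round trip writes an extended struct with 'withSomeCStruct' and
-- reads the unknown chain back with 'peekSomeCStruct', e.g. for
-- 'SystemProperties':
--
-- > roundTrip :: SomeStruct SystemProperties -> IO (SomeStruct SystemProperties)
-- > roundTrip s = withSomeCStruct s (peekSomeCStruct . forgetExtensions)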
peekSomeChain
:: forall a b
. (Extensible a)
=> Ptr BaseOutStructure
-> ( forall es
. (Extendss a es, PokeChain es, Show (Chain es))
=> Chain es
-> b
)
-> IO b
peekSomeChain p c = if p == nullPtr
then pure (c ())
else do
baseOut <- peek p
join
$ peekChainHead @a (case baseOut of BaseOutStructure{type'} -> type')
(castPtr @BaseOutStructure @() p)
$ \head' -> peekSomeChain @a (case baseOut of BaseOutStructure{next} -> next)
(\tail' -> c (head', tail'))
peekChainHead
:: forall a b
. Extensible a
=> StructureType
-> Ptr ()
-> (forall e . (Extends a e, ToCStruct e, Show e) => e -> b)
-> IO b
peekChainHead ty p c = case ty of
TYPE_GRAPHICS_BINDING_OPENGL_WIN32_KHR -> go @GraphicsBindingOpenGLWin32KHR
TYPE_GRAPHICS_BINDING_OPENGL_XLIB_KHR -> go @GraphicsBindingOpenGLXlibKHR
TYPE_GRAPHICS_BINDING_OPENGL_XCB_KHR -> go @GraphicsBindingOpenGLXcbKHR
TYPE_GRAPHICS_BINDING_OPENGL_WAYLAND_KHR -> go @GraphicsBindingOpenGLWaylandKHR
TYPE_GRAPHICS_BINDING_D3D11_KHR -> go @GraphicsBindingD3D11KHR
TYPE_GRAPHICS_BINDING_D3D12_KHR -> go @GraphicsBindingD3D12KHR
TYPE_GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR -> go @GraphicsBindingOpenGLESAndroidKHR
TYPE_GRAPHICS_BINDING_VULKAN_KHR -> go @GraphicsBindingVulkanKHR
TYPE_SPACE_VELOCITY -> go @SpaceVelocity
TYPE_COMPOSITION_LAYER_DEPTH_INFO_KHR -> go @CompositionLayerDepthInfoKHR
TYPE_INSTANCE_CREATE_INFO_ANDROID_KHR -> go @InstanceCreateInfoAndroidKHR
TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT -> go @DebugUtilsMessengerCreateInfoEXT
TYPE_SESSION_CREATE_INFO_OVERLAY_EXTX -> go @SessionCreateInfoOverlayEXTX
TYPE_VIEW_CONFIGURATION_DEPTH_RANGE_EXT -> go @ViewConfigurationDepthRangeEXT
TYPE_VIEW_CONFIGURATION_VIEW_FOV_EPIC -> go @ViewConfigurationViewFovEPIC
TYPE_INTERACTION_PROFILE_ANALOG_THRESHOLD_VALVE -> go @InteractionProfileAnalogThresholdVALVE
TYPE_BINDING_MODIFICATIONS_KHR -> go @BindingModificationsKHR
TYPE_SYSTEM_EYE_GAZE_INTERACTION_PROPERTIES_EXT -> go @SystemEyeGazeInteractionPropertiesEXT
TYPE_EYE_GAZE_SAMPLE_TIME_EXT -> go @EyeGazeSampleTimeEXT
TYPE_GRAPHICS_BINDING_EGL_MNDX -> go @GraphicsBindingEGLMNDX
TYPE_SYSTEM_HAND_TRACKING_PROPERTIES_EXT -> go @SystemHandTrackingPropertiesEXT
TYPE_HAND_JOINT_VELOCITIES_EXT -> go @HandJointVelocitiesEXT
TYPE_SYSTEM_HAND_TRACKING_MESH_PROPERTIES_MSFT -> go @SystemHandTrackingMeshPropertiesMSFT
TYPE_HAND_POSE_TYPE_INFO_MSFT -> go @HandPoseTypeInfoMSFT
TYPE_SECONDARY_VIEW_CONFIGURATION_SESSION_BEGIN_INFO_MSFT -> go @SecondaryViewConfigurationSessionBeginInfoMSFT
TYPE_SECONDARY_VIEW_CONFIGURATION_FRAME_STATE_MSFT -> go @SecondaryViewConfigurationFrameStateMSFT
TYPE_SECONDARY_VIEW_CONFIGURATION_FRAME_END_INFO_MSFT -> go @SecondaryViewConfigurationFrameEndInfoMSFT
TYPE_SECONDARY_VIEW_CONFIGURATION_SWAPCHAIN_CREATE_INFO_MSFT -> go @SecondaryViewConfigurationSwapchainCreateInfoMSFT
TYPE_HOLOGRAPHIC_WINDOW_ATTACHMENT_MSFT -> go @HolographicWindowAttachmentMSFT
TYPE_COMPOSITION_LAYER_COLOR_SCALE_BIAS_KHR -> go @CompositionLayerColorScaleBiasKHR
t -> throwIO $ IOError Nothing InvalidArgument "peekChainHead" ("Unrecognized struct type: " <> show t) Nothing Nothing
where
go :: forall e . (Typeable e, FromCStruct e, ToCStruct e, Show e) => IO b
go =
let r = extends @a @e Proxy $ do
head' <- peekCStruct @e (castPtr p)
pure $ c head'
in fromMaybe
(throwIO $ IOError
Nothing
InvalidArgument
"peekChainHead"
( "Illegal struct extension of "
<> extensibleTypeName @a
<> " with "
<> show ty
)
Nothing
Nothing
)
r
class Extensible (a :: [Type] -> Type) where
extensibleTypeName :: String
-- ^ For error reporting an invalid extension
getNext :: a es -> Chain es
setNext :: a ds -> Chain es -> a es
extends :: forall e b proxy. Typeable e => proxy e -> (Extends a e => b) -> Maybe b
type family Chain (xs :: [a]) = (r :: a) | r -> xs where
Chain '[] = ()
Chain (x:xs) = (x, Chain xs)
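-- Editor's note: 'Chain' maps a type-level list to nested pairs, e.g.
-- @Chain '[A, B]@ reduces to @(A, (B, ()))@; the '::&' and ':&' patterns
-- below build and take apart exactly this shape.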
-- | A pattern synonym to separate the head of a struct chain from the
-- tail, use in conjunction with ':&' to extract several members.
--
-- @
-- Head{..} ::& () <- returningNoTail a b c
-- -- Equivalent to
-- Head{..} <- returningNoTail @'[] a b c
-- @
--
-- @
-- Head{..} ::& Foo{..} :& Bar{..} :& () <- returningWithTail a b c
-- @
--
-- @
-- myFun (Head{..} :&& Foo{..} :& ())
-- @
pattern (::&) :: Extensible a => a es' -> Chain es -> a es
pattern a ::& es <- (\a -> (a, getNext a) -> (a, es))
where a ::& es = setNext a es
infix 6 ::&
{-# complete (::&) :: GraphicsBindingOpenGLWin32KHR #-}
{-# complete (::&) :: GraphicsBindingOpenGLXlibKHR #-}
{-# complete (::&) :: GraphicsBindingOpenGLXcbKHR #-}
{-# complete (::&) :: GraphicsBindingOpenGLWaylandKHR #-}
{-# complete (::&) :: GraphicsBindingD3D11KHR #-}
{-# complete (::&) :: GraphicsBindingD3D12KHR #-}
{-# complete (::&) :: GraphicsBindingOpenGLESAndroidKHR #-}
{-# complete (::&) :: GraphicsBindingVulkanKHR #-}
{-# complete (::&) :: SpaceVelocity #-}
{-# complete (::&) :: CompositionLayerDepthInfoKHR #-}
{-# complete (::&) :: InstanceCreateInfoAndroidKHR #-}
{-# complete (::&) :: DebugUtilsMessengerCreateInfoEXT #-}
{-# complete (::&) :: SessionCreateInfoOverlayEXTX #-}
{-# complete (::&) :: ViewConfigurationDepthRangeEXT #-}
{-# complete (::&) :: ViewConfigurationViewFovEPIC #-}
{-# complete (::&) :: InteractionProfileAnalogThresholdVALVE #-}
{-# complete (::&) :: BindingModificationsKHR #-}
{-# complete (::&) :: SystemEyeGazeInteractionPropertiesEXT #-}
{-# complete (::&) :: EyeGazeSampleTimeEXT #-}
{-# complete (::&) :: GraphicsBindingEGLMNDX #-}
{-# complete (::&) :: SystemHandTrackingPropertiesEXT #-}
{-# complete (::&) :: HandJointVelocitiesEXT #-}
{-# complete (::&) :: SystemHandTrackingMeshPropertiesMSFT #-}
{-# complete (::&) :: HandPoseTypeInfoMSFT #-}
{-# complete (::&) :: SecondaryViewConfigurationSessionBeginInfoMSFT #-}
{-# complete (::&) :: SecondaryViewConfigurationFrameStateMSFT #-}
{-# complete (::&) :: SecondaryViewConfigurationFrameEndInfoMSFT #-}
{-# complete (::&) :: SecondaryViewConfigurationSwapchainCreateInfoMSFT #-}
{-# complete (::&) :: HolographicWindowAttachmentMSFT #-}
{-# complete (::&) :: CompositionLayerColorScaleBiasKHR #-}
-- | View the head and tail of a 'Chain', see '::&'
--
-- Equivalent to @(,)@
pattern (:&) :: e -> Chain es -> Chain (e:es)
pattern e :& es = (e, es)
infixr 7 :&
{-# complete (:&) #-}
type family Extendss (p :: [Type] -> Type) (xs :: [Type]) :: Constraint where
Extendss p '[] = ()
Extendss p (x : xs) = (Extends p x, Extendss p xs)
class PokeChain es where
withChain :: Chain es -> (Ptr (Chain es) -> IO a) -> IO a
withZeroChain :: (Ptr (Chain es) -> IO a) -> IO a
instance PokeChain '[] where
withChain () f = f nullPtr
withZeroChain f = f nullPtr
instance (ToCStruct e, PokeChain es) => PokeChain (e:es) where
withChain (e, es) f = evalContT $ do
t <- ContT $ withChain es
h <- ContT $ withCStruct e
lift $ linkChain h t
lift $ f (castPtr h)
withZeroChain f = evalContT $ do
t <- ContT $ withZeroChain @es
h <- ContT $ withZeroCStruct @e
lift $ linkChain h t
lift $ f (castPtr h)
class PeekChain es where
peekChain :: Ptr (Chain es) -> IO (Chain es)
instance PeekChain '[] where
peekChain _ = pure ()
instance (FromCStruct e, PeekChain es) => PeekChain (e:es) where
peekChain p = do
h <- peekCStruct @e (castPtr p)
tPtr <- peek (p `plusPtr` 8)
t <- peekChain tPtr
pure (h, t)
linkChain :: Ptr a -> Ptr b -> IO ()
linkChain head' tail' = poke (head' `plusPtr` 8) tail'
data SomeChild (a :: Type) where
SomeChild :: forall a b . (Inherits a b, Typeable b, ToCStruct b, Show b) => b -> SomeChild a
deriving instance Show (SomeChild a)
type family Inherits (a :: Type) (b :: Type) :: Constraint where
Inherits SwapchainImageBaseHeader SwapchainImageD3D12KHR = ()
Inherits SwapchainImageBaseHeader SwapchainImageD3D11KHR = ()
Inherits SwapchainImageBaseHeader SwapchainImageVulkanKHR = ()
Inherits SwapchainImageBaseHeader SwapchainImageOpenGLESKHR = ()
Inherits SwapchainImageBaseHeader SwapchainImageOpenGLKHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerEquirect2KHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerEquirectKHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerCubeKHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerCylinderKHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerQuad = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerProjection = ()
Inherits HapticBaseHeader HapticVibration = ()
Inherits EventDataBaseHeader EventDataDisplayRefreshRateChangedFB = ()
Inherits EventDataBaseHeader EventDataMainSessionVisibilityChangedEXTX = ()
Inherits EventDataBaseHeader EventDataInteractionProfileChanged = ()
Inherits EventDataBaseHeader EventDataVisibilityMaskChangedKHR = ()
Inherits EventDataBaseHeader EventDataPerfSettingsEXT = ()
Inherits EventDataBaseHeader EventDataReferenceSpaceChangePending = ()
Inherits EventDataBaseHeader EventDataSessionStateChanged = ()
Inherits EventDataBaseHeader EventDataInstanceLossPending = ()
Inherits EventDataBaseHeader EventDataEventsLost = ()
Inherits LoaderInitInfoBaseHeaderKHR LoaderInitInfoAndroidKHR = ()
Inherits parent child =
TypeError (ShowType parent :<>: Text " is not inherited by " :<>: ShowType child)
class Inheritable (a :: Type) where
peekSomeCChild :: Ptr (SomeChild a) -> IO (SomeChild a)
withSomeChild :: SomeChild a -> (Ptr (SomeChild a) -> IO b) -> IO b
withSomeChild (SomeChild c) f = withCStruct c (f . lowerChildPointer)
lowerChildPointer :: Inherits a b => Ptr b -> Ptr (SomeChild a)
lowerChildPointer = castPtr
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/b1e33d1031779b4740c279c68879d05aee371659/openxr/src/OpenXR/CStruct/Extends.hs | haskell | # language CPP #
No documentation found for Chapter "Extends"
| XrBaseInStructure - Convenience type for iterating (read only)
== Member Descriptions
= Description
'BaseInStructure' can be used to facilitate iterating through a
read-only structure pointer chain.
= See Also
'BaseInStructure', 'BaseOutStructure',
structure. This base structure itself has no associated
| @next@ is @NULL@ or a pointer to the next structure in a structure
| XrBaseOutStructure - Convenience type for iterating (mutable)
== Member Descriptions
= Description
'BaseOutStructure' can be used to facilitate iterating through a
structure pointer chain that returns data back to the application.
= See Also
'BaseInStructure', 'BaseOutStructure',
structure. This base structure itself has no associated
| @next@ is @NULL@ or a pointer to the next structure in a structure
| The constraint is so on this instance to encourage type inference
| Forget which extensions a pointed-to struct has by casting the pointer
| Add an extension to the beginning of the struct chain
This can be used to optionally extend structs based on some condition (for
example, an extension or layer being available)
| Write the C representation of some extended @a@ and use the pointer,
the pointer must not be returned from the continuation.
| Given some memory for the head of the chain, allocate and poke the
tail and run an action.
^ Pointer to some memory at least the size of the head of the struct
chain.
^ The struct to poke
^ Computation to run while the poked tail is valid
| Given a pointer to a struct with an unknown chain, peek the struct and
its chain.
^ For error reporting an invalid extension
| A pattern synonym to separate the head of a struct chain from the
tail, use in conjunction with ':&' to extract several members.
@
Head{..} ::& () <- returningNoTail a b c
-- Equivalent to
Head{..} <- returningNoTail @'[] a b c
@
@
Head{..} ::& Foo{..} :& Bar{..} :& () <- returningWithTail a b c
@
@
myFun (Head{..} :&& Foo{..} :& ())
@
# complete (::&) :: GraphicsBindingOpenGLXlibKHR #
# complete (::&) :: GraphicsBindingOpenGLXcbKHR #
# complete (::&) :: GraphicsBindingOpenGLWaylandKHR #
# complete (::&) :: GraphicsBindingD3D11KHR #
# complete (::&) :: GraphicsBindingD3D12KHR #
# complete (::&) :: GraphicsBindingOpenGLESAndroidKHR #
# complete (::&) :: GraphicsBindingVulkanKHR #
# complete (::&) :: SpaceVelocity #
# complete (::&) :: CompositionLayerDepthInfoKHR #
# complete (::&) :: InstanceCreateInfoAndroidKHR #
# complete (::&) :: DebugUtilsMessengerCreateInfoEXT #
# complete (::&) :: SessionCreateInfoOverlayEXTX #
# complete (::&) :: ViewConfigurationDepthRangeEXT #
# complete (::&) :: ViewConfigurationViewFovEPIC #
# complete (::&) :: InteractionProfileAnalogThresholdVALVE #
# complete (::&) :: BindingModificationsKHR #
# complete (::&) :: SystemEyeGazeInteractionPropertiesEXT #
# complete (::&) :: EyeGazeSampleTimeEXT #
# complete (::&) :: GraphicsBindingEGLMNDX #
# complete (::&) :: SystemHandTrackingPropertiesEXT #
# complete (::&) :: HandJointVelocitiesEXT #
# complete (::&) :: SystemHandTrackingMeshPropertiesMSFT #
# complete (::&) :: HandPoseTypeInfoMSFT #
# complete (::&) :: SecondaryViewConfigurationSessionBeginInfoMSFT #
# complete (::&) :: SecondaryViewConfigurationFrameStateMSFT #
# complete (::&) :: SecondaryViewConfigurationFrameEndInfoMSFT #
# complete (::&) :: SecondaryViewConfigurationSwapchainCreateInfoMSFT #
# complete (::&) :: HolographicWindowAttachmentMSFT #
# complete (::&) :: CompositionLayerColorScaleBiasKHR #
| View the head and tail of a 'Chain', see '::&'
# complete (:&) # | module OpenXR.CStruct.Extends ( BaseInStructure(..)
, BaseOutStructure(..)
, Extends
, PeekChain(..)
, PokeChain(..)
, Chain
, Extendss
, SomeStruct(..)
, extendSomeStruct
, withSomeStruct
, withSomeCStruct
, peekSomeCStruct
, pokeSomeCStruct
, forgetExtensions
, Extensible(..)
, pattern (::&)
, pattern (:&)
, SomeChild(..)
, withSomeChild
, lowerChildPointer
, Inherits
, Inheritable(..)
) where
import Data.Maybe (fromMaybe)
import Type.Reflection (typeRep)
import Foreign.Marshal.Alloc (allocaBytes)
import GHC.Base (join)
import GHC.IO (throwIO)
import GHC.Ptr (castPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import OpenXR.CStruct (FromCStruct)
import OpenXR.CStruct (FromCStruct(..))
import OpenXR.CStruct (ToCStruct)
import OpenXR.CStruct (ToCStruct(..))
import OpenXR.Zero (Zero(..))
import Data.Proxy (Proxy(Proxy))
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (Ptr)
import GHC.TypeLits (ErrorMessage(..))
import GHC.TypeLits (TypeError)
import Data.Kind (Constraint)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import OpenXR.Core10.Enums.StructureType (StructureType)
import OpenXR.Core10.Enums.StructureType (StructureType(..))
-- 'OpenXR.Core10.Enums.StructureType.StructureType'
data BaseInStructure = BaseInStructure
{ -- | @type@ is the 'OpenXR.Core10.Enums.StructureType.StructureType' of this
-- 'OpenXR.Core10.Enums.StructureType.StructureType' value.
type' :: StructureType
-- chain. No such structures are defined in core OpenXR.
next :: Ptr BaseInStructure
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (BaseInStructure)
#endif
deriving instance Show BaseInStructure
instance ToCStruct BaseInStructure where
withCStruct x f = allocaBytes 16 $ \p -> pokeCStruct p x (f p)
pokeCStruct p BaseInStructure{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (type')
poke ((p `plusPtr` 8 :: Ptr (Ptr BaseInStructure))) (next)
f
cStructSize = 16
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (zero)
poke ((p `plusPtr` 8 :: Ptr (Ptr BaseInStructure))) (zero)
f
instance FromCStruct BaseInStructure where
peekCStruct p = do
type' <- peek @StructureType ((p `plusPtr` 0 :: Ptr StructureType))
next <- peek @(Ptr BaseInStructure) ((p `plusPtr` 8 :: Ptr (Ptr BaseInStructure)))
pure $ BaseInStructure
type' next
instance Storable BaseInStructure where
sizeOf ~_ = 16
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero BaseInStructure where
zero = BaseInStructure
zero
zero
-- 'OpenXR.Core10.Enums.StructureType.StructureType'
data BaseOutStructure = BaseOutStructure
{ -- | @type@ is the 'OpenXR.Core10.Enums.StructureType.StructureType' of this
-- 'OpenXR.Core10.Enums.StructureType.StructureType' value.
type' :: StructureType
-- chain. No such structures are defined in core OpenXR.
next :: Ptr BaseOutStructure
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (BaseOutStructure)
#endif
deriving instance Show BaseOutStructure
instance ToCStruct BaseOutStructure where
withCStruct x f = allocaBytes 16 $ \p -> pokeCStruct p x (f p)
pokeCStruct p BaseOutStructure{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (type')
poke ((p `plusPtr` 8 :: Ptr (Ptr BaseOutStructure))) (next)
f
cStructSize = 16
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (zero)
poke ((p `plusPtr` 8 :: Ptr (Ptr BaseOutStructure))) (zero)
f
instance FromCStruct BaseOutStructure where
peekCStruct p = do
type' <- peek @StructureType ((p `plusPtr` 0 :: Ptr StructureType))
next <- peek @(Ptr BaseOutStructure) ((p `plusPtr` 8 :: Ptr (Ptr BaseOutStructure)))
pure $ BaseOutStructure
type' next
instance Storable BaseOutStructure where
sizeOf ~_ = 16
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero BaseOutStructure where
zero = BaseOutStructure
zero
zero
type family Extends (a :: [Type] -> Type) (b :: Type) :: Constraint where
Extends CompositionLayerBaseHeader CompositionLayerColorScaleBiasKHR = ()
Extends CompositionLayerProjectionView CompositionLayerDepthInfoKHR = ()
Extends FrameEndInfo SecondaryViewConfigurationFrameEndInfoMSFT = ()
Extends FrameState SecondaryViewConfigurationFrameStateMSFT = ()
Extends HandJointLocationsEXT HandJointVelocitiesEXT = ()
Extends HandTrackerCreateInfoEXT HandPoseTypeInfoMSFT = ()
Extends InstanceCreateInfo InstanceCreateInfoAndroidKHR = ()
Extends InstanceCreateInfo DebugUtilsMessengerCreateInfoEXT = ()
Extends InteractionProfileSuggestedBinding InteractionProfileAnalogThresholdVALVE = ()
Extends InteractionProfileSuggestedBinding BindingModificationsKHR = ()
Extends SessionBeginInfo SecondaryViewConfigurationSessionBeginInfoMSFT = ()
Extends SessionCreateInfo GraphicsBindingOpenGLWin32KHR = ()
Extends SessionCreateInfo GraphicsBindingOpenGLXlibKHR = ()
Extends SessionCreateInfo GraphicsBindingOpenGLXcbKHR = ()
Extends SessionCreateInfo GraphicsBindingOpenGLWaylandKHR = ()
Extends SessionCreateInfo GraphicsBindingD3D11KHR = ()
Extends SessionCreateInfo GraphicsBindingD3D12KHR = ()
Extends SessionCreateInfo GraphicsBindingOpenGLESAndroidKHR = ()
Extends SessionCreateInfo GraphicsBindingVulkanKHR = ()
Extends SessionCreateInfo SessionCreateInfoOverlayEXTX = ()
Extends SessionCreateInfo GraphicsBindingEGLMNDX = ()
Extends SessionCreateInfo HolographicWindowAttachmentMSFT = ()
Extends SpaceLocation SpaceVelocity = ()
Extends SpaceLocation EyeGazeSampleTimeEXT = ()
Extends SwapchainCreateInfo SecondaryViewConfigurationSwapchainCreateInfoMSFT = ()
Extends SystemProperties SystemEyeGazeInteractionPropertiesEXT = ()
Extends SystemProperties SystemHandTrackingPropertiesEXT = ()
Extends SystemProperties SystemHandTrackingMeshPropertiesMSFT = ()
Extends ViewConfigurationView ViewConfigurationDepthRangeEXT = ()
Extends ViewConfigurationView ViewConfigurationViewFovEPIC = ()
Extends a b = TypeError (ShowType a :<>: Text " is not extended by " :<>: ShowType b)
data SomeStruct (a :: [Type] -> Type) where
SomeStruct
:: forall a es
. (Extendss a es, PokeChain es, Show (Chain es))
=> a es
-> SomeStruct a
deriving instance (forall es. Show (Chain es) => Show (a es)) => Show (SomeStruct a)
instance Zero (a '[]) => Zero (SomeStruct a) where
zero = SomeStruct (zero :: a '[])
forgetExtensions :: Ptr (a es) -> Ptr (SomeStruct a)
forgetExtensions = castPtr
extendSomeStruct
:: (Extensible a, Extends a e, ToCStruct e, Show e)
=> e
-> SomeStruct a
-> SomeStruct a
extendSomeStruct e (SomeStruct a) = SomeStruct (setNext a (e, getNext a))
-- | Consume a 'SomeStruct' value
withSomeStruct
:: forall a b
. SomeStruct a
-> (forall es . (Extendss a es, PokeChain es, Show (Chain es)) => a es -> b)
-> b
withSomeStruct (SomeStruct s) f = f s
withSomeCStruct
:: forall a b
. (forall es . (Extendss a es, PokeChain es) => ToCStruct (a es))
=> SomeStruct a
-> (forall es . (Extendss a es, PokeChain es) => Ptr (a es) -> IO b)
-> IO b
withSomeCStruct s f = withSomeStruct s (`withCStruct` f)
pokeSomeCStruct
:: (forall es . (Extendss a es, PokeChain es) => ToCStruct (a es))
=> Ptr (SomeStruct a)
-> SomeStruct a
-> IO b
-> IO b
pokeSomeCStruct p (SomeStruct s) = pokeCStruct (castPtr p) s
peekSomeCStruct
:: forall a
. (Extensible a, forall es . (Extendss a es, PeekChain es) => FromCStruct (a es))
=> Ptr (SomeStruct a)
-> IO (SomeStruct a)
peekSomeCStruct p = do
head' <- peekCStruct (castPtr @_ @(a '[]) p)
pNext <- peek @(Ptr BaseOutStructure) (p `plusPtr` 8)
peekSomeChain @a pNext $ \tail' -> SomeStruct (setNext head' tail')
peekSomeChain
:: forall a b
. (Extensible a)
=> Ptr BaseOutStructure
-> ( forall es
. (Extendss a es, PokeChain es, Show (Chain es))
=> Chain es
-> b
)
-> IO b
peekSomeChain p c = if p == nullPtr
then pure (c ())
else do
baseOut <- peek p
join
$ peekChainHead @a (case baseOut of BaseOutStructure{type'} -> type')
(castPtr @BaseOutStructure @() p)
$ \head' -> peekSomeChain @a (case baseOut of BaseOutStructure{next} -> next)
(\tail' -> c (head', tail'))
peekChainHead
:: forall a b
. Extensible a
=> StructureType
-> Ptr ()
-> (forall e . (Extends a e, ToCStruct e, Show e) => e -> b)
-> IO b
peekChainHead ty p c = case ty of
TYPE_GRAPHICS_BINDING_OPENGL_WIN32_KHR -> go @GraphicsBindingOpenGLWin32KHR
TYPE_GRAPHICS_BINDING_OPENGL_XLIB_KHR -> go @GraphicsBindingOpenGLXlibKHR
TYPE_GRAPHICS_BINDING_OPENGL_XCB_KHR -> go @GraphicsBindingOpenGLXcbKHR
TYPE_GRAPHICS_BINDING_OPENGL_WAYLAND_KHR -> go @GraphicsBindingOpenGLWaylandKHR
TYPE_GRAPHICS_BINDING_D3D11_KHR -> go @GraphicsBindingD3D11KHR
TYPE_GRAPHICS_BINDING_D3D12_KHR -> go @GraphicsBindingD3D12KHR
TYPE_GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR -> go @GraphicsBindingOpenGLESAndroidKHR
TYPE_GRAPHICS_BINDING_VULKAN_KHR -> go @GraphicsBindingVulkanKHR
TYPE_SPACE_VELOCITY -> go @SpaceVelocity
TYPE_COMPOSITION_LAYER_DEPTH_INFO_KHR -> go @CompositionLayerDepthInfoKHR
TYPE_INSTANCE_CREATE_INFO_ANDROID_KHR -> go @InstanceCreateInfoAndroidKHR
TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT -> go @DebugUtilsMessengerCreateInfoEXT
TYPE_SESSION_CREATE_INFO_OVERLAY_EXTX -> go @SessionCreateInfoOverlayEXTX
TYPE_VIEW_CONFIGURATION_DEPTH_RANGE_EXT -> go @ViewConfigurationDepthRangeEXT
TYPE_VIEW_CONFIGURATION_VIEW_FOV_EPIC -> go @ViewConfigurationViewFovEPIC
TYPE_INTERACTION_PROFILE_ANALOG_THRESHOLD_VALVE -> go @InteractionProfileAnalogThresholdVALVE
TYPE_BINDING_MODIFICATIONS_KHR -> go @BindingModificationsKHR
TYPE_SYSTEM_EYE_GAZE_INTERACTION_PROPERTIES_EXT -> go @SystemEyeGazeInteractionPropertiesEXT
TYPE_EYE_GAZE_SAMPLE_TIME_EXT -> go @EyeGazeSampleTimeEXT
TYPE_GRAPHICS_BINDING_EGL_MNDX -> go @GraphicsBindingEGLMNDX
TYPE_SYSTEM_HAND_TRACKING_PROPERTIES_EXT -> go @SystemHandTrackingPropertiesEXT
TYPE_HAND_JOINT_VELOCITIES_EXT -> go @HandJointVelocitiesEXT
TYPE_SYSTEM_HAND_TRACKING_MESH_PROPERTIES_MSFT -> go @SystemHandTrackingMeshPropertiesMSFT
TYPE_HAND_POSE_TYPE_INFO_MSFT -> go @HandPoseTypeInfoMSFT
TYPE_SECONDARY_VIEW_CONFIGURATION_SESSION_BEGIN_INFO_MSFT -> go @SecondaryViewConfigurationSessionBeginInfoMSFT
TYPE_SECONDARY_VIEW_CONFIGURATION_FRAME_STATE_MSFT -> go @SecondaryViewConfigurationFrameStateMSFT
TYPE_SECONDARY_VIEW_CONFIGURATION_FRAME_END_INFO_MSFT -> go @SecondaryViewConfigurationFrameEndInfoMSFT
TYPE_SECONDARY_VIEW_CONFIGURATION_SWAPCHAIN_CREATE_INFO_MSFT -> go @SecondaryViewConfigurationSwapchainCreateInfoMSFT
TYPE_HOLOGRAPHIC_WINDOW_ATTACHMENT_MSFT -> go @HolographicWindowAttachmentMSFT
TYPE_COMPOSITION_LAYER_COLOR_SCALE_BIAS_KHR -> go @CompositionLayerColorScaleBiasKHR
t -> throwIO $ IOError Nothing InvalidArgument "peekChainHead" ("Unrecognized struct type: " <> show t) Nothing Nothing
where
go :: forall e . (Typeable e, FromCStruct e, ToCStruct e, Show e) => IO b
go =
let r = extends @a @e Proxy $ do
head' <- peekCStruct @e (castPtr p)
pure $ c head'
in fromMaybe
(throwIO $ IOError
Nothing
InvalidArgument
"peekChainHead"
( "Illegal struct extension of "
<> extensibleTypeName @a
<> " with "
<> show ty
)
Nothing
Nothing
)
r
class Extensible (a :: [Type] -> Type) where
extensibleTypeName :: String
getNext :: a es -> Chain es
setNext :: a ds -> Chain es -> a es
extends :: forall e b proxy. Typeable e => proxy e -> (Extends a e => b) -> Maybe b
type family Chain (xs :: [a]) = (r :: a) | r -> xs where
Chain '[] = ()
Chain (x:xs) = (x, Chain xs)
pattern (::&) :: Extensible a => a es' -> Chain es -> a es
pattern a ::& es <- (\a -> (a, getNext a) -> (a, es))
where a ::& es = setNext a es
infix 6 ::&
{-# complete (::&) :: GraphicsBindingOpenGLWin32KHR #-}
-- Equivalent to @(,)@
pattern (:&) :: e -> Chain es -> Chain (e:es)
pattern e :& es = (e, es)
infixr 7 :&
type family Extendss (p :: [Type] -> Type) (xs :: [Type]) :: Constraint where
Extendss p '[] = ()
Extendss p (x : xs) = (Extends p x, Extendss p xs)
class PokeChain es where
withChain :: Chain es -> (Ptr (Chain es) -> IO a) -> IO a
withZeroChain :: (Ptr (Chain es) -> IO a) -> IO a
instance PokeChain '[] where
withChain () f = f nullPtr
withZeroChain f = f nullPtr
instance (ToCStruct e, PokeChain es) => PokeChain (e:es) where
withChain (e, es) f = evalContT $ do
t <- ContT $ withChain es
h <- ContT $ withCStruct e
lift $ linkChain h t
lift $ f (castPtr h)
withZeroChain f = evalContT $ do
t <- ContT $ withZeroChain @es
h <- ContT $ withZeroCStruct @e
lift $ linkChain h t
lift $ f (castPtr h)
class PeekChain es where
peekChain :: Ptr (Chain es) -> IO (Chain es)
instance PeekChain '[] where
peekChain _ = pure ()
instance (FromCStruct e, PeekChain es) => PeekChain (e:es) where
peekChain p = do
h <- peekCStruct @e (castPtr p)
tPtr <- peek (p `plusPtr` 8)
t <- peekChain tPtr
pure (h, t)
linkChain :: Ptr a -> Ptr b -> IO ()
linkChain head' tail' = poke (head' `plusPtr` 8) tail'
data SomeChild (a :: Type) where
SomeChild :: forall a b . (Inherits a b, Typeable b, ToCStruct b, Show b) => b -> SomeChild a
deriving instance Show (SomeChild a)
type family Inherits (a :: Type) (b :: Type) :: Constraint where
Inherits SwapchainImageBaseHeader SwapchainImageD3D12KHR = ()
Inherits SwapchainImageBaseHeader SwapchainImageD3D11KHR = ()
Inherits SwapchainImageBaseHeader SwapchainImageVulkanKHR = ()
Inherits SwapchainImageBaseHeader SwapchainImageOpenGLESKHR = ()
Inherits SwapchainImageBaseHeader SwapchainImageOpenGLKHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerEquirect2KHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerEquirectKHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerCubeKHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerCylinderKHR = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerQuad = ()
Inherits (CompositionLayerBaseHeader '[]) CompositionLayerProjection = ()
Inherits HapticBaseHeader HapticVibration = ()
Inherits EventDataBaseHeader EventDataDisplayRefreshRateChangedFB = ()
Inherits EventDataBaseHeader EventDataMainSessionVisibilityChangedEXTX = ()
Inherits EventDataBaseHeader EventDataInteractionProfileChanged = ()
Inherits EventDataBaseHeader EventDataVisibilityMaskChangedKHR = ()
Inherits EventDataBaseHeader EventDataPerfSettingsEXT = ()
Inherits EventDataBaseHeader EventDataReferenceSpaceChangePending = ()
Inherits EventDataBaseHeader EventDataSessionStateChanged = ()
Inherits EventDataBaseHeader EventDataInstanceLossPending = ()
Inherits EventDataBaseHeader EventDataEventsLost = ()
Inherits LoaderInitInfoBaseHeaderKHR LoaderInitInfoAndroidKHR = ()
Inherits parent child =
TypeError (ShowType parent :<>: Text " is not inherited by " :<>: ShowType child)
class Inheritable (a :: Type) where
peekSomeCChild :: Ptr (SomeChild a) -> IO (SomeChild a)
withSomeChild :: SomeChild a -> (Ptr (SomeChild a) -> IO b) -> IO b
withSomeChild (SomeChild c) f = withCStruct c (f . lowerChildPointer)
lowerChildPointer :: Inherits a b => Ptr b -> Ptr (SomeChild a)
lowerChildPointer = castPtr
|
cad4c2fbc856f2adb6030555ab19ccead2f6558ae4d9cb562923647d7dbfcfea | ijvcms/chuanqi_dev | misc_timer.erl | %%% -------------------------------------------------------------------
%%% Author : ming
%%% Description : time generator
%%%
%%% Created : 2012-12-2
%%% -------------------------------------------------------------------
-module(misc_timer).
-behaviour(gen_server).
%% --------------------------------------------------------------------
%% Include files
%% --------------------------------------------------------------------
%% --------------------------------------------------------------------
%% External exports
-export([now/0, now_seconds/0, now_milseconds/0, cpu_time/0, start_link/0, start/1, info/0]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-define(ETS_TIMER, ets_timer).
-define(CLOCK, 100).
%% ====================================================================
%% External functions
%% ====================================================================
now() ->
[{timer, {Now, _}}] = ets:lookup(?ETS_TIMER, timer),
Now.
now_seconds() ->
[{timer, {Now, _}}] = ets:lookup(?ETS_TIMER, timer),
{MegaSecs, Secs, _MicroSecs} = Now,
MegaSecs * 1000000 + Secs.
%% milliseconds
now_milseconds() ->
util_date:longunixtime().
cpu_time() ->
[{timer, {_, Wallclock_Time_Since_Last_Call}}] = ets:lookup(?ETS_TIMER, timer),
Wallclock_Time_Since_Last_Call.
info() ->
[
ets:info(ets_timer),
ets:tab2list(ets_timer)
].
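%% Editor's note (illustrative, not part of the original module): the server is
%% meant to run under a supervisor so callers read the cached clock instead of
%% calling erlang:now/0 on every lookup, e.g.
%%
%% {ok, _Pid} = misc_timer:start_link(),
%% Now = misc_timer:now(), %% cached {MegaSecs, Secs, MicroSecs}
%% Secs = misc_timer:now_seconds(),
%% Millis = misc_timer:now_milseconds().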
start(Sup) ->
supervisor:start_child(Sup,
{misc_timer,
{misc_timer, start_link, []},
permanent, brutal_kill, worker, [misc_timer]}).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% ====================================================================
%% Server functions
%% ====================================================================
%% --------------------------------------------------------------------
%% Function: init/1
%% Description: Initiates the server
%% Returns: {ok, State} |
%% {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% --------------------------------------------------------------------
init([]) ->
process_flag(trap_exit, true),
ets:new(?ETS_TIMER, [set, protected, named_table]),
ets:insert(?ETS_TIMER, {timer, {erlang:now(), 0}}),
erlang:send_after(?CLOCK, self(), {event, clock}),
{ok, []}.
%% --------------------------------------------------------------------
%% Function: handle_call/3
%% Description: Handling call messages
%% Returns: {reply, Reply, State} |
%% {reply, Reply, State, Timeout} |
%% {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} | (terminate/2 is called)
%% {stop, Reason, State} (terminate/2 is called)
%% --------------------------------------------------------------------
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
%% --------------------------------------------------------------------
%% Function: handle_cast/2
%% Description: Handling cast messages
%% Returns: {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State} (terminate/2 is called)
%% --------------------------------------------------------------------
handle_cast(_Msg, State) ->
{noreply, State}.
%% --------------------------------------------------------------------
%% Function: handle_info/2
%% Description: Handling all non call/cast messages
%% Returns: {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State} (terminate/2 is called)
%% --------------------------------------------------------------------
handle_info({event, clock}, State) ->
{_Total_Run_Time, Time_Since_Last_Call} = statistics(runtime),
ets:insert(?ETS_TIMER, {timer, {erlang:now(), Time_Since_Last_Call}}),
erlang:send_after(?CLOCK, self(), {event, clock}),
{noreply, State};
handle_info(_Info, State) ->
{noreply, State}.
%% --------------------------------------------------------------------
%% Function: terminate/2
%% Description: Shutdown the server
%% Returns: any (ignored by gen_server)
%% --------------------------------------------------------------------
terminate(_Reason, _State) ->
io:format("~p terminate~n", [?MODULE]),
ok.
%% --------------------------------------------------------------------
%% Func: code_change/3
%% Purpose: Convert process state when code is changed
%% Returns: {ok, NewState}
%% --------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% --------------------------------------------------------------------
%% Internal functions
%% --------------------------------------------------------------------
| null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/system/timer/misc_timer.erl | erlang | -------------------------------------------------------------------
Author : ming
-------------------------------------------------------------------
--------------------------------------------------------------------
Include files
--------------------------------------------------------------------
--------------------------------------------------------------------
External exports
gen_server callbacks
====================================================================
External functions
====================================================================
milliseconds
====================================================================
Server functions
====================================================================
--------------------------------------------------------------------
Function: init/1
Description: Initiates the server
Returns: {ok, State} |
ignore |
{stop, Reason}
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Handling call messages
Returns: {reply, Reply, State} |
{stop, Reason, Reply, State} | (terminate/2 is called)
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: handle_cast/2
Description: Handling cast messages
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: handle_info/2
Description: Handling all non call/cast messages
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: terminate/2
Description: Shutdown the server
Returns: any (ignored by gen_server)
--------------------------------------------------------------------
--------------------------------------------------------------------
Func: code_change/3
Purpose: Convert process state when code is changed
Returns: {ok, NewState}
--------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | Description : time generator
Created : 2012-12-2
-module(misc_timer).
-behaviour(gen_server).
-export([now/0, now_seconds/0, now_milseconds/0, cpu_time/0, start_link/0, start/1, info/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-define(ETS_TIMER, ets_timer).
-define(CLOCK, 100).
now() ->
[{timer, {Now, _}}] = ets:lookup(?ETS_TIMER, timer),
Now.
now_seconds() ->
[{timer, {Now, _}}] = ets:lookup(?ETS_TIMER, timer),
{MegaSecs, Secs, _MicroSecs} = Now,
MegaSecs * 1000000 + Secs.
now_milseconds() ->
util_date:longunixtime().
cpu_time() ->
[{timer, {_, Wallclock_Time_Since_Last_Call}}] = ets:lookup(?ETS_TIMER, timer),
Wallclock_Time_Since_Last_Call.
info() ->
[
ets:info(ets_timer),
ets:tab2list(ets_timer)
].
start(Sup) ->
supervisor:start_child(Sup,
{misc_timer,
{misc_timer, start_link, []},
permanent, brutal_kill, worker, [misc_timer]}).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% {ok, State, Timeout} |
init([]) ->
process_flag(trap_exit, true),
ets:new(?ETS_TIMER, [set, protected, named_table]),
ets:insert(?ETS_TIMER, {timer, {erlang:now(), 0}}),
erlang:send_after(?CLOCK, self(), {event, clock}),
{ok, []}.
%% Function: handle_call/3
%% {reply, Reply, State, Timeout} |
%% {noreply, State} |
%% {noreply, State, Timeout} |
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
%% Returns: {noreply, State} |
%% {noreply, State, Timeout} |
handle_cast(_Msg, State) ->
{noreply, State}.
%% Returns: {noreply, State} |
%% {noreply, State, Timeout} |
handle_info({event, clock}, State) ->
{_Total_Run_Time, Time_Since_Last_Call} = statistics(runtime),
ets:insert(?ETS_TIMER, {timer, {erlang:now(), Time_Since_Last_Call}}),
erlang:send_after(?CLOCK, self(), {event, clock}),
{noreply, State};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
io:format("~p terminate~n", [?MODULE]),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% Internal functions
|
635fc530a2f73b0e67d4462cfea7b7a490fb06c2d7937f2afd5cb111efea9fce | wh5a/thih | Desugar.hs | ------------------------------------------------------------------------------
{-
Copyright: The Hatchet Team (see file Contributors)
Module: Desugar
Description: Desugaring of the abstract syntax.
The main tasks implemented by this module are:
- pattern bindings are converted
into "simple" pattern bindings
(x, y, z) = foo
becomes
newVal = foo
x = (\(a, _, _) -> a) newVal
y = (\(_, a, _) -> a) newVal
z = (\(_, _, a) -> a) newVal
- do notation is converted into
expression form, using (>>) and
(>>=)
- type synonyms are removed
Primary Authors: Bernie Pope
Notes: See the file License for license information
According to the Haskell report a pattern
binding is called "simple" if it consists only
of a single variable - thus we convert all
pattern bindings to simple bindings.
-------------------------------------------------------------------------------}
module Desugar (desugarTidyModule, doToExp) where
import AnnotatedHsSyn -- everything
import TypeSynonyms (removeSynonymsFromType,
removeSynsFromSig)
import TidyModule (TidyModule (..))
-- (unique int, list of type synonyms)
type PatState = (Int, [AHsDecl])
readUnique :: PatSM Int
readUnique
= do
state <- readPatSM
return (fst state)
readSyns :: PatSM [AHsDecl]
readSyns
= do
state <- readPatSM
return (snd state)
incUnique :: PatSM ()
incUnique = updatePatSM (\(u, s) -> (u + 1, s))
data PatSM a = PatSM (PatState -> (a, PatState)) -- The monadic type
instance Monad PatSM where
-- defines state propagation
PatSM c1 >>= fc2 = PatSM (\s0 -> let (r,s1) = c1 s0
PatSM c2 = fc2 r in
c2 s1)
return k = PatSM (\s -> (k,s))
-- extracts the state from the monad
readPatSM :: PatSM PatState
readPatSM = PatSM (\s -> (s,s))
-- updates the state of the monad
updatePatSM :: (PatState -> PatState) -> PatSM () -- alters the state
updatePatSM f = PatSM (\s -> ((), f s))
-- run a computation in the PatSM monad
runPatSM :: PatState -> PatSM a -> (a, PatState)
runPatSM s0 (PatSM c) = c s0
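-- Editor's note (sketch, not in the original source): PatSM is a hand-rolled
-- state monad over (unique counter, type synonyms); a computation is run by
-- supplying an initial state, e.g.
--
-- > (uniq, finalState) = runPatSM (0 :: Int, []) readUnique
--
-- which returns the current counter and leaves the state unchanged, while
-- incUnique bumps the counter component.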
{------------------------------------------------------------------------------}
-- a new (unique) name introduced in pattern selector functions
newPatVarName :: AHsName
newPatVarName = AUnQual $ AHsIdent "newPatVar_From_Desugaring"
-- a new (unique) name introduced in expressions
newVarName :: AHsName
newVarName = AUnQual $ AHsIdent "newVar_From_Desugaring"
remSynsSig :: AHsDecl -> PatSM AHsDecl
remSynsSig sig
= do
syns <- readSyns
let newSig = removeSynsFromSig syns sig
return newSig
remSynsType :: AHsType -> PatSM AHsType
remSynsType t
= do
syns <- readSyns
let newType = removeSynonymsFromType syns t
return newType
{-
this function replaces all constructor-pattern bindings in a module with
function calls
ie:
(x, y) = head $ zip "abc" [1,2,3]
becomes
x = (\(a, _) -> a) rhs1
y = (\(_, a) -> a) rhs1
rhs1 = head $ zip "abc" [1,2,3]
-}
-- first argument is imported synonyms
desugarTidyModule :: [AHsDecl] -> TidyModule -> TidyModule
desugarTidyModule importSyns tidy
= newTidy
where
(newTidy, _) = runPatSM (0::Int, synonyms) $ desugarTidyModuleM tidy
synonyms = tidyTyDecls tidy ++ importSyns
desugarTidyModuleM :: TidyModule -> PatSM TidyModule
desugarTidyModuleM tidy
= do let oldTyDecls = tidyTyDecls tidy
oldDataDecls = tidyDataDecls tidy
oldInFixDecls = tidyInFixDecls tidy
oldNewTyDecls = tidyNewTyDecls tidy
oldClassDecls = tidyClassDecls tidy
oldInstDecls = tidyInstDecls tidy
oldDefs = tidyDefDecls tidy
oldTySigs = tidyTySigs tidy
oldFunBinds = tidyFunBinds tidy
oldPatBinds = tidyPatBinds tidy
-- newTyDecls <- desugarDecl oldTyDecls
newDataDecls <- mapM desugarDecl oldDataDecls
newInFixDecls <- mapM desugarDecl oldInFixDecls
newNewTyDecls <- mapM desugarDecl oldNewTyDecls
newClassDecls <- mapM desugarDecl oldClassDecls
newInstDecls <- mapM desugarDecl oldInstDecls
newDefs <- mapM desugarDecl oldDefs
newTySigs <- mapM desugarDecl oldTySigs
newFunBinds <- mapM desugarDecl oldFunBinds
newPatBinds <- mapM desugarDecl oldPatBinds
return tidy{tidyTyDecls = [], -- return the empty list of synonyms, we don't need them anymore
tidyDataDecls = concat newDataDecls,
tidyInFixDecls = concat newInFixDecls,
tidyNewTyDecls = concat newNewTyDecls,
tidyClassDecls = concat newClassDecls,
tidyInstDecls = concat newInstDecls,
tidyDefDecls = concat newDefs,
tidyTySigs = concat newTySigs,
tidyFunBinds = concat newFunBinds,
tidyPatBinds = concat newPatBinds}
desugarDecl :: AHsDecl -> PatSM [AHsDecl]
-- desugarDecl (sloc matches)
desugarDecl (AHsFunBind matches)
= do
newMatches <- mapM desugarMatch matches
-- return [AHsFunBind sloc]
return [AHsFunBind newMatches]
-- variable pattern bindings remain unchanged
desugarDecl pb@(AHsPatBind sloc (AHsPVar n) rhs wheres)
-- = return [pb]
= do
newRhs <- desugarRhs rhs
newWheres <- mapM desugarDecl wheres
return [AHsPatBind sloc (AHsPVar n) newRhs (concat newWheres)]
-- constructor and tuple pattern bindings must be changed
-- XXX bjpop: what about nested parenthesised patterns that just bind
-- variables?
desugarDecl pb@(AHsPatBind sloc pat rhs wheres)
= do
unique <- readUnique -- these must be done
incUnique -- together
let newRhsName = AUnQual $ AHsIdent $ "newPatRhs_From_Desugaring" ++ show unique
newWheres <- mapM desugarDecl wheres
let newTopDeclForRhs
= AHsPatBind bogusASrcLoc (AHsPVar newRhsName) rhs (concat newWheres)
let newBinds = genBindsForPat pat sloc newRhsName
return (newTopDeclForRhs : newBinds)
desugarDecl (AHsClassDecl sloc qualtype decls)
= do
newDecls <- mapM desugarDecl decls
return [AHsClassDecl sloc qualtype (concat newDecls)]
desugarDecl (AHsInstDecl sloc qualtype decls)
= do
newQualType <- remSynsQualType qualtype
newDecls <- mapM desugarDecl decls
return [AHsInstDecl sloc newQualType (concat newDecls)]
desugarDecl sig@(AHsTypeSig _sloc _names _qualType)
= do
newSig <- remSynsSig sig
return [newSig]
desugarDecl (AHsDataDecl sloc cntxt name args condecls derives)
= do
newConDecls <- mapM remSynsFromCondecl condecls
return [(AHsDataDecl sloc cntxt name args newConDecls derives)]
desugarDecl anyOtherDecl = return [anyOtherDecl]
remSynsFromCondecl :: AHsConDecl -> PatSM AHsConDecl
remSynsFromCondecl (AHsConDecl sloc name bangTypes)
= do
newBangTypes <- mapM remSynsFromBangType bangTypes
return (AHsConDecl sloc name newBangTypes)
remSynsFromCondecl (AHsRecDecl _ _ _)
= error $ "remSynsFromCondecl (AHsRecDecl _ _ _) not implemented"
remSynsFromBangType :: AHsBangType -> PatSM AHsBangType
remSynsFromBangType (AHsBangedTy t)
= do
newType <- remSynsType t
return (AHsBangedTy newType)
remSynsFromBangType (AHsUnBangedTy t)
= do
newType <- remSynsType t
return (AHsUnBangedTy newType)
desugarMatch :: (AHsMatch) -> PatSM (AHsMatch)
desugarMatch (AHsMatch sloc funName pats rhs wheres)
= do
newWheres <- mapM desugarDecl wheres
newRhs <- desugarRhs rhs
return (AHsMatch sloc funName pats newRhs (concat newWheres))
-- generate the pattern bindings for each variable in a pattern
genBindsForPat :: AHsPat -> ASrcLoc -> AHsName -> [AHsDecl]
genBindsForPat pat sloc rhsName
= [AHsPatBind sloc (AHsPVar patName) (AHsUnGuardedRhs (AHsApp selector (AHsVar rhsName))) [] | (patName, selector) <- selFuns]
where
selFuns = getPatSelFuns pat
-- generate selector functions for each of the variables that
-- are bound in a pattern
getPatSelFuns :: AHsPat -> [(AHsName, (AHsExp))]
getPatSelFuns pat
= [(varName, AHsParen (AHsLambda bogusASrcLoc [replaceVarNamesInPat varName pat] (AHsVar newPatVarName ))) | varName <- patVarNames pat]
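-- Editor's note (sketch, not in the original source): for a binding such as
-- (x, y) = rhs, getPatSelFuns yields one selector per bound variable,
-- conceptually
-- x --> (\(newPatVar_From_Desugaring, _) -> newPatVar_From_Desugaring)
-- y --> (\(_, newPatVar_From_Desugaring) -> newPatVar_From_Desugaring)
-- and genBindsForPat applies each selector to the shared right-hand side name.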
-- returns the names of variables bound in a pattern
-- XXX bjpop: do as patterns work properly?
patVarNames :: AHsPat -> [AHsName]
patVarNames (AHsPVar name) = [name]
patVarNames (AHsPLit _) = []
patVarNames (AHsPNeg pat) = patVarNames pat
patVarNames (AHsPInfixApp pat1 conName pat2)
= patVarNames pat1 ++ patVarNames pat2
patVarNames (AHsPApp conName pats)
= concatMap patVarNames pats
patVarNames (AHsPTuple pats)
= concatMap patVarNames pats
patVarNames (AHsPList pats)
= concatMap patVarNames pats
patVarNames (AHsPParen pat)
= patVarNames pat
patVarNames (AHsPRec _ _) = error "patVarNames (AHsPRec _ _): not implemented "
patVarNames (AHsPAsPat asName pat)
= asName : patVarNames pat
patVarNames AHsPWildCard = []
patVarNames (AHsPIrrPat pat)
= patVarNames pat
-- replaces all occurrences of a name with a new variable
-- and every other name with underscore
replaceVarNamesInPat :: AHsName -> AHsPat -> AHsPat
replaceVarNamesInPat name1 (AHsPVar name2)
| name1 == name2 = AHsPVar $ newPatVarName
| otherwise = AHsPWildCard
replaceVarNamesInPat _ p@(AHsPLit _) = p
replaceVarNamesInPat name (AHsPNeg pat)
= AHsPNeg $ replaceVarNamesInPat name pat
replaceVarNamesInPat name (AHsPInfixApp pat1 conName pat2)
= AHsPInfixApp (replaceVarNamesInPat name pat1) conName (replaceVarNamesInPat name pat2)
replaceVarNamesInPat name (AHsPApp conName pats)
= AHsPApp conName (map (replaceVarNamesInPat name) pats)
replaceVarNamesInPat name (AHsPTuple pats)
= AHsPTuple (map (replaceVarNamesInPat name) pats)
replaceVarNamesInPat name (AHsPList pats)
= AHsPList (map (replaceVarNamesInPat name) pats)
replaceVarNamesInPat name (AHsPParen pat)
= AHsPParen (replaceVarNamesInPat name pat)
replaceVarNamesInPat name (AHsPRec _ _)
= error "replaceVarNamesInPat name (AHsPRec _ _): not implemented"
replaceVarNamesInPat name (AHsPAsPat asName pat)
| name == asName = AHsPAsPat newPatVarName (replaceVarNamesInPat name pat)
| otherwise = replaceVarNamesInPat name pat
replaceVarNamesInPat name AHsPWildCard = AHsPWildCard
replaceVarNamesInPat name (AHsPIrrPat pat)
= AHsPIrrPat $ replaceVarNamesInPat name pat
desugarRhs :: (AHsRhs) -> PatSM (AHsRhs)
desugarRhs (AHsUnGuardedRhs e)
= do
newE <- desugarExp e
return (AHsUnGuardedRhs newE)
desugarRhs (AHsGuardedRhss gRhss)
= do
newRhss <- mapM desugarGRhs gRhss
return (AHsGuardedRhss newRhss)
desugarGRhs :: AHsGuardedRhs -> PatSM (AHsGuardedRhs)
desugarGRhs (AHsGuardedRhs sloc e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsGuardedRhs sloc newE1 newE2)
desugarExp :: (AHsExp) -> PatSM (AHsExp)
desugarExp e@(AHsVar name)
= return e
desugarExp e@(AHsCon name)
= return e
desugarExp e@(AHsLit l)
= return e
desugarExp (AHsInfixApp e1 e2 e3)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
newE3 <- desugarExp e3
return (AHsInfixApp newE1 newE2 newE3)
desugarExp (AHsApp e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsApp newE1 newE2)
desugarExp (AHsNegApp e)
= do
newE <- desugarExp e
return (AHsNegApp newE)
desugarExp (AHsLambda sloc pats e)
= do
newE <- desugarExp e
return (AHsLambda sloc pats newE)
desugarExp (AHsLet decls e)
= do
newDecls <- mapM desugarDecl decls
newE <- desugarExp e
return (AHsLet (concat newDecls) newE)
desugarExp (AHsIf e1 e2 e3)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
newE3 <- desugarExp e3
return (AHsIf newE1 newE2 newE3)
desugarExp (AHsCase e alts)
= do
newE <- desugarExp e
newAlts <- mapM desugarAlt alts
return (AHsCase newE newAlts)
desugarExp (AHsDo stmts)
= do
newStmts <- mapM desugarStmt stmts
return (doToExp newStmts)
desugarExp (AHsTuple exps)
= do
newExps <- mapM desugarExp exps
return (AHsTuple newExps)
desugarExp (AHsList exps)
= do
newExps <- mapM desugarExp exps
return (AHsList newExps)
desugarExp (AHsParen e)
= do
newE <- desugarExp e
return (AHsParen newE)
desugarExp (AHsLeftSection e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsLeftSection newE1 newE2)
desugarExp (AHsRightSection e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsRightSection newE1 newE2)
desugarExp (AHsRecConstr _ _)
= error "desugarExp (AHsRecConstr _ _): not implemented"
desugarExp (AHsRecUpdate _ _)
= error "desugarExp (AHsRecUpdate _ _): not implemented"
desugarExp (AHsEnumFrom e)
= do
newE <- desugarExp e
return (AHsEnumFrom newE)
desugarExp (AHsEnumFromTo e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsEnumFromTo newE1 newE2)
desugarExp (AHsEnumFromThen e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsEnumFromThen newE1 newE2)
desugarExp (AHsEnumFromThenTo e1 e2 e3)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
newE3 <- desugarExp e3
return (AHsEnumFromThenTo newE1 newE2 newE3)
desugarExp (AHsListComp e stmts)
= do
newE <- desugarExp e
newStmts <- mapM desugarStmt stmts
return (AHsListComp newE newStmts)
-- e :: t ---> let {v :: t, v = e} in e
{-
desugarExp (AHsExpTypeSig sloc e qualType)
   = do
        newE <- desugarExp e
        newQualType <- remSynsQualType qualType
        return (AHsExpTypeSig sloc newE newQualType)
-}
desugarExp (AHsExpTypeSig sloc e qualType)
= do
newE <- desugarExp e
newQualType <- remSynsQualType qualType
let newTypeSig = AHsTypeSig bogusASrcLoc [newVarName] newQualType
let newVarDecl = AHsPatBind bogusASrcLoc
(AHsPVar newVarName)
(AHsUnGuardedRhs newE) []
return (AHsLet [newTypeSig, newVarDecl] (AHsVar newVarName))
desugarExp (AHsAsPat name e)
= do
newE <- desugarExp e
        return (AHsAsPat name newE)
desugarExp AHsWildCard
= return AHsWildCard
desugarExp (AHsIrrPat e)
= do
newE <- desugarExp e
return (AHsIrrPat newE)
desugarAlt :: (AHsAlt) -> PatSM (AHsAlt)
desugarAlt (AHsAlt sloc pat gAlts wheres)
= do
newGAlts <- desugarGAlts gAlts
newWheres <- mapM desugarDecl wheres
return (AHsAlt sloc pat newGAlts (concat newWheres))
desugarGAlts :: (AHsGuardedAlts) -> PatSM (AHsGuardedAlts)
desugarGAlts (AHsUnGuardedAlt e)
= do
newE <- desugarExp e
return (AHsUnGuardedAlt newE)
desugarGAlts (AHsGuardedAlts gAlts)
= do
newGAlts <- mapM desugarGuardedAlt gAlts
return (AHsGuardedAlts newGAlts)
desugarGuardedAlt :: (AHsGuardedAlt) -> PatSM (AHsGuardedAlt)
desugarGuardedAlt (AHsGuardedAlt sloc e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsGuardedAlt sloc newE1 newE2)
desugarStmt :: (AHsStmt) -> PatSM (AHsStmt)
desugarStmt (AHsGenerator srcLoc pat e)
= do
newE <- desugarExp e
return (AHsGenerator srcLoc pat newE)
desugarStmt (AHsQualifier e)
= do
newE <- desugarExp e
return (AHsQualifier newE)
desugarStmt (AHsLetStmt decls)
= do
newDecls <- mapM desugarDecl decls
return (AHsLetStmt $ concat newDecls)
remSynsQualType :: AHsQualType -> PatSM AHsQualType
remSynsQualType qualtype
= case qualtype of
AHsQualType cntxt t
-> do
newT <- remSynsType t
return (AHsQualType cntxt newT)
AHsUnQualType t
-> do
newT <- remSynsType t
return (AHsUnQualType newT)
--------------------------------------------------------------------------------
-- desugar the do-notation
-- flatten out do notation into an expression
-- involving ">>" and ">>="
doToExp :: [AHsStmt] -> AHsExp
doToExp [] = error "doToExp: empty statements in do notation"
doToExp [AHsQualifier e] = e
doToExp [gen@(AHsGenerator srcLoc _pat _e)]
    = error $ "doToExp: last expression in do notation is a generator (srcLoc):" ++ show srcLoc
doToExp [letst@(AHsLetStmt _decls)]
    = error $ "doToExp: last expression in do notation is a let statement"
doToExp ((AHsQualifier e):ss)
= AHsInfixApp (AHsParen e) (AHsVar (AUnQual (AHsSymbol ">>"))) (doToExp ss)
doToExp ((AHsGenerator _srcLoc pat e):ss)
= AHsInfixApp (AHsParen e) (AHsVar (AUnQual (AHsSymbol ">>="))) (AHsLambda bogusASrcLoc [pat] (doToExp ss))
doToExp ((AHsLetStmt decls):ss)
= AHsLet decls (doToExp ss)
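-- Worked example (added for illustration only; not part of the original
-- Hatchet source). A hand-built statement list and the expression doToExp
-- flattens it into; "l", "getLine" and "putStrLn" are arbitrary names:
--
--   doToExp [ AHsGenerator bogusASrcLoc
--                          (AHsPVar (AUnQual (AHsIdent "l")))
--                          (AHsVar (AUnQual (AHsIdent "getLine")))
--           , AHsQualifier (AHsApp (AHsVar (AUnQual (AHsIdent "putStrLn")))
--                                  (AHsVar (AUnQual (AHsIdent "l"))))
--           ]
--
-- builds the AST of:  (getLine) >>= \l -> putStrLn l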
| null | https://raw.githubusercontent.com/wh5a/thih/dc5cb16ba4e998097135beb0c7b0b416cac7bfae/hatchet/Desugar.hs | haskell | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
everything
(unique int, list of type synonyms)
The monadic type
defines state propagation
extracts the state from the monad
updates the state of the monad
alters the state
run a computation in the PatSM monad
----------------------------------------------------------------------------
a new (unique) name introduced in pattern selector functions
a new (unique) name introduced in expressions
return the empty list of synonyms, we don't need them anymore
variable pattern bindings remain unchanged
= return [pb]
constructor and tuple pattern bindings must be changed
XXX bjpop: what about nested parenthesised patterns that just bind
variables?
these must be done
together
generate the pattern bindings for each variable in a pattern
generate selector functions for each of the variables that
are bound in a pattern
returns the names of variables bound in a pattern
XXX bjpop: do as patterns work properly?
replaces all occurrences of a name with a new variable
and every other name with underscore
e :: t ---> let {v :: t, v = e} in e
------------------------------------------------------------------------------
desugar the do-notation
flatten out do notation into an expression
involving ">>" and ">>=" |
Copyright: The Hatchet Team (see file Contributors)
Module: Desugar
Description: Desugaring of the abstract syntax.
The main tasks implemented by this module are:
- pattern bindings are converted
into "simple" pattern bindings
(x, y, z) = foo
becomes
newVal = foo
x = (\(a, _, _) -> a) newVal
y = (\(_, a, _) -> a) newVal
z = (\(_, _, a) -> a) newVal
- do notation is converted into
expression form, using (>>) and
(>>=)
- type synonyms are removed
Primary Authors: Bernie Pope
Notes: See the file License for license information
According to the Haskell report a pattern
binding is called "simple" if it consists only
of a single variable - thus we convert all
pattern bindings to simple bindings.
module Desugar (desugarTidyModule, doToExp) where
import TypeSynonyms (removeSynonymsFromType,
removeSynsFromSig)
import TidyModule (TidyModule (..))
type PatState = (Int, [AHsDecl])
newtype PatSM a = PatSM (PatState -> (a, PatState)) -- reconstructed: the state-monad wrapper used by the Monad instance and runPatSM below
readUnique :: PatSM Int
readUnique
= do
state <- readPatSM
return (fst state)
readSyns :: PatSM [AHsDecl]
readSyns
= do
state <- readPatSM
return (snd state)
incUnique :: PatSM ()
incUnique = updatePatSM (\(u, s) -> (u + 1, s))
instance Monad PatSM where
PatSM c1 >>= fc2 = PatSM (\s0 -> let (r,s1) = c1 s0
PatSM c2 = fc2 r in
c2 s1)
return k = PatSM (\s -> (k,s))
readPatSM :: PatSM PatState
readPatSM = PatSM (\s -> (s,s))
updatePatSM f = PatSM (\s -> ((), f s))
runPatSM :: PatState -> PatSM a -> (a, PatState)
runPatSM s0 (PatSM c) = c s0
newPatVarName :: AHsName
newPatVarName = AUnQual $ AHsIdent "newPatVar_From_Desugaring"
newVarName :: AHsName
newVarName = AUnQual $ AHsIdent "newVar_From_Desugaring"
remSynsSig :: AHsDecl -> PatSM AHsDecl
remSynsSig sig
= do
syns <- readSyns
let newSig = removeSynsFromSig syns sig
return newSig
remSynsType :: AHsType -> PatSM AHsType
remSynsType t
= do
syns <- readSyns
let newType = removeSynonymsFromType syns t
return newType
{-
this function replaces all constructor-pattern bindings in a module with
function calls
ie:
(x, y) = head $ zip "abc" [1,2,3]
becomes
x = (\(a, _) -> a) rhs1
y = (\(_, a) -> a) rhs1
rhs1 = head $ zip "abc" [1,2,3]
-}
-- first argument is imported synonyms
desugarTidyModule :: [AHsDecl] -> TidyModule -> TidyModule
desugarTidyModule importSyns tidy
= newTidy
where
(newTidy, _) = runPatSM (0::Int, synonyms) $ desugarTidyModuleM tidy
synonyms = tidyTyDecls tidy ++ importSyns
desugarTidyModuleM :: TidyModule -> PatSM TidyModule
desugarTidyModuleM tidy
   = do
        let oldTyDecls    = tidyTyDecls tidy
            oldDataDecls  = tidyDataDecls tidy
            oldInFixDecls = tidyInFixDecls tidy
            oldNewTyDecls = tidyNewTyDecls tidy
            oldClassDecls = tidyClassDecls tidy
            oldInstDecls  = tidyInstDecls tidy
            oldDefs       = tidyDefDecls tidy
            oldTySigs     = tidyTySigs tidy
            oldFunBinds   = tidyFunBinds tidy
            oldPatBinds   = tidyPatBinds tidy
        newTyDecls    <- mapM desugarDecl oldTyDecls
        newDataDecls  <- mapM desugarDecl oldDataDecls
        newInFixDecls <- mapM desugarDecl oldInFixDecls
        newNewTyDecls <- mapM desugarDecl oldNewTyDecls
        newClassDecls <- mapM desugarDecl oldClassDecls
        newInstDecls  <- mapM desugarDecl oldInstDecls
        newDefs       <- mapM desugarDecl oldDefs
        newTySigs     <- mapM desugarDecl oldTySigs
        newFunBinds   <- mapM desugarDecl oldFunBinds
        newPatBinds   <- mapM desugarDecl oldPatBinds
        return tidy{tidyTyDecls    = concat newTyDecls,
                    tidyDataDecls  = concat newDataDecls,
                    tidyInFixDecls = concat newInFixDecls,
                    tidyNewTyDecls = concat newNewTyDecls,
                    tidyClassDecls = concat newClassDecls,
                    tidyInstDecls  = concat newInstDecls,
                    tidyDefDecls   = concat newDefs,
                    tidyTySigs     = concat newTySigs,
                    tidyFunBinds   = concat newFunBinds,
                    tidyPatBinds   = concat newPatBinds}
desugarDecl :: AHsDecl -> PatSM [AHsDecl]
desugarDecl (AHsFunBind matches)
   = do
        newMatches <- mapM desugarMatch matches
        return [AHsFunBind newMatches]
desugarDecl pb@(AHsPatBind sloc (AHsPVar n) rhs wheres)
= do
newRhs <- desugarRhs rhs
newWheres <- mapM desugarDecl wheres
return [AHsPatBind sloc (AHsPVar n) newRhs (concat newWheres)]
desugarDecl pb@(AHsPatBind sloc pat rhs wheres)
   = do
        unique <- readUnique
        incUnique
        let newRhsName = AUnQual $ AHsIdent $ "newPatRhs_From_Desugaring" ++ show unique
newWheres <- mapM desugarDecl wheres
let newTopDeclForRhs
= AHsPatBind bogusASrcLoc (AHsPVar newRhsName) rhs (concat newWheres)
let newBinds = genBindsForPat pat sloc newRhsName
return (newTopDeclForRhs : newBinds)
desugarDecl (AHsClassDecl sloc qualtype decls)
= do
newDecls <- mapM desugarDecl decls
return [AHsClassDecl sloc qualtype (concat newDecls)]
desugarDecl (AHsInstDecl sloc qualtype decls)
= do
newQualType <- remSynsQualType qualtype
newDecls <- mapM desugarDecl decls
return [AHsInstDecl sloc newQualType (concat newDecls)]
desugarDecl sig@(AHsTypeSig _sloc _names _qualType)
= do
newSig <- remSynsSig sig
return [newSig]
desugarDecl (AHsDataDecl sloc cntxt name args condecls derives)
= do
newConDecls <- mapM remSynsFromCondecl condecls
return [(AHsDataDecl sloc cntxt name args newConDecls derives)]
desugarDecl anyOtherDecl = return [anyOtherDecl]
remSynsFromCondecl :: AHsConDecl -> PatSM AHsConDecl
remSynsFromCondecl (AHsConDecl sloc name bangTypes)
= do
newBangTypes <- mapM remSynsFromBangType bangTypes
return (AHsConDecl sloc name newBangTypes)
remSynsFromCondecl (AHsRecDecl _ _ _)
= error $ "remSynsFromCondecl (AHsRecDecl _ _ _) not implemented"
remSynsFromBangType :: AHsBangType -> PatSM AHsBangType
remSynsFromBangType (AHsBangedTy t)
= do
newType <- remSynsType t
return (AHsBangedTy newType)
remSynsFromBangType (AHsUnBangedTy t)
= do
newType <- remSynsType t
return (AHsUnBangedTy newType)
desugarMatch :: (AHsMatch) -> PatSM (AHsMatch)
desugarMatch (AHsMatch sloc funName pats rhs wheres)
= do
newWheres <- mapM desugarDecl wheres
newRhs <- desugarRhs rhs
return (AHsMatch sloc funName pats newRhs (concat newWheres))
genBindsForPat :: AHsPat -> ASrcLoc -> AHsName -> [AHsDecl]
genBindsForPat pat sloc rhsName
= [AHsPatBind sloc (AHsPVar patName) (AHsUnGuardedRhs (AHsApp selector (AHsVar rhsName))) [] | (patName, selector) <- selFuns]
where
selFuns = getPatSelFuns pat
getPatSelFuns :: AHsPat -> [(AHsName, (AHsExp))]
getPatSelFuns pat
= [(varName, AHsParen (AHsLambda bogusASrcLoc [replaceVarNamesInPat varName pat] (AHsVar newPatVarName ))) | varName <- patVarNames pat]
patVarNames :: AHsPat -> [AHsName]
patVarNames (AHsPVar name) = [name]
patVarNames (AHsPLit _) = []
patVarNames (AHsPNeg pat) = patVarNames pat
patVarNames (AHsPInfixApp pat1 conName pat2)
= patVarNames pat1 ++ patVarNames pat2
patVarNames (AHsPApp conName pats)
= concatMap patVarNames pats
patVarNames (AHsPTuple pats)
= concatMap patVarNames pats
patVarNames (AHsPList pats)
= concatMap patVarNames pats
patVarNames (AHsPParen pat)
= patVarNames pat
patVarNames (AHsPRec _ _) = error "patVarNames (AHsPRec _ _): not implemented "
patVarNames (AHsPAsPat asName pat)
= asName : patVarNames pat
patVarNames AHsPWildCard = []
patVarNames (AHsPIrrPat pat)
= patVarNames pat
replaceVarNamesInPat :: AHsName -> AHsPat -> AHsPat
replaceVarNamesInPat name1 (AHsPVar name2)
| name1 == name2 = AHsPVar $ newPatVarName
| otherwise = AHsPWildCard
replaceVarNamesInPat _ p@(AHsPLit _) = p
replaceVarNamesInPat name (AHsPNeg pat)
= AHsPNeg $ replaceVarNamesInPat name pat
replaceVarNamesInPat name (AHsPInfixApp pat1 conName pat2)
= AHsPInfixApp (replaceVarNamesInPat name pat1) conName (replaceVarNamesInPat name pat2)
replaceVarNamesInPat name (AHsPApp conName pats)
= AHsPApp conName (map (replaceVarNamesInPat name) pats)
replaceVarNamesInPat name (AHsPTuple pats)
= AHsPTuple (map (replaceVarNamesInPat name) pats)
replaceVarNamesInPat name (AHsPList pats)
= AHsPList (map (replaceVarNamesInPat name) pats)
replaceVarNamesInPat name (AHsPParen pat)
= AHsPParen (replaceVarNamesInPat name pat)
replaceVarNamesInPat name (AHsPRec _ _)
= error "replaceVarNamesInPat name (AHsPRec _ _): not implemented"
replaceVarNamesInPat name (AHsPAsPat asName pat)
| name == asName = AHsPAsPat newPatVarName (replaceVarNamesInPat name pat)
| otherwise = replaceVarNamesInPat name pat
replaceVarNamesInPat name AHsPWildCard = AHsPWildCard
replaceVarNamesInPat name (AHsPIrrPat pat)
= AHsPIrrPat $ replaceVarNamesInPat name pat
desugarRhs :: (AHsRhs) -> PatSM (AHsRhs)
desugarRhs (AHsUnGuardedRhs e)
= do
newE <- desugarExp e
return (AHsUnGuardedRhs newE)
desugarRhs (AHsGuardedRhss gRhss)
= do
newRhss <- mapM desugarGRhs gRhss
return (AHsGuardedRhss newRhss)
desugarGRhs :: AHsGuardedRhs -> PatSM (AHsGuardedRhs)
desugarGRhs (AHsGuardedRhs sloc e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsGuardedRhs sloc newE1 newE2)
desugarExp :: (AHsExp) -> PatSM (AHsExp)
desugarExp e@(AHsVar name)
= return e
desugarExp e@(AHsCon name)
= return e
desugarExp e@(AHsLit l)
= return e
desugarExp (AHsInfixApp e1 e2 e3)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
newE3 <- desugarExp e3
return (AHsInfixApp newE1 newE2 newE3)
desugarExp (AHsApp e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsApp newE1 newE2)
desugarExp (AHsNegApp e)
= do
newE <- desugarExp e
return (AHsNegApp newE)
desugarExp (AHsLambda sloc pats e)
= do
newE <- desugarExp e
return (AHsLambda sloc pats newE)
desugarExp (AHsLet decls e)
= do
newDecls <- mapM desugarDecl decls
newE <- desugarExp e
return (AHsLet (concat newDecls) newE)
desugarExp (AHsIf e1 e2 e3)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
newE3 <- desugarExp e3
return (AHsIf newE1 newE2 newE3)
desugarExp (AHsCase e alts)
= do
newE <- desugarExp e
newAlts <- mapM desugarAlt alts
return (AHsCase newE newAlts)
desugarExp (AHsDo stmts)
= do
newStmts <- mapM desugarStmt stmts
return (doToExp newStmts)
desugarExp (AHsTuple exps)
= do
newExps <- mapM desugarExp exps
return (AHsTuple newExps)
desugarExp (AHsList exps)
= do
newExps <- mapM desugarExp exps
return (AHsList newExps)
desugarExp (AHsParen e)
= do
newE <- desugarExp e
return (AHsParen newE)
desugarExp (AHsLeftSection e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsLeftSection newE1 newE2)
desugarExp (AHsRightSection e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsRightSection newE1 newE2)
desugarExp (AHsRecConstr _ _)
= error "desugarExp (AHsRecConstr _ _): not implemented"
desugarExp (AHsRecUpdate _ _)
= error "desugarExp (AHsRecUpdate _ _): not implemented"
desugarExp (AHsEnumFrom e)
= do
newE <- desugarExp e
return (AHsEnumFrom newE)
desugarExp (AHsEnumFromTo e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsEnumFromTo newE1 newE2)
desugarExp (AHsEnumFromThen e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsEnumFromThen newE1 newE2)
desugarExp (AHsEnumFromThenTo e1 e2 e3)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
newE3 <- desugarExp e3
return (AHsEnumFromThenTo newE1 newE2 newE3)
desugarExp (AHsListComp e stmts)
= do
newE <- desugarExp e
newStmts <- mapM desugarStmt stmts
return (AHsListComp newE newStmts)
{-
desugarExp (AHsExpTypeSig sloc e qualType)
   = do
        newE <- desugarExp e
        newQualType <- remSynsQualType qualType
        return (AHsExpTypeSig sloc newE newQualType)
-}
desugarExp (AHsExpTypeSig sloc e qualType)
= do
newE <- desugarExp e
newQualType <- remSynsQualType qualType
let newTypeSig = AHsTypeSig bogusASrcLoc [newVarName] newQualType
let newVarDecl = AHsPatBind bogusASrcLoc
(AHsPVar newVarName)
(AHsUnGuardedRhs newE) []
return (AHsLet [newTypeSig, newVarDecl] (AHsVar newVarName))
desugarExp (AHsAsPat name e)
= do
newE <- desugarExp e
        return (AHsAsPat name newE)
desugarExp AHsWildCard
= return AHsWildCard
desugarExp (AHsIrrPat e)
= do
newE <- desugarExp e
return (AHsIrrPat newE)
desugarAlt :: (AHsAlt) -> PatSM (AHsAlt)
desugarAlt (AHsAlt sloc pat gAlts wheres)
= do
newGAlts <- desugarGAlts gAlts
newWheres <- mapM desugarDecl wheres
return (AHsAlt sloc pat newGAlts (concat newWheres))
desugarGAlts :: (AHsGuardedAlts) -> PatSM (AHsGuardedAlts)
desugarGAlts (AHsUnGuardedAlt e)
= do
newE <- desugarExp e
return (AHsUnGuardedAlt newE)
desugarGAlts (AHsGuardedAlts gAlts)
= do
newGAlts <- mapM desugarGuardedAlt gAlts
return (AHsGuardedAlts newGAlts)
desugarGuardedAlt :: (AHsGuardedAlt) -> PatSM (AHsGuardedAlt)
desugarGuardedAlt (AHsGuardedAlt sloc e1 e2)
= do
newE1 <- desugarExp e1
newE2 <- desugarExp e2
return (AHsGuardedAlt sloc newE1 newE2)
desugarStmt :: (AHsStmt) -> PatSM (AHsStmt)
desugarStmt (AHsGenerator srcLoc pat e)
= do
newE <- desugarExp e
return (AHsGenerator srcLoc pat newE)
desugarStmt (AHsQualifier e)
= do
newE <- desugarExp e
return (AHsQualifier newE)
desugarStmt (AHsLetStmt decls)
= do
newDecls <- mapM desugarDecl decls
return (AHsLetStmt $ concat newDecls)
remSynsQualType :: AHsQualType -> PatSM AHsQualType
remSynsQualType qualtype
= case qualtype of
AHsQualType cntxt t
-> do
newT <- remSynsType t
return (AHsQualType cntxt newT)
AHsUnQualType t
-> do
newT <- remSynsType t
return (AHsUnQualType newT)
doToExp :: [AHsStmt] -> AHsExp
doToExp [] = error "doToExp: empty statements in do notation"
doToExp [AHsQualifier e] = e
doToExp [gen@(AHsGenerator srcLoc _pat _e)]
    = error $ "doToExp: last expression in do notation is a generator (srcLoc):" ++ show srcLoc
doToExp [letst@(AHsLetStmt _decls)]
    = error $ "doToExp: last expression in do notation is a let statement"
doToExp ((AHsQualifier e):ss)
= AHsInfixApp (AHsParen e) (AHsVar (AUnQual (AHsSymbol ">>"))) (doToExp ss)
doToExp ((AHsGenerator _srcLoc pat e):ss)
= AHsInfixApp (AHsParen e) (AHsVar (AUnQual (AHsSymbol ">>="))) (AHsLambda bogusASrcLoc [pat] (doToExp ss))
doToExp ((AHsLetStmt decls):ss)
= AHsLet decls (doToExp ss)
|
e86b421fb12a69c523606d7480f9fc802af593b929e19fa2dfca190a05d4ed14 | psandahl/hats | Parser.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
Module : Network . . Message .
Copyright : ( c ) 2016
License : MIT
Maintainer : < >
-- Stability: experimental
-- Portability: portable
--
-- NATS protocol 'Message' parser. To be used with the
" Data . Attoparsec . ByteString " library .
module Network.Nats.Message.Parser
( parseMessage
) where
import Control.Applicative ((<|>))
import Control.Monad (void)
import Data.Attoparsec.ByteString.Char8
import Data.ByteString.Char8 (ByteString)
import qualified Data.Attoparsec.ByteString.Char8 as AP
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Network.Nats.Message.Message ( Message (..)
, ProtocolError (..)
)
import Network.Nats.Types (Sid)
data HandshakeMessageValue =
Bool !Bool
| String !ByteString
| Int !Int
deriving Show
type HandshakeMessageField = (ByteString, HandshakeMessageValue)
-- | Parse a message.
parseMessage :: Parser Message
parseMessage = skipSpace *> parseMessage'
where
parseMessage' = msgMessage
<|> infoMessage
<|> connectMessage
<|> pubMessage
<|> subMessage
<|> unsubMessage
<|> pingMessage
<|> pongMessage
<|> okMessage
<|> errMessage
-- | The parsing of the Info message is not performance critical.
infoMessage :: Parser Message
infoMessage = do
spacedMsgName "INFO"
void $ char '{'
fields <- parseInfoMessageFields
void $ char '}'
mkInfoMessage fields
parseInfoMessageFields :: Parser [HandshakeMessageField]
parseInfoMessageFields = infoMessageField `sepBy` char ','
where
infoMessageField = parseServerId
<|> parseVersion
<|> parseGoVersion
<|> parseServerHost
<|> parseServerPort
<|> parseServerAuthRequired
<|> parseSslRequired
<|> parseTlsRequired
<|> parseTlsVerify
<|> parseMaxPayload
-- | Nor is the parsing of the Connect message performance critical.
connectMessage :: Parser Message
connectMessage = do
spacedMsgName "CONNECT"
void $ char '{'
fields <- parseConnectMessageFields
void $ char '}'
newLine
mkConnectMessage fields
parseConnectMessageFields :: Parser [HandshakeMessageField]
parseConnectMessageFields = connectMessageField `sepBy` char ','
where
connectMessageField = parseClientVerbose
<|> parseClientPedantic
<|> parseSslRequired
<|> parseClientAuthToken
<|> parseClientUser
<|> parseClientPass
<|> parseClientName
<|> parseClientLang
<|> parseVersion
-- | Parse a MSG message ...
msgMessage :: Parser Message
msgMessage = msgMessageWithReply <|> msgMessageWithoutReply
-- | ... with a reply-to field.
msgMessageWithReply :: Parser Message
msgMessageWithReply = do
spacedMsgName "MSG"
MSG <$> takeTill isSpace <* singleSpace
<*> parseSid <* singleSpace
<*> (Just <$> takeTill isSpace <* singleSpace)
<*> readPayload <* newLine
-- | ... and without a reply-to.
msgMessageWithoutReply :: Parser Message
msgMessageWithoutReply = do
spacedMsgName "MSG"
MSG <$> takeTill isSpace <* singleSpace
<*> parseSid <* singleSpace
<*> pure Nothing
<*> readPayload <* newLine
-- | Parse a PUB message ...
pubMessage :: Parser Message
pubMessage = pubMessageWithReply <|> pubMessageWithoutReply
-- | ... with a reply-to field.
pubMessageWithReply :: Parser Message
pubMessageWithReply = do
spacedMsgName "PUB"
PUB <$> takeTill isSpace <* singleSpace
<*> (Just <$> takeTill isSpace <* singleSpace)
<*> readPayload <* newLine
-- | ... and without a reply-to.
pubMessageWithoutReply :: Parser Message
pubMessageWithoutReply = do
spacedMsgName "PUB"
PUB <$> takeTill isSpace <* singleSpace
<*> pure Nothing
<*> readPayload <* newLine
-- | Helper parser to read the length/payload pair from a PUB/MSG
-- message.
readPayload :: Parser LBS.ByteString
readPayload = do
len <- decimal <* newLine
LBS.fromStrict <$> AP.take len
{-# INLINE readPayload #-}
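-- For example (illustrative only):
--
--   parseOnly readPayload "5\r\nhello"  ==  Right "hello"
--
-- the decimal length prefix and its terminating newline are consumed first,
-- then exactly five payload bytes; the final "\r\n" of a real MSG/PUB frame
-- is consumed by the calling parser.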
-- | Parse a SUB message ...
subMessage :: Parser Message
subMessage = subMessageWithQueue <|> subMessageWithoutQueue
-- | ... with a queue group.
subMessageWithQueue :: Parser Message
subMessageWithQueue = do
spacedMsgName "SUB"
SUB <$> takeTill isSpace <* singleSpace
<*> (Just <$> takeTill isSpace <* singleSpace)
<*> parseSid <* newLine
-- | ... and without a queue group.
subMessageWithoutQueue :: Parser Message
subMessageWithoutQueue = do
spacedMsgName "SUB"
SUB <$> takeTill isSpace <* singleSpace
<*> pure Nothing
<*> parseSid <* newLine
-- | Parse an UNSUB message ...
unsubMessage :: Parser Message
unsubMessage = unsubMessageWithLimit <|> unsubMessageWithoutLimit
-- | ... with an unsubscribe limit.
unsubMessageWithLimit :: Parser Message
unsubMessageWithLimit = do
spacedMsgName "UNSUB"
UNSUB <$> parseSid <* singleSpace
<*> (Just <$> decimal) <* newLine
-- | ... and without an unsubscribe limit.
unsubMessageWithoutLimit :: Parser Message
unsubMessageWithoutLimit = do
spacedMsgName "UNSUB"
UNSUB <$> parseSid <* newLine
<*> pure Nothing
pingMessage :: Parser Message
pingMessage = (msgName "PING" >> newLine) *> pure PING
pongMessage :: Parser Message
pongMessage = (msgName "PONG" >> newLine) *> pure PONG
okMessage :: Parser Message
okMessage = (msgName "+OK" >> newLine) *> pure OK
errMessage :: Parser Message
errMessage = do
spacedMsgName "-ERR"
ERR <$> protocolError <* newLine
parseServerId :: Parser HandshakeMessageField
parseServerId = pair "\"server_id\"" quotedString "server_id" String
parseVersion :: Parser HandshakeMessageField
parseVersion = pair "\"version\"" quotedString "version" String
parseGoVersion :: Parser HandshakeMessageField
parseGoVersion = pair "\"go\"" quotedString "go" String
parseServerHost :: Parser HandshakeMessageField
parseServerHost = pair "\"host\"" quotedString "host" String
parseServerPort :: Parser HandshakeMessageField
parseServerPort = pair "\"port\"" decimal "port" Int
parseServerAuthRequired :: Parser HandshakeMessageField
parseServerAuthRequired =
pair "\"auth_required\"" boolean "auth_required" Bool
parseSslRequired :: Parser HandshakeMessageField
parseSslRequired = pair "\"ssl_required\"" boolean "ssl_required" Bool
parseTlsRequired :: Parser HandshakeMessageField
parseTlsRequired = pair "\"tls_required\"" boolean "tls_required" Bool
parseTlsVerify :: Parser HandshakeMessageField
parseTlsVerify = pair "\"tls_verify\"" boolean "tls_verify" Bool
parseMaxPayload :: Parser HandshakeMessageField
parseMaxPayload = pair "\"max_payload\"" decimal "max_payload" Int
parseClientVerbose :: Parser HandshakeMessageField
parseClientVerbose = pair "\"verbose\"" boolean "verbose" Bool
parseClientPedantic :: Parser HandshakeMessageField
parseClientPedantic = pair "\"pedantic\"" boolean "pedantic" Bool
parseClientAuthToken :: Parser HandshakeMessageField
parseClientAuthToken =
pair "\"auth_token\"" quotedString "auth_token" String
parseClientUser :: Parser HandshakeMessageField
parseClientUser = pair "\"user\"" quotedString "user" String
parseClientPass :: Parser HandshakeMessageField
parseClientPass = pair "\"pass\"" quotedString "pass" String
parseClientName :: Parser HandshakeMessageField
parseClientName = pair "\"name\"" quotedString "name" String
parseClientLang :: Parser HandshakeMessageField
parseClientLang = pair "\"lang\"" quotedString "lang" String
pair :: ByteString -> Parser a -> ByteString
-> (a -> HandshakeMessageValue)
-> Parser HandshakeMessageField
pair fieldName parser keyName ctor = do
void $ string fieldName
void $ char ':'
value <- parser
return (keyName, ctor value)
quotedString :: Parser ByteString
quotedString = BS.pack <$> (char '\"' *> manyTill anyChar (char '\"'))
boolean :: Parser Bool
boolean = string "false" *> return False <|> string "true" *> return True
protocolError :: Parser ProtocolError
protocolError =
stringCI "\'Unknown Protocol Operation\'"
*> return UnknownProtocolOperation <|>
stringCI "\'Authorization Violation\'"
*> return AuthorizationViolation <|>
stringCI "\'Authorization Timeout\'"
*> return AuthorizationTimeout <|>
stringCI "\'Parser Error\'"
*> return ParserError <|>
stringCI "\'Stale Connection\'"
*> return StaleConnection <|>
stringCI "\'Slow Consumer\'"
*> return SlowConsumer <|>
stringCI "\'Maximum Payload Exceeded\'"
*> return MaximumPayloadExceeded <|>
stringCI "\'Invalid Subject\'"
*> return InvalidSubject
parseSid :: Parser Sid
parseSid = decimal
mkInfoMessage :: [HandshakeMessageField] -> Parser Message
mkInfoMessage fields =
INFO <$> asByteString (lookup "server_id" fields)
<*> asByteString (lookup "version" fields)
<*> asByteString (lookup "go" fields)
<*> asByteString (lookup "host" fields)
<*> asInt (lookup "port" fields)
<*> asBool (lookup "auth_required" fields)
<*> asBool (lookup "ssl_required" fields)
<*> asBool (lookup "tls_required" fields)
<*> asBool (lookup "tls_verify" fields)
<*> asInt (lookup "max_payload" fields)
mkConnectMessage :: [HandshakeMessageField] -> Parser Message
mkConnectMessage fields =
CONNECT <$> asBool (lookup "verbose" fields)
<*> asBool (lookup "pedantic" fields)
<*> asBool (lookup "ssl_required" fields)
<*> asByteString (lookup "auth_token" fields)
<*> asByteString (lookup "user" fields)
<*> asByteString (lookup "pass" fields)
<*> asByteString (lookup "name" fields)
<*> asByteString (lookup "lang" fields)
<*> asByteString (lookup "version" fields)
asByteString :: Maybe HandshakeMessageValue -> Parser (Maybe ByteString)
asByteString Nothing = return Nothing
asByteString (Just (String value)) = return (Just value)
asByteString _ = fail "Expected a ByteString"
asBool :: Maybe HandshakeMessageValue -> Parser (Maybe Bool)
asBool Nothing = return Nothing
asBool (Just (Bool value)) = return (Just value)
asBool _ = fail "Expected a boolean"
asInt :: Maybe HandshakeMessageValue -> Parser (Maybe Int)
asInt Nothing = return Nothing
asInt (Just (Int value)) = return (Just value)
asInt _ = fail "Expected an Int"
spacedMsgName :: ByteString -> Parser ()
spacedMsgName name = do
msgName name
singleSpace
{-# INLINE spacedMsgName #-}
singleSpace :: Parser ()
singleSpace = void space
{-# INLINE singleSpace #-}
newLine :: Parser ()
newLine = void $ string "\r\n"
{-# INLINE newLine #-}
msgName :: ByteString -> Parser ()
msgName = void . stringCI
{-# INLINE msgName #-}
| null | https://raw.githubusercontent.com/psandahl/hats/2503edefbda64209d20509b075b3ab90cac39b8d/src/Network/Nats/Message/Parser.hs | haskell | # LANGUAGE OverloadedStrings #
|
Stability: experimental
Portability: portable
NATS protocol 'Message' parser. To be used with the
| The parsing of the Info message is not performance critical.
| Parse a MSG message ...
| ... with a reply-to field.
| ... and without a reply-to.
| Parse a PUB message ...
| ... with a reply-to field.
| ... and without a reply-to.
message.
| Parse a SUB message ...
| ... with a queue group.
| ... and without a queue group.
| ... with an unsubscribe limit.
| ... and without an unsubscribe limit.
# INLINE msgName # | Module : Network . . Message .
Copyright : ( c ) 2016
License : MIT
Maintainer : < >
" Data . Attoparsec . ByteString " library .
module Network.Nats.Message.Parser
( parseMessage
) where
import Control.Applicative ((<|>))
import Control.Monad (void)
import Data.Attoparsec.ByteString.Char8
import Data.ByteString.Char8 (ByteString)
import qualified Data.Attoparsec.ByteString.Char8 as AP
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Network.Nats.Message.Message ( Message (..)
, ProtocolError (..)
)
import Network.Nats.Types (Sid)
data HandshakeMessageValue =
Bool !Bool
| String !ByteString
| Int !Int
deriving Show
type HandshakeMessageField = (ByteString, HandshakeMessageValue)
-- | Parse a message.
parseMessage :: Parser Message
parseMessage = skipSpace *> parseMessage'
where
parseMessage' = msgMessage
<|> infoMessage
<|> connectMessage
<|> pubMessage
<|> subMessage
<|> unsubMessage
<|> pingMessage
<|> pongMessage
<|> okMessage
<|> errMessage
infoMessage :: Parser Message
infoMessage = do
spacedMsgName "INFO"
void $ char '{'
fields <- parseInfoMessageFields
void $ char '}'
mkInfoMessage fields
parseInfoMessageFields :: Parser [HandshakeMessageField]
parseInfoMessageFields = infoMessageField `sepBy` char ','
where
infoMessageField = parseServerId
<|> parseVersion
<|> parseGoVersion
<|> parseServerHost
<|> parseServerPort
<|> parseServerAuthRequired
<|> parseSslRequired
<|> parseTlsRequired
<|> parseTlsVerify
<|> parseMaxPayload
-- | Nor is the parsing of the Connect message performance critical.
connectMessage :: Parser Message
connectMessage = do
spacedMsgName "CONNECT"
void $ char '{'
fields <- parseConnectMessageFields
void $ char '}'
newLine
mkConnectMessage fields
parseConnectMessageFields :: Parser [HandshakeMessageField]
parseConnectMessageFields = connectMessageField `sepBy` char ','
where
connectMessageField = parseClientVerbose
<|> parseClientPedantic
<|> parseSslRequired
<|> parseClientAuthToken
<|> parseClientUser
<|> parseClientPass
<|> parseClientName
<|> parseClientLang
<|> parseVersion
msgMessage :: Parser Message
msgMessage = msgMessageWithReply <|> msgMessageWithoutReply
msgMessageWithReply :: Parser Message
msgMessageWithReply = do
spacedMsgName "MSG"
MSG <$> takeTill isSpace <* singleSpace
<*> parseSid <* singleSpace
<*> (Just <$> takeTill isSpace <* singleSpace)
<*> readPayload <* newLine
msgMessageWithoutReply :: Parser Message
msgMessageWithoutReply = do
spacedMsgName "MSG"
MSG <$> takeTill isSpace <* singleSpace
<*> parseSid <* singleSpace
<*> pure Nothing
<*> readPayload <* newLine
pubMessage :: Parser Message
pubMessage = pubMessageWithReply <|> pubMessageWithoutReply
pubMessageWithReply :: Parser Message
pubMessageWithReply = do
spacedMsgName "PUB"
PUB <$> takeTill isSpace <* singleSpace
<*> (Just <$> takeTill isSpace <* singleSpace)
<*> readPayload <* newLine
pubMessageWithoutReply :: Parser Message
pubMessageWithoutReply = do
spacedMsgName "PUB"
PUB <$> takeTill isSpace <* singleSpace
<*> pure Nothing
<*> readPayload <* newLine
-- | Helper parser to read the length/payload pair from a PUB/MSG message.
readPayload :: Parser LBS.ByteString
readPayload = do
len <- decimal <* newLine
LBS.fromStrict <$> AP.take len
{-# INLINE readPayload #-}
subMessage :: Parser Message
subMessage = subMessageWithQueue <|> subMessageWithoutQueue
subMessageWithQueue :: Parser Message
subMessageWithQueue = do
spacedMsgName "SUB"
SUB <$> takeTill isSpace <* singleSpace
<*> (Just <$> takeTill isSpace <* singleSpace)
<*> parseSid <* newLine
subMessageWithoutQueue :: Parser Message
subMessageWithoutQueue = do
spacedMsgName "SUB"
SUB <$> takeTill isSpace <* singleSpace
<*> pure Nothing
<*> parseSid <* newLine
-- | Parse an UNSUB message ...
unsubMessage :: Parser Message
unsubMessage = unsubMessageWithLimit <|> unsubMessageWithoutLimit
unsubMessageWithLimit :: Parser Message
unsubMessageWithLimit = do
spacedMsgName "UNSUB"
UNSUB <$> parseSid <* singleSpace
<*> (Just <$> decimal) <* newLine
unsubMessageWithoutLimit :: Parser Message
unsubMessageWithoutLimit = do
spacedMsgName "UNSUB"
UNSUB <$> parseSid <* newLine
<*> pure Nothing
pingMessage :: Parser Message
pingMessage = (msgName "PING" >> newLine) *> pure PING
pongMessage :: Parser Message
pongMessage = (msgName "PONG" >> newLine) *> pure PONG
okMessage :: Parser Message
okMessage = (msgName "+OK" >> newLine) *> pure OK
errMessage :: Parser Message
errMessage = do
spacedMsgName "-ERR"
ERR <$> protocolError <* newLine
parseServerId :: Parser HandshakeMessageField
parseServerId = pair "\"server_id\"" quotedString "server_id" String
parseVersion :: Parser HandshakeMessageField
parseVersion = pair "\"version\"" quotedString "version" String
parseGoVersion :: Parser HandshakeMessageField
parseGoVersion = pair "\"go\"" quotedString "go" String
parseServerHost :: Parser HandshakeMessageField
parseServerHost = pair "\"host\"" quotedString "host" String
parseServerPort :: Parser HandshakeMessageField
parseServerPort = pair "\"port\"" decimal "port" Int
parseServerAuthRequired :: Parser HandshakeMessageField
parseServerAuthRequired =
pair "\"auth_required\"" boolean "auth_required" Bool
parseSslRequired :: Parser HandshakeMessageField
parseSslRequired = pair "\"ssl_required\"" boolean "ssl_required" Bool
parseTlsRequired :: Parser HandshakeMessageField
parseTlsRequired = pair "\"tls_required\"" boolean "tls_required" Bool
parseTlsVerify :: Parser HandshakeMessageField
parseTlsVerify = pair "\"tls_verify\"" boolean "tls_verify" Bool
parseMaxPayload :: Parser HandshakeMessageField
parseMaxPayload = pair "\"max_payload\"" decimal "max_payload" Int
parseClientVerbose :: Parser HandshakeMessageField
parseClientVerbose = pair "\"verbose\"" boolean "verbose" Bool
parseClientPedantic :: Parser HandshakeMessageField
parseClientPedantic = pair "\"pedantic\"" boolean "pedantic" Bool
parseClientAuthToken :: Parser HandshakeMessageField
parseClientAuthToken =
pair "\"auth_token\"" quotedString "auth_token" String
parseClientUser :: Parser HandshakeMessageField
parseClientUser = pair "\"user\"" quotedString "user" String
parseClientPass :: Parser HandshakeMessageField
parseClientPass = pair "\"pass\"" quotedString "pass" String
parseClientName :: Parser HandshakeMessageField
parseClientName = pair "\"name\"" quotedString "name" String
parseClientLang :: Parser HandshakeMessageField
parseClientLang = pair "\"lang\"" quotedString "lang" String
pair :: ByteString -> Parser a -> ByteString
-> (a -> HandshakeMessageValue)
-> Parser HandshakeMessageField
pair fieldName parser keyName ctor = do
void $ string fieldName
void $ char ':'
value <- parser
return (keyName, ctor value)
quotedString :: Parser ByteString
quotedString = BS.pack <$> (char '\"' *> manyTill anyChar (char '\"'))
boolean :: Parser Bool
boolean = string "false" *> return False <|> string "true" *> return True
protocolError :: Parser ProtocolError
protocolError =
stringCI "\'Unknown Protocol Operation\'"
*> return UnknownProtocolOperation <|>
stringCI "\'Authorization Violation\'"
*> return AuthorizationViolation <|>
stringCI "\'Authorization Timeout\'"
*> return AuthorizationTimeout <|>
stringCI "\'Parser Error\'"
*> return ParserError <|>
stringCI "\'Stale Connection\'"
*> return StaleConnection <|>
stringCI "\'Slow Consumer\'"
*> return SlowConsumer <|>
stringCI "\'Maximum Payload Exceeded\'"
*> return MaximumPayloadExceeded <|>
stringCI "\'Invalid Subject\'"
*> return InvalidSubject
parseSid :: Parser Sid
parseSid = decimal
mkInfoMessage :: [HandshakeMessageField] -> Parser Message
mkInfoMessage fields =
INFO <$> asByteString (lookup "server_id" fields)
<*> asByteString (lookup "version" fields)
<*> asByteString (lookup "go" fields)
<*> asByteString (lookup "host" fields)
<*> asInt (lookup "port" fields)
<*> asBool (lookup "auth_required" fields)
<*> asBool (lookup "ssl_required" fields)
<*> asBool (lookup "tls_required" fields)
<*> asBool (lookup "tls_verify" fields)
<*> asInt (lookup "max_payload" fields)
mkConnectMessage :: [HandshakeMessageField] -> Parser Message
mkConnectMessage fields =
CONNECT <$> asBool (lookup "verbose" fields)
<*> asBool (lookup "pedantic" fields)
<*> asBool (lookup "ssl_required" fields)
<*> asByteString (lookup "auth_token" fields)
<*> asByteString (lookup "user" fields)
<*> asByteString (lookup "pass" fields)
<*> asByteString (lookup "name" fields)
<*> asByteString (lookup "lang" fields)
<*> asByteString (lookup "version" fields)
asByteString :: Maybe HandshakeMessageValue -> Parser (Maybe ByteString)
asByteString Nothing = return Nothing
asByteString (Just (String value)) = return (Just value)
asByteString _ = fail "Expected a ByteString"
asBool :: Maybe HandshakeMessageValue -> Parser (Maybe Bool)
asBool Nothing = return Nothing
asBool (Just (Bool value)) = return (Just value)
asBool _ = fail "Expected a boolean"
asInt :: Maybe HandshakeMessageValue -> Parser (Maybe Int)
asInt Nothing = return Nothing
asInt (Just (Int value)) = return (Just value)
asInt _ = fail "Expected an Int"
spacedMsgName :: ByteString -> Parser ()
spacedMsgName name = do
msgName name
singleSpace
{-# INLINE spacedMsgName #-}
singleSpace :: Parser ()
singleSpace = void space
{-# INLINE singleSpace #-}
newLine :: Parser ()
newLine = void $ string "\r\n"
{-# INLINE newLine #-}
msgName :: ByteString -> Parser ()
msgName = void . stringCI
|
72792dfd4d2996012fb51b38b53dbd70052ed8ac6a63754a4d3896141dd6d764 | nikita-volkov/vector-builder | Builder.hs | module VectorBuilder.Core.Builder where
import qualified Data.Vector.Generic as B
import qualified Data.Vector.Generic.Mutable as C
import qualified VectorBuilder.Core.Update as A
import VectorBuilder.Prelude hiding (concat, empty)
-- |
-- An abstraction over the size of a vector for the process of its construction.
--
-- It postpones the actual construction of a vector until the execution of the builder.
data Builder element
= Builder !Int !(A.Update element)
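-- Illustrative usage sketch (not part of the original module): builders are
-- composed with the Semigroup/Monoid instances below and only materialised by
-- an executor, e.g. VectorBuilder.Vector.build from the same package (assumed
-- here):
--
--   VectorBuilder.Vector.build (singleton 1 <> vector (Data.Vector.fromList [2, 3]))
--     :: Data.Vector.Vector Int    -- yields [1,2,3]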
-- |
-- Gets the size of a Builder.
{-# INLINE size #-}
size :: Builder element -> Int
size (Builder s _) = s
-- * Initialisation
-- |
-- Empty builder.
{-# INLINE empty #-}
empty :: Builder element
empty =
Builder 0 A.empty
-- |
-- Builder of a single element.
{-# INLINE singleton #-}
singleton :: element -> Builder element
singleton element =
Builder 1 (A.write element)
-- |
-- Builder from an immutable vector of elements.
--
-- Supports all kinds of vectors: boxed, unboxed, primitive, storable.
{-# INLINE vector #-}
vector :: B.Vector vector element => vector element -> Builder element
vector vector =
Builder (B.length vector) (A.writeMany vector)
{-# INLINE foldable #-}
foldable :: Foldable foldable => foldable element -> Builder element
foldable foldable =
Builder (length foldable) (A.writeFoldable foldable)
-- * Updates
{-# INLINE snoc #-}
snoc :: element -> Builder element -> Builder element
snoc element (Builder size update) =
Builder (succ size) (A.prepend size update (A.write element))
{-# INLINE cons #-}
cons :: element -> Builder element -> Builder element
cons element (Builder size update) =
Builder (succ size) (A.prepend 1 (A.write element) update)
{-# INLINE prepend #-}
prepend :: Builder element -> Builder element -> Builder element
prepend (Builder leftSize leftUpdate) (Builder rightSize rightUpdate) =
Builder (leftSize + rightSize) (A.prepend leftSize leftUpdate rightUpdate)
{-# INLINE append #-}
append :: Builder element -> Builder element -> Builder element
append =
flip prepend
{-# INLINE concat #-}
concat :: Foldable foldable => foldable (Builder element) -> Builder element
concat builders =
Builder
( let step size (Builder builderSize _) = size + builderSize
in foldl' step 0 builders
)
( A.Update
( \mVector offset ->
foldM_
( \index (Builder size (A.Update st)) ->
st mVector index $> index + size
)
offset
builders
)
)
-- * Instances
-- |
-- Provides support for /O(1)/ concatenation.
instance Semigroup (Builder element) where
{-# INLINE (<>) #-}
(<>) =
prepend
sconcat =
concat
-- |
-- Provides support for /O(1)/ concatenation.
instance Monoid (Builder element) where
  {-# INLINE mempty #-}
mempty =
empty
  {-# INLINE mappend #-}
mappend =
(<>)
  {-# INLINE mconcat #-}
mconcat =
concat
| null | https://raw.githubusercontent.com/nikita-volkov/vector-builder/83f733476ba99bccbbe1e832b668a3f07b639938/library/VectorBuilder/Core/Builder.hs | haskell | |
An abstraction over the size of a vector for the process of its construction.
It postpones the actual construction of a vector until the execution of the builder.
|
Gets the size of a Builder.
* Initialisation
|
Empty builder.
# INLINE empty #
|
Builder of a single element.
|
Builder from an immutable vector of elements.
Supports all kinds of vectors: boxed, unboxed, primitive, storable.
* Updates
* Instances
|
Provides support for /O(1)/ concatenation.
# INLINE (<>) #
|
Provides support for /O(1)/ concatenation. | module VectorBuilder.Core.Builder where
import qualified Data.Vector.Generic as B
import qualified Data.Vector.Generic.Mutable as C
import qualified VectorBuilder.Core.Update as A
import VectorBuilder.Prelude hiding (concat, empty)
data Builder element
= Builder !Int !(A.Update element)
{-# INLINE size #-}
size :: Builder element -> Int
size (Builder s _) = s
empty :: Builder element
empty =
Builder 0 A.empty
{-# INLINE singleton #-}
singleton :: element -> Builder element
singleton element =
Builder 1 (A.write element)
{-# INLINE vector #-}
vector :: B.Vector vector element => vector element -> Builder element
vector vector =
Builder (B.length vector) (A.writeMany vector)
{-# INLINE foldable #-}
foldable :: Foldable foldable => foldable element -> Builder element
foldable foldable =
Builder (length foldable) (A.writeFoldable foldable)
{-# INLINE snoc #-}
snoc :: element -> Builder element -> Builder element
snoc element (Builder size update) =
Builder (succ size) (A.prepend size update (A.write element))
{-# INLINE cons #-}
cons :: element -> Builder element -> Builder element
cons element (Builder size update) =
Builder (succ size) (A.prepend 1 (A.write element) update)
{-# INLINE prepend #-}
prepend :: Builder element -> Builder element -> Builder element
prepend (Builder leftSize leftUpdate) (Builder rightSize rightUpdate) =
Builder (leftSize + rightSize) (A.prepend leftSize leftUpdate rightUpdate)
{-# INLINE append #-}
append :: Builder element -> Builder element -> Builder element
append =
flip prepend
{-# INLINE concat #-}
concat :: Foldable foldable => foldable (Builder element) -> Builder element
concat builders =
Builder
( let step size (Builder builderSize _) = size + builderSize
in foldl' step 0 builders
)
( A.Update
( \mVector offset ->
foldM_
( \index (Builder size (A.Update st)) ->
st mVector index $> index + size
)
offset
builders
)
)
instance Semigroup (Builder element) where
(<>) =
prepend
sconcat =
concat
instance Monoid (Builder element) where
  {-# INLINE mempty #-}
mempty =
empty
  {-# INLINE mappend #-}
mappend =
(<>)
  {-# INLINE mconcat #-}
mconcat =
concat
|
495df40ce111e1a51742459ab9152f09e11c93d05daaf4b487f6acc557db13e4 | cwgoes/scisco | Main.hs | module Main where
import Control.Concurrent
import Control.Monad
import Foundation
import qualified ABCI.Server as ABCI
import qualified Scisco.Core as Scisco
main ∷ IO ()
main = do
state ← Scisco.load
void $ forkIO $ Scisco.serve
ABCI.serve (Scisco.handle state)
| null | https://raw.githubusercontent.com/cwgoes/scisco/55b5c94a42d2e99fbd74fa193d9cbaeea525531c/app/Main.hs | haskell | module Main where
import Control.Concurrent
import Control.Monad
import Foundation
import qualified ABCI.Server as ABCI
import qualified Scisco.Core as Scisco
main ∷ IO ()
main = do
state ← Scisco.load
void $ forkIO $ Scisco.serve
ABCI.serve (Scisco.handle state)
|