_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
04c0d86b609a217ab427d9666e72a40c3ffed109c630112cbeacda75c309f788 | ralexstokes/stoken | block.clj | (ns io.stokes.block
(:require [io.stokes.hash :as hash]
[clojure.set :as set]
[clj-time.core :as time]
[clj-time.coerce :as coerce]
[io.stokes.transaction :as transaction])
(:refer-clojure :exclude [hash]))
(def default-halving-frequency
"how many blocks occur since the last time the block reward halved"
5000)
(def default-base-block-reward
"the largest block reward that will ever be claimed"
128)
(defn calculate-subsidy [height halving-frequency base-block-reward]
(let [halvings (quot height halving-frequency)]
(int (quot base-block-reward
(Math/pow 2 halvings)))))
(def ^:private block-header-keys #{:previous-hash
:difficulty
:transaction-root
:time
:nonce})
(defn header [block]
(select-keys block block-header-keys))
(defn hash [block]
(get block :hash
(some-> block
header
hash/of)))
(defn difficulty [block]
(get block :difficulty 0))
(defn- previous [block]
(:previous-hash block))
(defn with-nonce [block nonce]
(assoc block :nonce nonce))
(defn- calculate-threshold [max-threshold difficulty]
(.shiftRight max-threshold difficulty))
(defn- hex->bignum [str]
(BigInteger. str 16))
(defn sealed?
"a proof-of-work block is sealed when the block hash is less than a threshold determined by the difficulty"
[block max-threshold]
(let [threshold (calculate-threshold max-threshold (difficulty block))
hash (-> block
hash
hex->bignum)]
(< hash threshold)))
(defn readable
"returns a human-readable description of the block"
[block]
(update block :time coerce/to-date))
(defn from-readable
"parses a human-readable description of the block"
[block]
(update block :time coerce/from-date))
(def ^:private target-blocktime 10000) ;; milliseconds
(defn- timestamps->blocktimes [[a b]]
(time/in-seconds
(time/interval b a)))
(defn- average [default coll]
(if (seq coll)
(let [n (count coll)
sum (reduce + coll)]
(/ sum n))
default))
(defn- calculate-average-blocktime
[timestamps]
(->> timestamps
reverse
(take 4)
(partition 2)
(map timestamps->blocktimes)
(average target-blocktime)))
(defn- calculate-difficulty
  "adjust difficulty so that the average time between blocks is N seconds"
  [block timestamps]
  ;; NOTE(review): `calculate-average-blocktime` reduces interval pairs with
  ;; `time/in-seconds`, while `target-blocktime` is declared as 10000 with a
  ;; "milliseconds" comment.  Comparing a seconds value against a millisecond
  ;; constant means the average will almost never exceed the target, so `inc`
  ;; is chosen nearly every time -- confirm the intended unit upstream.
  (let [difficulty (difficulty block)
        average-blocktime (calculate-average-blocktime timestamps)
        ;; `next` is dec when blocks arrive slower than target (easier),
        ;; inc when they arrive faster (harder).
        next (if (> average-blocktime target-blocktime)
               dec
               inc)]
    (next difficulty)))
(defn- transactions->root [transactions]
(-> transactions
hash/tree-of
hash/root-of))
(defn- header-from [chain transactions]
(let [previous-block (last chain)
block-timestamps (map :time chain)]
{:previous-hash (hash previous-block)
:difficulty (calculate-difficulty previous-block block-timestamps)
:transaction-root (transactions->root transactions)
:time (time/now)
:nonce 0}))
(defn next-template
"generates a block with `transactions` that satisfies the constraints to be appended to the chain modulo a valid proof-of-work, i.e. a nonce that satisfies the difficulty in the block, also implies a missing block hash in the returned data. note: timestamp in the block header is currently included in the hash pre-image; given that a valid block must be within some time interval, client code MUST refresh this timestamp as needed; if you are having issues, run the proof-of-work routine for a smaller number of rounds"
[chain transactions]
{:post [(let [keys (->> %
keys
(into #{}))]
(set/subset? block-header-keys keys))]}
(merge {:transactions transactions}
(header-from chain transactions)))
(defn- node->block [node]
(:block node))
(defn- node->children [node]
(:children node))
(defn block->height [block]
(let [transactions (:transactions block)
[coinbase] (filter transaction/coinbase? transactions)
{:keys [block-height]} (-> coinbase
transaction/inputs
first)]
block-height))
(defn- parent?
"indicates if block `parent` is a parent of block `child`; NOTE: we add the check for block height to ensure the block subsidies are correct in combination with `valid?`"
[parent child]
(and
(= (previous child)
(hash parent))
(= (block->height child)
(inc (block->height parent)))))
(defn- same-block?
"if two blocks have the same hash, they are the same block"
[a b]
(= (hash a)
(hash b)))
(defn- node-of [block & children]
(merge {:block block} (when children
{:children (reduce conj #{} children)})))
(defn- children-contains-block? [children block]
(let [blocks (map (comp hash :block) children)]
(some #{(hash block)} blocks)))
(defn- insert [node new-block]
(let [block (node->block node)
children (node->children node)]
(apply node-of block
(if (children-contains-block? children new-block)
children
(if (parent? block new-block)
(conj children (node-of new-block))
(map #(insert % new-block) children))))))
(def same-chain? =)
(defn chain-contains-block? [blockchain target-block]
(let [block (node->block blockchain)
children (node->children blockchain)]
(or (same-block? block target-block)
(some true? (map #(chain-contains-block? % target-block) children)))))
(defn add-to-chain
"takes a blocktree and a set of blocks, possibly containing orphans; will insert all possible blocks in the set and return the updated tree along with the remaining orphans"
[blockchain set-of-blocks]
(let [blocks (into [] set-of-blocks)]
(if-let [[inserted-block new-chain] (->> blocks
(map #(vector % (insert blockchain %)))
(drop-while (comp (partial same-chain? blockchain) second))
first)]
(add-to-chain new-chain (disj set-of-blocks inserted-block))
[blockchain (into #{} (remove (partial chain-contains-block? blockchain) set-of-blocks))])))
(defn tree->blocks
"collects every block in the tree into a seq of blocks"
[blockchain]
(->> (tree-seq :children #(into [] (:children %)) blockchain)
(map :block)))
;; find the best chain in a given block tree
(defn- total-difficulty [node]
(let [block (node->block node)
children (node->children node)]
(apply + (difficulty block) (map total-difficulty children))))
(defn- select-many-by-key [weight f xs]
(let [decorated (map (fn [x] [x (f x)]) xs)
max-key (apply weight (map second decorated))]
(->> decorated
(filter #(= max-key (second %)))
(map first))))
(defn- max-keys [f xs]
(select-many-by-key max f xs))
(defn- min-keys [f xs]
(select-many-by-key min f xs))
(defn- select-nodes-with-most-work [nodes]
(max-keys total-difficulty nodes))
(defn- node->timestamp [node]
(let [block (node->block node)]
(coerce/to-long (:time block))))
(defn- select-earliest-nodes [nodes]
(min-keys node->timestamp nodes))
(defn- fork-choice-rule
"We use a fork choice rule resembling Bitcoin Core. First find the nodes with the most work, breaking ties by timestamp"
[nodes]
(-> nodes
select-nodes-with-most-work
select-earliest-nodes
first))
(defn- collect-best-chain [chain node]
(let [block (node->block node)
children (node->children node)
chain (conj chain block)]
(if children
(collect-best-chain chain (fork-choice-rule children))
chain)))
(defn best-chain
"accepts the block tree and returns a seq of those blocks on the best chain according to the fork choice rule"
[blockchain]
(collect-best-chain [] blockchain))
(defn chain-from [{genesis-block :initial-state}]
(node-of genesis-block))
(defn valid-proof-of-work? [block max-threshold]
(sealed? block max-threshold))
(def ^:private default-forward-time-limit-in-hours 2)
(def ^:private default-backward-time-limit-by-blocks 11)
(defn median
  "Returns the middle element of `times`.  Assumes the caller passes the
  timestamps already ordered (callers pass the tail of the chain) -- TODO
  confirm ordering upstream.  Uses integer division so an even-sized
  collection selects the lower-middle element: the previous
  `(/ (- count 1) 2)` produced a Ratio for even counts, which `nth`
  rejects with an IllegalArgumentException."
  [times]
  (let [n (count times)
        index (quot (dec n) 2)]
    (nth times index)))
(defn- after-median-of [times new-time]
(let [median (median times)]
(time/after? new-time median)))
(defn- within-hours
"`b` must be within `hours` time of `a`"
[hours a b]
(time/within?
(time/interval a (time/plus a (time/hours hours)))
b))
(defn reasonable-time?
"a block cannot be equal to or before the median of the last 11 blocks; a block cannot be more than 2 hours ahead of the latest block"
[chain block]
(let [block-time (:time block)]
(and (after-median-of (->> chain
reverse
(take default-backward-time-limit-by-blocks)
(map :time)) block-time)
(within-hours default-forward-time-limit-in-hours (->> chain
last
:time) block-time))))
(defn subsidy-correct? [coinbase halving-frequency base-block-reward]
(let [halving-frequency (or halving-frequency
default-halving-frequency)
base-block-reward (or base-block-reward
default-base-block-reward)
{:keys [block-height]} (-> coinbase
transaction/inputs
first)
value (-> coinbase
transaction/outputs
first
transaction/output->value)]
(= value
(calculate-subsidy block-height halving-frequency base-block-reward))))
(defn proper-transactions? [ledger block halving-frequency base-block-reward]
(let [transactions (:transactions block)
coinbase-transaction (first transactions)]
(and (transaction/coinbase? coinbase-transaction)
(subsidy-correct? coinbase-transaction halving-frequency base-block-reward)
(every? false? (map transaction/coinbase? (rest transactions)))
(every? true? (map (partial transaction/valid? ledger) transactions)))))
(defn valid-transaction-root? [block]
(let [transaction-root (:transaction-root block)
transactions (:transactions block)]
(= transaction-root
(transactions->root transactions))))
(defn- find-previous-block [chain target]
(let [blocks (tree->blocks chain)]
(first (filter #(parent? % target) blocks))))
(defn correct-difficulty? [chain block]
(when-let [candidate-block (find-previous-block chain block)]
(let [difficulty (difficulty block)
expected-difficulty (calculate-difficulty candidate-block (map :time (best-chain chain)))]
(= difficulty
expected-difficulty))))
(defn valid? [blockchain max-threshold ledger block halving-frequency base-block-reward]
(and
(not (empty? (:transactions block)))
(valid-proof-of-work? block max-threshold)
(reasonable-time? (best-chain blockchain) block)
(proper-transactions? ledger block halving-frequency base-block-reward)
(valid-transaction-root? block)
(correct-difficulty? blockchain block)))
| null | https://raw.githubusercontent.com/ralexstokes/stoken/b88adb36ffa1e9f3099925634eb1f98beb986442/src/io/stokes/block.clj | clojure | milliseconds
find the best chain in a given block tree | (ns io.stokes.block
(:require [io.stokes.hash :as hash]
[clojure.set :as set]
[clj-time.core :as time]
[clj-time.coerce :as coerce]
[io.stokes.transaction :as transaction])
(:refer-clojure :exclude [hash]))
(def default-halving-frequency
"how many blocks occur since the last time the block reward halved"
5000)
(def default-base-block-reward
"the largest block reward that will ever be claimed"
128)
(defn calculate-subsidy [height halving-frequency base-block-reward]
(let [halvings (quot height halving-frequency)]
(int (quot base-block-reward
(Math/pow 2 halvings)))))
(def ^:private block-header-keys #{:previous-hash
:difficulty
:transaction-root
:time
:nonce})
(defn header [block]
(select-keys block block-header-keys))
(defn hash [block]
(get block :hash
(some-> block
header
hash/of)))
(defn difficulty [block]
(get block :difficulty 0))
(defn- previous [block]
(:previous-hash block))
(defn with-nonce [block nonce]
(assoc block :nonce nonce))
(defn- calculate-threshold [max-threshold difficulty]
(.shiftRight max-threshold difficulty))
(defn- hex->bignum [str]
(BigInteger. str 16))
(defn sealed?
"a proof-of-work block is sealed when the block hash is less than a threshold determined by the difficulty"
[block max-threshold]
(let [threshold (calculate-threshold max-threshold (difficulty block))
hash (-> block
hash
hex->bignum)]
(< hash threshold)))
(defn readable
"returns a human-readable description of the block"
[block]
(update block :time coerce/to-date))
(defn from-readable
"parses a human-readable description of the block"
[block]
(update block :time coerce/from-date))
(defn- timestamps->blocktimes [[a b]]
(time/in-seconds
(time/interval b a)))
(defn- average [default coll]
(if (seq coll)
(let [n (count coll)
sum (reduce + coll)]
(/ sum n))
default))
(defn- calculate-average-blocktime
[timestamps]
(->> timestamps
reverse
(take 4)
(partition 2)
(map timestamps->blocktimes)
(average target-blocktime)))
(defn- calculate-difficulty
  "adjust difficulty so that the average time between blocks is N seconds"
  [block timestamps]
  ;; NOTE(review): `calculate-average-blocktime` reduces interval pairs with
  ;; `time/in-seconds`, while `target-blocktime` is declared as 10000
  ;; milliseconds in the original source.  Comparing a seconds value against
  ;; a millisecond constant means the average will almost never exceed the
  ;; target, so `inc` is chosen nearly every time -- confirm the intended
  ;; unit upstream.
  (let [difficulty (difficulty block)
        average-blocktime (calculate-average-blocktime timestamps)
        ;; `next` is dec when blocks arrive slower than target (easier),
        ;; inc when they arrive faster (harder).
        next (if (> average-blocktime target-blocktime)
               dec
               inc)]
    (next difficulty)))
(defn- transactions->root [transactions]
(-> transactions
hash/tree-of
hash/root-of))
(defn- header-from [chain transactions]
(let [previous-block (last chain)
block-timestamps (map :time chain)]
{:previous-hash (hash previous-block)
:difficulty (calculate-difficulty previous-block block-timestamps)
:transaction-root (transactions->root transactions)
:time (time/now)
:nonce 0}))
(defn next-template
"generates a block with `transactions` that satisfies the constraints to be appended to the chain modulo a valid proof-of-work, i.e. a nonce that satisfies the difficulty in the block, also implies a missing block hash in the returned data. note: timestamp in the block header is currently included in the hash pre-image; given that a valid block must be within some time interval, client code MUST refresh this timestamp as needed; if you are having issues, run the proof-of-work routine for a smaller number of rounds"
[chain transactions]
{:post [(let [keys (->> %
keys
(into #{}))]
(set/subset? block-header-keys keys))]}
(merge {:transactions transactions}
(header-from chain transactions)))
(defn- node->block [node]
(:block node))
(defn- node->children [node]
(:children node))
(defn block->height [block]
(let [transactions (:transactions block)
[coinbase] (filter transaction/coinbase? transactions)
{:keys [block-height]} (-> coinbase
transaction/inputs
first)]
block-height))
(defn- parent?
"indicates if block `parent` is a parent of block `child`; NOTE: we add the check for block height to ensure the block subsidies are correct in combination with `valid?`"
[parent child]
(and
(= (previous child)
(hash parent))
(= (block->height child)
(inc (block->height parent)))))
(defn- same-block?
"if two blocks have the same hash, they are the same block"
[a b]
(= (hash a)
(hash b)))
(defn- node-of [block & children]
(merge {:block block} (when children
{:children (reduce conj #{} children)})))
(defn- children-contains-block? [children block]
(let [blocks (map (comp hash :block) children)]
(some #{(hash block)} blocks)))
(defn- insert [node new-block]
(let [block (node->block node)
children (node->children node)]
(apply node-of block
(if (children-contains-block? children new-block)
children
(if (parent? block new-block)
(conj children (node-of new-block))
(map #(insert % new-block) children))))))
(def same-chain? =)
(defn chain-contains-block? [blockchain target-block]
(let [block (node->block blockchain)
children (node->children blockchain)]
(or (same-block? block target-block)
(some true? (map #(chain-contains-block? % target-block) children)))))
(defn add-to-chain
"takes a blocktree and a set of blocks, possibly containing orphans; will insert all possible blocks in the set and return the updated tree along with the remaining orphans"
[blockchain set-of-blocks]
(let [blocks (into [] set-of-blocks)]
(if-let [[inserted-block new-chain] (->> blocks
(map #(vector % (insert blockchain %)))
(drop-while (comp (partial same-chain? blockchain) second))
first)]
(add-to-chain new-chain (disj set-of-blocks inserted-block))
[blockchain (into #{} (remove (partial chain-contains-block? blockchain) set-of-blocks))])))
(defn tree->blocks
"collects every block in the tree into a seq of blocks"
[blockchain]
(->> (tree-seq :children #(into [] (:children %)) blockchain)
(map :block)))
(defn- total-difficulty [node]
(let [block (node->block node)
children (node->children node)]
(apply + (difficulty block) (map total-difficulty children))))
(defn- select-many-by-key [weight f xs]
(let [decorated (map (fn [x] [x (f x)]) xs)
max-key (apply weight (map second decorated))]
(->> decorated
(filter #(= max-key (second %)))
(map first))))
(defn- max-keys [f xs]
(select-many-by-key max f xs))
(defn- min-keys [f xs]
(select-many-by-key min f xs))
(defn- select-nodes-with-most-work [nodes]
(max-keys total-difficulty nodes))
(defn- node->timestamp [node]
(let [block (node->block node)]
(coerce/to-long (:time block))))
(defn- select-earliest-nodes [nodes]
(min-keys node->timestamp nodes))
(defn- fork-choice-rule
"We use a fork choice rule resembling Bitcoin Core. First find the nodes with the most work, breaking ties by timestamp"
[nodes]
(-> nodes
select-nodes-with-most-work
select-earliest-nodes
first))
(defn- collect-best-chain [chain node]
(let [block (node->block node)
children (node->children node)
chain (conj chain block)]
(if children
(collect-best-chain chain (fork-choice-rule children))
chain)))
(defn best-chain
"accepts the block tree and returns a seq of those blocks on the best chain according to the fork choice rule"
[blockchain]
(collect-best-chain [] blockchain))
(defn chain-from [{genesis-block :initial-state}]
(node-of genesis-block))
(defn valid-proof-of-work? [block max-threshold]
(sealed? block max-threshold))
(def ^:private default-forward-time-limit-in-hours 2)
(def ^:private default-backward-time-limit-by-blocks 11)
(defn median
  "Returns the middle element of `times`.  Assumes the caller passes the
  timestamps already ordered (callers pass the tail of the chain) -- TODO
  confirm ordering upstream.  Uses integer division so an even-sized
  collection selects the lower-middle element: the previous
  `(/ (- count 1) 2)` produced a Ratio for even counts, which `nth`
  rejects with an IllegalArgumentException."
  [times]
  (let [n (count times)
        index (quot (dec n) 2)]
    (nth times index)))
(defn- after-median-of [times new-time]
(let [median (median times)]
(time/after? new-time median)))
(defn- within-hours
"`b` must be within `hours` time of `a`"
[hours a b]
(time/within?
(time/interval a (time/plus a (time/hours hours)))
b))
(defn reasonable-time?
"a block cannot be equal to or before the median of the last 11 blocks; a block cannot be more than 2 hours ahead of the latest block"
[chain block]
(let [block-time (:time block)]
(and (after-median-of (->> chain
reverse
(take default-backward-time-limit-by-blocks)
(map :time)) block-time)
(within-hours default-forward-time-limit-in-hours (->> chain
last
:time) block-time))))
(defn subsidy-correct? [coinbase halving-frequency base-block-reward]
(let [halving-frequency (or halving-frequency
default-halving-frequency)
base-block-reward (or base-block-reward
default-base-block-reward)
{:keys [block-height]} (-> coinbase
transaction/inputs
first)
value (-> coinbase
transaction/outputs
first
transaction/output->value)]
(= value
(calculate-subsidy block-height halving-frequency base-block-reward))))
(defn proper-transactions? [ledger block halving-frequency base-block-reward]
(let [transactions (:transactions block)
coinbase-transaction (first transactions)]
(and (transaction/coinbase? coinbase-transaction)
(subsidy-correct? coinbase-transaction halving-frequency base-block-reward)
(every? false? (map transaction/coinbase? (rest transactions)))
(every? true? (map (partial transaction/valid? ledger) transactions)))))
(defn valid-transaction-root? [block]
(let [transaction-root (:transaction-root block)
transactions (:transactions block)]
(= transaction-root
(transactions->root transactions))))
(defn- find-previous-block [chain target]
(let [blocks (tree->blocks chain)]
(first (filter #(parent? % target) blocks))))
(defn correct-difficulty? [chain block]
(when-let [candidate-block (find-previous-block chain block)]
(let [difficulty (difficulty block)
expected-difficulty (calculate-difficulty candidate-block (map :time (best-chain chain)))]
(= difficulty
expected-difficulty))))
(defn valid? [blockchain max-threshold ledger block halving-frequency base-block-reward]
(and
(not (empty? (:transactions block)))
(valid-proof-of-work? block max-threshold)
(reasonable-time? (best-chain blockchain) block)
(proper-transactions? ledger block halving-frequency base-block-reward)
(valid-transaction-root? block)
(correct-difficulty? blockchain block)))
|
c80ea9b7b0df7189bcee27c8dd16e767535296f1795c94dcc9324ba935a76648 | bhauman/advent-of-clojure | day17.clj | (ns advent-2015.day17)
(def prob17 [43 3 4 10 21 44 4 6 47 41 34 17 17 44 36 31 46 9 27 38])
(def p [20, 15, 10, 5, 5])
(def find-combos
  ;; Returns every combination (as a seq) of `items` summing exactly to
  ;; `target`.  Callers pass items sorted in descending order; the
  ;; destructuring walk over successive tails avoids duplicate combos.
  (memoize
   (fn [target items]
     ;; Base case: a zero target is satisfied by the empty combination.
     ;; BUG FIX: this previously tested (zero? 0), which is always true,
     ;; so every call short-circuited to [[]] regardless of input.
     (if (zero? target)
       [[]]
       (mapcat
        (fn [[x & xs]]
          ;; Take x, then solve the reduced target using only later items.
          (map #(cons x %)
               (find-combos (- target x) xs)))
        (take-while not-empty
                    (iterate rest (filter #(<= % target) items))))))))
#_(time
(count (find-combos 150 (reverse (sort prob17)))))
part 2
;; find the minimum number of containers
#_(reduce min (map count (find-combos 150 (reverse (sort prob17)))))
;; find the number of ways that the can be used
#_(count (filter #(= (count %) 4) (find-combos 150 (reverse (sort prob17)))))
| null | https://raw.githubusercontent.com/bhauman/advent-of-clojure/856763baf45bf7bf452ffd304dc1b89f9bc879a6/src/advent-2015/day17.clj | clojure | find the minimum number of containers
find the number of ways that the can be used | (ns advent-2015.day17)
(def prob17 [43 3 4 10 21 44 4 6 47 41 34 17 17 44 36 31 46 9 27 38])
(def p [20, 15, 10, 5, 5])
(def find-combos
  ;; Returns every combination (as a seq) of `items` summing exactly to
  ;; `target`.  Callers pass items sorted in descending order; the
  ;; destructuring walk over successive tails avoids duplicate combos.
  (memoize
   (fn [target items]
     ;; Base case: a zero target is satisfied by the empty combination.
     ;; BUG FIX: this previously tested (zero? 0), which is always true,
     ;; so every call short-circuited to [[]] regardless of input.
     (if (zero? target)
       [[]]
       (mapcat
        (fn [[x & xs]]
          ;; Take x, then solve the reduced target using only later items.
          (map #(cons x %)
               (find-combos (- target x) xs)))
        (take-while not-empty
                    (iterate rest (filter #(<= % target) items))))))))
#_(time
(count (find-combos 150 (reverse (sort prob17)))))
part 2
#_(reduce min (map count (find-combos 150 (reverse (sort prob17)))))
#_(count (filter #(= (count %) 4) (find-combos 150 (reverse (sort prob17)))))
|
70aa141e3b0ee94dfa7cd54a0f42048292f2bd2065090a162d1ec2f01743222d | MinaProtocol/mina | nat.mli | * Representation of naturals for
* { 1 Type definitions }
(** [z] is uninhabited *)
type z = Z of z
type 'a s = Z | S of 'a
type _ t = Z : z t | S : 'n t -> 'n s t
type 'a nat = 'a t
type e = T : 'n nat -> e
(** {1 Modules} *)
module type Intf = sig
type n
val n : n t
end
module Adds : sig
type ('a, 'b, 'c) t =
| Z : (z, 'n, 'n) t
| S : ('a, 'b, 'c) t -> ('a s, 'b, 'c s) t
val add_zr : 'n nat -> ('n, z, 'n) t
end
module Lte : sig
type (_, _) t = Z : (z, 'a) t | S : ('n, 'm) t -> ('n s, 'm s) t
val refl : 'n nat -> ('n, 'n) t
val trans : ('a, 'b) t -> ('b, 'c) t -> ('a, 'c) t
end
module Add : sig
module type Intf = sig
type _ plus_n
type n
val eq : (n, z plus_n) Core_kernel.Type_equal.t
val n : z plus_n t
val add : 'm nat -> 'm plus_n nat * (z plus_n, 'm, 'm plus_n) Adds.t
end
module type Intf_transparent = sig
type _ plus_n
type n = z plus_n
val eq : (n, n) Base.Type_equal.t
val n : z plus_n nat
val add : 'm nat -> 'm plus_n nat * (z plus_n, 'm, 'm plus_n) Adds.t
end
val n : 'n. (module Intf with type n = 'n) -> 'n nat
val create : 'n nat -> (module Intf with type n = 'n)
end
module type I = Add.Intf_transparent
* { 2 Module encoding naturals }
module N0 : I with type 'a plus_n = 'a
module N1 : I with type 'a plus_n = 'a s
module N2 : I with type 'a plus_n = 'a N1.plus_n s
module N3 : I with type 'a plus_n = 'a N2.plus_n s
module N4 : I with type 'a plus_n = 'a N3.plus_n s
module N5 : I with type 'a plus_n = 'a N4.plus_n s
module N6 : I with type 'a plus_n = 'a N5.plus_n s
module N7 : I with type 'a plus_n = 'a N6.plus_n s
module N8 : I with type 'a plus_n = 'a N7.plus_n s
module N9 : I with type 'a plus_n = 'a N8.plus_n s
module N10 : I with type 'a plus_n = 'a N9.plus_n s
module N11 : I with type 'a plus_n = 'a N10.plus_n s
module N12 : I with type 'a plus_n = 'a N11.plus_n s
module N13 : I with type 'a plus_n = 'a N12.plus_n s
module N14 : I with type 'a plus_n = 'a N13.plus_n s
module N15 : I with type 'a plus_n = 'a N14.plus_n s
module N16 : I with type 'a plus_n = 'a N15.plus_n s
module N17 : I with type 'a plus_n = 'a N16.plus_n s
module N18 : I with type 'a plus_n = 'a N17.plus_n s
module N19 : I with type 'a plus_n = 'a N18.plus_n s
module N20 : I with type 'a plus_n = 'a N19.plus_n s
module N21 : I with type 'a plus_n = 'a N20.plus_n s
module N22 : I with type 'a plus_n = 'a N21.plus_n s
module N23 : I with type 'a plus_n = 'a N22.plus_n s
module N24 : I with type 'a plus_n = 'a N23.plus_n s
module N25 : I with type 'a plus_n = 'a N24.plus_n s
module N26 : I with type 'a plus_n = 'a N25.plus_n s
module N27 : I with type 'a plus_n = 'a N26.plus_n s
module N28 : I with type 'a plus_n = 'a N27.plus_n s
module N29 : I with type 'a plus_n = 'a N28.plus_n s
module N30 : I with type 'a plus_n = 'a N29.plus_n s
module N31 : I with type 'a plus_n = 'a N30.plus_n s
module N32 : I with type 'a plus_n = 'a N31.plus_n s
module N33 : I with type 'a plus_n = 'a N32.plus_n s
module N34 : I with type 'a plus_n = 'a N33.plus_n s
module N35 : I with type 'a plus_n = 'a N34.plus_n s
module N36 : I with type 'a plus_n = 'a N35.plus_n s
module N37 : I with type 'a plus_n = 'a N36.plus_n s
module N38 : I with type 'a plus_n = 'a N37.plus_n s
module N39 : I with type 'a plus_n = 'a N38.plus_n s
module N40 : I with type 'a plus_n = 'a N39.plus_n s
module N41 : I with type 'a plus_n = 'a N40.plus_n s
module N42 : I with type 'a plus_n = 'a N41.plus_n s
module N43 : I with type 'a plus_n = 'a N42.plus_n s
module N44 : I with type 'a plus_n = 'a N43.plus_n s
module N45 : I with type 'a plus_n = 'a N44.plus_n s
module N46 : I with type 'a plus_n = 'a N45.plus_n s
module N47 : I with type 'a plus_n = 'a N46.plus_n s
module N48 : I with type 'a plus_n = 'a N47.plus_n s
module Empty : sig
type t = T of t
val elim : t -> 'a
end
module Not : sig
type 'a t = 'a -> Empty.t
end
* { 1 Functions }
val to_int : 'n. 'n t -> int
val of_int : int -> e
val lte_exn : 'a nat -> 'b nat -> ('a, 'b) Lte.t
val eq_exn : 'n 'm. 'n nat -> 'm nat -> ('n, 'm) Core_kernel.Type_equal.t
val compare :
'n 'm. 'n t -> 'm t -> [ `Lte of ('n, 'm) Lte.t | `Gt of ('n, 'm) Lte.t Not.t ]
val gt_implies_gte :
'n 'm. 'n nat -> 'm nat -> ('n, 'm) Lte.t Not.t -> ('m, 'n) Lte.t
| null | https://raw.githubusercontent.com/MinaProtocol/mina/c824be7d80db1d290e0d48cbc920182d07de0330/src/lib/pickles_types/nat.mli | ocaml | * [z] is uninhabited
* {1 Modules} | * Representation of naturals for
* { 1 Type definitions }
type z = Z of z
type 'a s = Z | S of 'a
type _ t = Z : z t | S : 'n t -> 'n s t
type 'a nat = 'a t
type e = T : 'n nat -> e
module type Intf = sig
type n
val n : n t
end
module Adds : sig
type ('a, 'b, 'c) t =
| Z : (z, 'n, 'n) t
| S : ('a, 'b, 'c) t -> ('a s, 'b, 'c s) t
val add_zr : 'n nat -> ('n, z, 'n) t
end
module Lte : sig
type (_, _) t = Z : (z, 'a) t | S : ('n, 'm) t -> ('n s, 'm s) t
val refl : 'n nat -> ('n, 'n) t
val trans : ('a, 'b) t -> ('b, 'c) t -> ('a, 'c) t
end
module Add : sig
module type Intf = sig
type _ plus_n
type n
val eq : (n, z plus_n) Core_kernel.Type_equal.t
val n : z plus_n t
val add : 'm nat -> 'm plus_n nat * (z plus_n, 'm, 'm plus_n) Adds.t
end
module type Intf_transparent = sig
type _ plus_n
type n = z plus_n
val eq : (n, n) Base.Type_equal.t
val n : z plus_n nat
val add : 'm nat -> 'm plus_n nat * (z plus_n, 'm, 'm plus_n) Adds.t
end
val n : 'n. (module Intf with type n = 'n) -> 'n nat
val create : 'n nat -> (module Intf with type n = 'n)
end
module type I = Add.Intf_transparent
* { 2 Module encoding naturals }
module N0 : I with type 'a plus_n = 'a
module N1 : I with type 'a plus_n = 'a s
module N2 : I with type 'a plus_n = 'a N1.plus_n s
module N3 : I with type 'a plus_n = 'a N2.plus_n s
module N4 : I with type 'a plus_n = 'a N3.plus_n s
module N5 : I with type 'a plus_n = 'a N4.plus_n s
module N6 : I with type 'a plus_n = 'a N5.plus_n s
module N7 : I with type 'a plus_n = 'a N6.plus_n s
module N8 : I with type 'a plus_n = 'a N7.plus_n s
module N9 : I with type 'a plus_n = 'a N8.plus_n s
module N10 : I with type 'a plus_n = 'a N9.plus_n s
module N11 : I with type 'a plus_n = 'a N10.plus_n s
module N12 : I with type 'a plus_n = 'a N11.plus_n s
module N13 : I with type 'a plus_n = 'a N12.plus_n s
module N14 : I with type 'a plus_n = 'a N13.plus_n s
module N15 : I with type 'a plus_n = 'a N14.plus_n s
module N16 : I with type 'a plus_n = 'a N15.plus_n s
module N17 : I with type 'a plus_n = 'a N16.plus_n s
module N18 : I with type 'a plus_n = 'a N17.plus_n s
module N19 : I with type 'a plus_n = 'a N18.plus_n s
module N20 : I with type 'a plus_n = 'a N19.plus_n s
module N21 : I with type 'a plus_n = 'a N20.plus_n s
module N22 : I with type 'a plus_n = 'a N21.plus_n s
module N23 : I with type 'a plus_n = 'a N22.plus_n s
module N24 : I with type 'a plus_n = 'a N23.plus_n s
module N25 : I with type 'a plus_n = 'a N24.plus_n s
module N26 : I with type 'a plus_n = 'a N25.plus_n s
module N27 : I with type 'a plus_n = 'a N26.plus_n s
module N28 : I with type 'a plus_n = 'a N27.plus_n s
module N29 : I with type 'a plus_n = 'a N28.plus_n s
module N30 : I with type 'a plus_n = 'a N29.plus_n s
module N31 : I with type 'a plus_n = 'a N30.plus_n s
module N32 : I with type 'a plus_n = 'a N31.plus_n s
module N33 : I with type 'a plus_n = 'a N32.plus_n s
module N34 : I with type 'a plus_n = 'a N33.plus_n s
module N35 : I with type 'a plus_n = 'a N34.plus_n s
module N36 : I with type 'a plus_n = 'a N35.plus_n s
module N37 : I with type 'a plus_n = 'a N36.plus_n s
module N38 : I with type 'a plus_n = 'a N37.plus_n s
module N39 : I with type 'a plus_n = 'a N38.plus_n s
module N40 : I with type 'a plus_n = 'a N39.plus_n s
module N41 : I with type 'a plus_n = 'a N40.plus_n s
module N42 : I with type 'a plus_n = 'a N41.plus_n s
module N43 : I with type 'a plus_n = 'a N42.plus_n s
module N44 : I with type 'a plus_n = 'a N43.plus_n s
module N45 : I with type 'a plus_n = 'a N44.plus_n s
module N46 : I with type 'a plus_n = 'a N45.plus_n s
module N47 : I with type 'a plus_n = 'a N46.plus_n s
module N48 : I with type 'a plus_n = 'a N47.plus_n s
module Empty : sig
type t = T of t
val elim : t -> 'a
end
module Not : sig
type 'a t = 'a -> Empty.t
end
* { 1 Functions }
val to_int : 'n. 'n t -> int
val of_int : int -> e
val lte_exn : 'a nat -> 'b nat -> ('a, 'b) Lte.t
val eq_exn : 'n 'm. 'n nat -> 'm nat -> ('n, 'm) Core_kernel.Type_equal.t
val compare :
'n 'm. 'n t -> 'm t -> [ `Lte of ('n, 'm) Lte.t | `Gt of ('n, 'm) Lte.t Not.t ]
val gt_implies_gte :
'n 'm. 'n nat -> 'm nat -> ('n, 'm) Lte.t Not.t -> ('m, 'n) Lte.t
|
9f03d3a5b577fc063f6cbc6a7d98f93daade86408f2cb04ebb79f0204e3ea0e8 | Clojure2D/clojure2d-examples | camera.clj | (ns rt4.the-next-week.ch05b.camera
(:require [rt4.the-next-week.ch05b.ray :as ray]
[fastmath.vector :as v]
[fastmath.core :as m]
[rt4.common :as common]
[fastmath.random :as r])
(:import [fastmath.vector Vec2]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defprotocol CameraProto
(get-ray [camera s t]))
(defrecord Camera [origin lower-left-corner horizontal vertical
u v w ^double lens-radius]
CameraProto
(get-ray [_ s t]
(let [^Vec2 rd (v/mult (common/random-in-unit-disc) lens-radius)
offset (v/add (v/mult u (.x rd))
(v/mult v (.y rd)))
ray-time (r/drand)]
(ray/ray (v/add origin offset)
(-> lower-left-corner
(v/add (v/mult horizontal s))
(v/add (v/mult vertical t))
(v/sub origin)
(v/sub offset))
ray-time))))
(def default-config {:vfov 40.0 :aspect-ratio (/ 16.0 9.0)
:lookfrom (v/vec3 0.0 0.0 -1.0)
:lookat (v/vec3 0.0 0.0 0.0)
:vup (v/vec3 0.0 1.0 0.0)
:aperture 0.0
:focus-dist 10.0})
(defn camera
([] (camera {}))
([config]
(let [{:keys [^double vfov ^double aspect-ratio
^double aperture ^double focus-dist
lookfrom lookat vup]} (merge default-config config)
theta (m/radians vfov)
h (m/tan (/ theta 2.0))
viewport-height (* 2.0 h)
viewport-width (* aspect-ratio viewport-height)
w (v/normalize (v/sub lookfrom lookat))
u (v/normalize (v/cross vup w))
v (v/cross w u)
origin lookfrom
horizontal (v/mult u (* focus-dist viewport-width))
vertical (v/mult v (* focus-dist viewport-height))
lower-left-corner (-> origin
(v/sub (v/div horizontal 2.0))
(v/sub (v/div vertical 2.0))
(v/sub (v/mult w focus-dist)))
lens-radius (/ aperture 2.0)]
(->Camera origin lower-left-corner horizontal vertical u v w lens-radius))))
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/ead92d6f17744b91070e6308157364ad4eab8a1b/src/rt4/the_next_week/ch05b/camera.clj | clojure | (ns rt4.the-next-week.ch05b.camera
(:require [rt4.the-next-week.ch05b.ray :as ray]
[fastmath.vector :as v]
[fastmath.core :as m]
[rt4.common :as common]
[fastmath.random :as r])
(:import [fastmath.vector Vec2]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defprotocol CameraProto
(get-ray [camera s t]))
(defrecord Camera [origin lower-left-corner horizontal vertical
u v w ^double lens-radius]
CameraProto
(get-ray [_ s t]
(let [^Vec2 rd (v/mult (common/random-in-unit-disc) lens-radius)
offset (v/add (v/mult u (.x rd))
(v/mult v (.y rd)))
ray-time (r/drand)]
(ray/ray (v/add origin offset)
(-> lower-left-corner
(v/add (v/mult horizontal s))
(v/add (v/mult vertical t))
(v/sub origin)
(v/sub offset))
ray-time))))
(def default-config {:vfov 40.0 :aspect-ratio (/ 16.0 9.0)
:lookfrom (v/vec3 0.0 0.0 -1.0)
:lookat (v/vec3 0.0 0.0 0.0)
:vup (v/vec3 0.0 1.0 0.0)
:aperture 0.0
:focus-dist 10.0})
(defn camera
([] (camera {}))
([config]
(let [{:keys [^double vfov ^double aspect-ratio
^double aperture ^double focus-dist
lookfrom lookat vup]} (merge default-config config)
theta (m/radians vfov)
h (m/tan (/ theta 2.0))
viewport-height (* 2.0 h)
viewport-width (* aspect-ratio viewport-height)
w (v/normalize (v/sub lookfrom lookat))
u (v/normalize (v/cross vup w))
v (v/cross w u)
origin lookfrom
horizontal (v/mult u (* focus-dist viewport-width))
vertical (v/mult v (* focus-dist viewport-height))
lower-left-corner (-> origin
(v/sub (v/div horizontal 2.0))
(v/sub (v/div vertical 2.0))
(v/sub (v/mult w focus-dist)))
lens-radius (/ aperture 2.0)]
(->Camera origin lower-left-corner horizontal vertical u v w lens-radius))))
|
|
b960016dcfe2cc69a054a1956b8a5687838a3fd38ae3c931b74de9a7fe248d75 | kuribas/cubicbezier | Outline.hs | -- | Offsetting bezier curves and stroking curves.
module Geom2D.CubicBezier.Outline
(bezierOffset, bezierOffsetPoint)
where
import Geom2D
import Geom2D.CubicBezier.Basic
import Geom2D.CubicBezier.Approximate
offsetPoint :: (Floating a) => a -> Point a -> Point a -> Point a
offsetPoint dist start tangent =
start ^+^ (rotate90L $* dist *^ normVector tangent)
bezierOffsetPoint :: CubicBezier Double -> Double -> Double -> (DPoint, DPoint)
bezierOffsetPoint cb dist t = (offsetPoint dist p p', p')
where (p, p') = evalBezierDeriv cb t
-- | Calculate an offset path from the bezier curve to within
-- tolerance. If the distance is positive offset to the left,
-- otherwise to the right. A smaller tolerance may require more bezier
-- curves in the path to approximate the offset curve
bezierOffset :: CubicBezier Double -- ^ The curve
-> Double -- ^ Offset distance.
^ maximum subcurves
-> Double -- ^ Tolerance.
-> [CubicBezier Double] -- ^ The offset curve
bezierOffset cb dist (Just m) tol =
approximatePathMax m (bezierOffsetPoint cb dist) 15 tol 0 1 False
bezierOffset cb dist Nothing tol =
approximatePath (bezierOffsetPoint cb dist) 15 tol 0 1 False
| null | https://raw.githubusercontent.com/kuribas/cubicbezier/52da0941ba1deb33c06a2edcfa279bace0e44075/Geom2D/CubicBezier/Outline.hs | haskell | | Offsetting bezier curves and stroking curves.
| Calculate an offset path from the bezier curve to within
tolerance. If the distance is positive offset to the left,
otherwise to the right. A smaller tolerance may require more bezier
curves in the path to approximate the offset curve
^ The curve
^ Offset distance.
^ Tolerance.
^ The offset curve |
module Geom2D.CubicBezier.Outline
(bezierOffset, bezierOffsetPoint)
where
import Geom2D
import Geom2D.CubicBezier.Basic
import Geom2D.CubicBezier.Approximate
offsetPoint :: (Floating a) => a -> Point a -> Point a -> Point a
offsetPoint dist start tangent =
start ^+^ (rotate90L $* dist *^ normVector tangent)
bezierOffsetPoint :: CubicBezier Double -> Double -> Double -> (DPoint, DPoint)
bezierOffsetPoint cb dist t = (offsetPoint dist p p', p')
where (p, p') = evalBezierDeriv cb t
^ maximum subcurves
bezierOffset cb dist (Just m) tol =
approximatePathMax m (bezierOffsetPoint cb dist) 15 tol 0 1 False
bezierOffset cb dist Nothing tol =
approximatePath (bezierOffsetPoint cb dist) 15 tol 0 1 False
|
676424a4eda5b99b5fe5caed105fee5efa515f0c6e05c1d8095762609a824cef | haskell-webgear/webgear | Status.hs | # OPTIONS_GHC -Wno - orphans #
| OpenApi implementation of ' Status ' trait .
module WebGear.OpenApi.Trait.Status where
import qualified Network.HTTP.Types as HTTP
import WebGear.Core.Response (Response)
import WebGear.Core.Trait (Linked, Set, setTrait)
import WebGear.Core.Trait.Status (Status (..))
import WebGear.OpenApi.Handler (DocNode (DocStatus), OpenApiHandler (..), singletonNode)
instance Set (OpenApiHandler m) Status Response where
# INLINE setTrait #
setTrait ::
Status ->
(Linked ts Response -> Response -> HTTP.Status -> Linked (Status : ts) Response) ->
OpenApiHandler m (Linked ts Response, HTTP.Status) (Linked (Status : ts) Response)
setTrait (Status status) _ = OpenApiHandler $ singletonNode (DocStatus status)
| null | https://raw.githubusercontent.com/haskell-webgear/webgear/52e90e28d81e4ce6d7c8e63b3f9769f6629b031f/webgear-openapi/src/WebGear/OpenApi/Trait/Status.hs | haskell | # OPTIONS_GHC -Wno - orphans #
| OpenApi implementation of ' Status ' trait .
module WebGear.OpenApi.Trait.Status where
import qualified Network.HTTP.Types as HTTP
import WebGear.Core.Response (Response)
import WebGear.Core.Trait (Linked, Set, setTrait)
import WebGear.Core.Trait.Status (Status (..))
import WebGear.OpenApi.Handler (DocNode (DocStatus), OpenApiHandler (..), singletonNode)
instance Set (OpenApiHandler m) Status Response where
# INLINE setTrait #
setTrait ::
Status ->
(Linked ts Response -> Response -> HTTP.Status -> Linked (Status : ts) Response) ->
OpenApiHandler m (Linked ts Response, HTTP.Status) (Linked (Status : ts) Response)
setTrait (Status status) _ = OpenApiHandler $ singletonNode (DocStatus status)
|
|
b125e80db53f577a92e6ed2c61bb4bbf35f64550298c9c668cfb067c9401bff6 | tov/dssl2 | class-posn.rkt | #lang dssl2
# A Posn that can move vertically but is fixed horizontally.
class Posn:
let x_: num?
let y_: num?
def __init__(foo, x, y):
foo.x_ = x
foo.y_ = y
def x(self): self.x_
def _x!(it, nx): it.x_ = nx # private!
def y!(self, ny): self.y_ = ny
def y(bees): bees.y_
def get_self(this): this
let p
p = Posn(3, 4)
assert Posn?(p)
assert 3 == p.x()
assert 4 == p.y()
p.y!(10)
assert 3 == p.x()
assert 10 == p.y()
p = Posn(3, 4)
assert_error p.x_
def assign_5():
p.x = 5
assert_error assign_5()
assert_error p._x!
p = Posn(3, 4)
let get_y = p.y
let set_y = p.y!
assert 4 == get_y()
set_y(5)
assert 5 == get_y()
| null | https://raw.githubusercontent.com/tov/dssl2/105d18069465781bd9b87466f8336d5ce9e9a0f3/test/dssl2/class-posn.rkt | racket | #lang dssl2
# A Posn that can move vertically but is fixed horizontally.
class Posn:
let x_: num?
let y_: num?
def __init__(foo, x, y):
foo.x_ = x
foo.y_ = y
def x(self): self.x_
def _x!(it, nx): it.x_ = nx # private!
def y!(self, ny): self.y_ = ny
def y(bees): bees.y_
def get_self(this): this
let p
p = Posn(3, 4)
assert Posn?(p)
assert 3 == p.x()
assert 4 == p.y()
p.y!(10)
assert 3 == p.x()
assert 10 == p.y()
p = Posn(3, 4)
assert_error p.x_
def assign_5():
p.x = 5
assert_error assign_5()
assert_error p._x!
p = Posn(3, 4)
let get_y = p.y
let set_y = p.y!
assert 4 == get_y()
set_y(5)
assert 5 == get_y()
|
|
b4099fd371df3fb3f90a1842022fadba6295887588548f0d56dda2949dd61c62 | flora-pm/flora-server | Component.hs | # LANGUAGE OverloadedLists #
# LANGUAGE QuasiQuotes #
module Flora.Model.Package.Component
( ComponentId (..)
, PackageComponent (..)
, ComponentType (..)
, CanonicalComponent (..)
, ComponentCondition (..)
, ComponentMetadata (..)
, deterministicComponentId
)
where
import Crypto.Hash.MD5 qualified as MD5
import Data.Aeson
import Data.Aeson.Orphans ()
import Data.ByteString
import Data.Text (Text)
import Data.Text qualified as T
import Data.Text.Display
import Data.Text.Encoding
import Data.Text.Lazy.Builder qualified as B
import Data.UUID
import Database.PostgreSQL.Entity
import Database.PostgreSQL.Entity.Types
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromField (FromField (..), fromJSONField, returnError)
import Database.PostgreSQL.Simple.FromRow (FromRow (..))
import Database.PostgreSQL.Simple.ToField (Action (Escape), ToField (..), toJSONField)
import Database.PostgreSQL.Simple.ToRow (ToRow (..))
import GHC.Generics
import Control.DeepSeq
import Data.Data
import Data.Maybe
import Distribution.Orphans ()
import Distribution.PackageDescription qualified as Condition
import Flora.Model.Release.Types
newtype ComponentId = ComponentId {getComponentId :: UUID}
deriving stock (Generic)
deriving
(Eq, Ord, Show, FromField, ToField, FromJSON, ToJSON, NFData)
via UUID
deriving (Display) via ShowInstance UUID
deterministicComponentId :: ReleaseId -> CanonicalComponent -> ComponentId
deterministicComponentId releaseId canonicalForm =
ComponentId . fromJust . fromByteString . fromStrict . MD5.hash . encodeUtf8 $! concatenated
where
concatenated = display releaseId <> display canonicalForm
data ComponentType
= Library
| Executable
| TestSuite
| Benchmark
| ForeignLib
deriving stock (Eq, Ord, Show, Generic, Bounded, Enum)
deriving anyclass (NFData, FromJSON, ToJSON)
instance Display ComponentType where
displayBuilder Library = "library"
displayBuilder Executable = "executable"
displayBuilder TestSuite = "test"
displayBuilder Benchmark = "benchmark"
displayBuilder ForeignLib = "foreign-library"
instance FromField ComponentType where
fromField f Nothing = returnError UnexpectedNull f ""
fromField _ (Just bs) | Just status <- parseComponentType bs = pure status
fromField f (Just bs) = returnError ConversionFailed f $! T.unpack $! "Conversion error: Expected component to be one of " <> display @[ComponentType] [minBound .. maxBound] <> ", but instead got " <> decodeUtf8 bs
parseComponentType :: ByteString -> Maybe ComponentType
parseComponentType "library" = Just Library
parseComponentType "executable" = Just Executable
parseComponentType "test" = Just TestSuite
parseComponentType "benchmark" = Just Benchmark
parseComponentType "foreign-library" = Just ForeignLib
parseComponentType _ = Nothing
instance ToField ComponentType where
toField = Escape . encodeUtf8 . display
data CanonicalComponent = CanonicalComponent
{ componentName :: Text
, componentType :: ComponentType
}
deriving stock (Eq, Ord, Show, Generic)
deriving anyclass (NFData, FromJSON, ToJSON)
instance Display CanonicalComponent where
displayBuilder CanonicalComponent{componentName, componentType} = displayBuilder componentType <> ":" <> B.fromText componentName
data PackageComponent = PackageComponent
{ componentId :: ComponentId
, releaseId :: ReleaseId
, canonicalForm :: CanonicalComponent
, metadata :: ComponentMetadata
}
deriving stock (Eq, Show, Generic)
deriving anyclass (NFData, FromJSON, ToJSON)
deriving (Display) via ShowInstance PackageComponent
instance Entity PackageComponent where
tableName = "package_components"
primaryKey = [field| package_component_id |]
fields =
[ [field| package_component_id |]
, [field| release_id |]
, [field| component_name |]
, [field| component_type |]
, [field| component_metadata |]
]
instance ToRow PackageComponent where
toRow PackageComponent{componentId, releaseId, canonicalForm, metadata} =
let componentId' = componentId
releaseId' = releaseId
componentMetadata' = metadata
componentName' = canonicalForm.componentName
componentType' = canonicalForm.componentType
in toRow PackageComponent'{..}
instance FromRow PackageComponent where
fromRow = do
PackageComponent'{..} <- fromRow
let canonicalForm = CanonicalComponent componentName' componentType'
pure $! PackageComponent componentId' releaseId' canonicalForm componentMetadata'
-- | Data Access Object used to serialise to the DB
data PackageComponent' = PackageComponent'
{ componentId' :: ComponentId
, releaseId' :: ReleaseId
, componentName' :: Text
, componentType' :: ComponentType
, componentMetadata' :: ComponentMetadata
}
deriving stock (Eq, Show, Generic)
deriving anyclass (ToRow, FromRow)
data ComponentMetadata = ComponentMetadata
{ conditions :: [ComponentCondition]
}
deriving stock (Eq, Show, Generic, Typeable)
deriving anyclass (ToJSON, FromJSON, NFData)
instance FromField ComponentMetadata where
fromField = fromJSONField
instance ToField ComponentMetadata where
toField = toJSONField
newtype ComponentCondition = ComponentCondition (Condition.Condition Condition.ConfVar)
deriving stock (Eq, Show, Generic)
deriving anyclass (FromJSON, ToJSON, NFData)
| null | https://raw.githubusercontent.com/flora-pm/flora-server/c214c0b5d5db71a8330eb69326284be5a4d5e858/src/core/Flora/Model/Package/Component.hs | haskell | | Data Access Object used to serialise to the DB | # LANGUAGE OverloadedLists #
# LANGUAGE QuasiQuotes #
module Flora.Model.Package.Component
( ComponentId (..)
, PackageComponent (..)
, ComponentType (..)
, CanonicalComponent (..)
, ComponentCondition (..)
, ComponentMetadata (..)
, deterministicComponentId
)
where
import Crypto.Hash.MD5 qualified as MD5
import Data.Aeson
import Data.Aeson.Orphans ()
import Data.ByteString
import Data.Text (Text)
import Data.Text qualified as T
import Data.Text.Display
import Data.Text.Encoding
import Data.Text.Lazy.Builder qualified as B
import Data.UUID
import Database.PostgreSQL.Entity
import Database.PostgreSQL.Entity.Types
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromField (FromField (..), fromJSONField, returnError)
import Database.PostgreSQL.Simple.FromRow (FromRow (..))
import Database.PostgreSQL.Simple.ToField (Action (Escape), ToField (..), toJSONField)
import Database.PostgreSQL.Simple.ToRow (ToRow (..))
import GHC.Generics
import Control.DeepSeq
import Data.Data
import Data.Maybe
import Distribution.Orphans ()
import Distribution.PackageDescription qualified as Condition
import Flora.Model.Release.Types
newtype ComponentId = ComponentId {getComponentId :: UUID}
deriving stock (Generic)
deriving
(Eq, Ord, Show, FromField, ToField, FromJSON, ToJSON, NFData)
via UUID
deriving (Display) via ShowInstance UUID
deterministicComponentId :: ReleaseId -> CanonicalComponent -> ComponentId
deterministicComponentId releaseId canonicalForm =
ComponentId . fromJust . fromByteString . fromStrict . MD5.hash . encodeUtf8 $! concatenated
where
concatenated = display releaseId <> display canonicalForm
data ComponentType
= Library
| Executable
| TestSuite
| Benchmark
| ForeignLib
deriving stock (Eq, Ord, Show, Generic, Bounded, Enum)
deriving anyclass (NFData, FromJSON, ToJSON)
instance Display ComponentType where
displayBuilder Library = "library"
displayBuilder Executable = "executable"
displayBuilder TestSuite = "test"
displayBuilder Benchmark = "benchmark"
displayBuilder ForeignLib = "foreign-library"
instance FromField ComponentType where
fromField f Nothing = returnError UnexpectedNull f ""
fromField _ (Just bs) | Just status <- parseComponentType bs = pure status
fromField f (Just bs) = returnError ConversionFailed f $! T.unpack $! "Conversion error: Expected component to be one of " <> display @[ComponentType] [minBound .. maxBound] <> ", but instead got " <> decodeUtf8 bs
parseComponentType :: ByteString -> Maybe ComponentType
parseComponentType "library" = Just Library
parseComponentType "executable" = Just Executable
parseComponentType "test" = Just TestSuite
parseComponentType "benchmark" = Just Benchmark
parseComponentType "foreign-library" = Just ForeignLib
parseComponentType _ = Nothing
instance ToField ComponentType where
toField = Escape . encodeUtf8 . display
data CanonicalComponent = CanonicalComponent
{ componentName :: Text
, componentType :: ComponentType
}
deriving stock (Eq, Ord, Show, Generic)
deriving anyclass (NFData, FromJSON, ToJSON)
instance Display CanonicalComponent where
displayBuilder CanonicalComponent{componentName, componentType} = displayBuilder componentType <> ":" <> B.fromText componentName
data PackageComponent = PackageComponent
{ componentId :: ComponentId
, releaseId :: ReleaseId
, canonicalForm :: CanonicalComponent
, metadata :: ComponentMetadata
}
deriving stock (Eq, Show, Generic)
deriving anyclass (NFData, FromJSON, ToJSON)
deriving (Display) via ShowInstance PackageComponent
instance Entity PackageComponent where
tableName = "package_components"
primaryKey = [field| package_component_id |]
fields =
[ [field| package_component_id |]
, [field| release_id |]
, [field| component_name |]
, [field| component_type |]
, [field| component_metadata |]
]
instance ToRow PackageComponent where
toRow PackageComponent{componentId, releaseId, canonicalForm, metadata} =
let componentId' = componentId
releaseId' = releaseId
componentMetadata' = metadata
componentName' = canonicalForm.componentName
componentType' = canonicalForm.componentType
in toRow PackageComponent'{..}
instance FromRow PackageComponent where
fromRow = do
PackageComponent'{..} <- fromRow
let canonicalForm = CanonicalComponent componentName' componentType'
pure $! PackageComponent componentId' releaseId' canonicalForm componentMetadata'
data PackageComponent' = PackageComponent'
{ componentId' :: ComponentId
, releaseId' :: ReleaseId
, componentName' :: Text
, componentType' :: ComponentType
, componentMetadata' :: ComponentMetadata
}
deriving stock (Eq, Show, Generic)
deriving anyclass (ToRow, FromRow)
data ComponentMetadata = ComponentMetadata
{ conditions :: [ComponentCondition]
}
deriving stock (Eq, Show, Generic, Typeable)
deriving anyclass (ToJSON, FromJSON, NFData)
instance FromField ComponentMetadata where
fromField = fromJSONField
instance ToField ComponentMetadata where
toField = toJSONField
newtype ComponentCondition = ComponentCondition (Condition.Condition Condition.ConfVar)
deriving stock (Eq, Show, Generic)
deriving anyclass (FromJSON, ToJSON, NFData)
|
9f753b26206a7b8d643d06d4819bd0e7f7c6d6973415548eba6e09ebe587967b | g-andrade/fake_lager | lager_msg.erl | -module(lager_msg).
-include("lager.hrl").
%%-------------------------------------------------------------------
%% Function Exports
%%-------------------------------------------------------------------
-export([new/4, new/5]).
-export([message/1]).
-export([timestamp/1]).
-export([datetime/1]).
-export([severity/1]).
-export([severity_as_int/1]).
-export([metadata/1]).
-export([destinations/1]).
-ignore_xref(datetime/1).
-ignore_xref(destinations/1).
-ignore_xref(message/1).
-ignore_xref(metadata/1).
-ignore_xref(new/4).
-ignore_xref(new/5).
-ignore_xref(severity/1).
-ignore_xref(severity_as_int/1).
-ignore_xref(timestamp/1).
%%-------------------------------------------------------------------
Macro Definitions
%%-------------------------------------------------------------------
-define(MEGA, 1000000).
%%-------------------------------------------------------------------
%% Type Definitions
%%-------------------------------------------------------------------
-opaque lager_msg() :: logger:log_event().
-export_type([lager_msg/0]).
%%-------------------------------------------------------------------
%% Static Check Tweaks
%%-------------------------------------------------------------------
-hank([
{unnecessary_function_arguments, [
{destinations, 1, 1},
{new, 4, 4},
{new, 5, 5}
]}
]).
%%-------------------------------------------------------------------
%% Function Definitions
%%-------------------------------------------------------------------
%% create with provided timestamp, handy for testing mostly
-spec new(list(), erlang:timestamp(), lager:log_level(), [tuple()], list()) -> lager_msg().
new(_Msg, _Timestamp, none, _Metadata, _Destinations) ->
error(nosup);
new(Msg, {MSec, Sec, USec}, Level, Metadata, _Destinations) ->
Time = MSec + (Sec + (USec * ?MEGA) * ?MEGA),
Meta = maps:put(time, Time, maps:from_list(Metadata)),
#{level => Level, msg => {string, Msg}, meta => Meta}.
-spec new(list(), lager:log_level(), [tuple()], list()) -> lager_msg().
new(_Msg, none, _Metadata, _Destinations) ->
error(nosup);
new(Msg, Level, Metadata, _Destinations) ->
Time = logger:timestamp(),
Meta = maps:put(time, Time, maps:from_list(Metadata)),
#{level => Level, msg => {string, Msg}, meta => Meta}.
-spec message(lager_msg()) -> list().
message(#{msg := Msg, meta := Meta}) ->
unicode:characters_to_list(normalize_msg(Msg, Meta)).
normalize_msg({report, Report}, #{report_cb := Cb}) when is_function(Cb, 1) ->
Cb(Report);
normalize_msg({report, Report}, #{report_cb := Cb}) when is_function(Cb, 2) ->
Cb(Report, #{});
normalize_msg({report, Report}, _Meta) ->
[io_lib:fwrite("~w=~w", [Key, Value])
|| {Key, Value} <- maps:to_list(Report)];
normalize_msg({string, String}, _Meta) ->
String;
normalize_msg({Format, Data}, _Meta)
when is_atom(Format); is_list(Format); is_binary(Format) ->
io_lib:fwrite(Format, Data).
-spec timestamp(lager_msg()) -> erlang:timestamp().
timestamp(#{meta := #{time := Timestamp}}) ->
MicroSecs = Timestamp rem ?MEGA,
Secs = Timestamp div ?MEGA,
MegaSecs = Secs div ?MEGA,
{MegaSecs, Secs rem ?MEGA, MicroSecs}.
-spec datetime(lager_msg()) -> {string(), string()}.
datetime(#{meta := #{time := Timestamp}}) ->
{{Y, Mo, D}, {H, Mi, S}} = calendar:system_time_to_universal_time(Timestamp, millisecond),
DateStr = io_lib:format("~4..0B-~2..0B-~2..0B", [Y, Mo, D]),
TimeStr = io_lib:format("~2..0B:~2..0B:~2..0B", [H, Mi, S]),
{DateStr, TimeStr}.
-spec severity(lager_msg()) -> lager:log_level().
severity(#{level := Level}) ->
Level.
-spec severity_as_int(lager_msg()) -> lager:log_level_number().
severity_as_int(#{level := Level}) ->
?LEVEL2NUM(Level).
-spec metadata(lager_msg()) -> [tuple()].
metadata(#{meta := Meta}) ->
maps:to_list(Meta).
-spec destinations(lager_msg()) -> no_return().
-dialyzer({nowarn_function, destinations/1}).
destinations(_Msg) ->
error(nosup).
| null | https://raw.githubusercontent.com/g-andrade/fake_lager/ba71fd26e2415a28840162b52555e2599936acf4/src/lager_msg.erl | erlang | -------------------------------------------------------------------
Function Exports
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
Type Definitions
-------------------------------------------------------------------
-------------------------------------------------------------------
Static Check Tweaks
-------------------------------------------------------------------
-------------------------------------------------------------------
Function Definitions
-------------------------------------------------------------------
create with provided timestamp, handy for testing mostly | -module(lager_msg).
-include("lager.hrl").
-export([new/4, new/5]).
-export([message/1]).
-export([timestamp/1]).
-export([datetime/1]).
-export([severity/1]).
-export([severity_as_int/1]).
-export([metadata/1]).
-export([destinations/1]).
-ignore_xref(datetime/1).
-ignore_xref(destinations/1).
-ignore_xref(message/1).
-ignore_xref(metadata/1).
-ignore_xref(new/4).
-ignore_xref(new/5).
-ignore_xref(severity/1).
-ignore_xref(severity_as_int/1).
-ignore_xref(timestamp/1).
Macro Definitions
-define(MEGA, 1000000).
-opaque lager_msg() :: logger:log_event().
-export_type([lager_msg/0]).
-hank([
{unnecessary_function_arguments, [
{destinations, 1, 1},
{new, 4, 4},
{new, 5, 5}
]}
]).
-spec new(list(), erlang:timestamp(), lager:log_level(), [tuple()], list()) -> lager_msg().
new(_Msg, _Timestamp, none, _Metadata, _Destinations) ->
error(nosup);
new(Msg, {MSec, Sec, USec}, Level, Metadata, _Destinations) ->
Time = MSec + (Sec + (USec * ?MEGA) * ?MEGA),
Meta = maps:put(time, Time, maps:from_list(Metadata)),
#{level => Level, msg => {string, Msg}, meta => Meta}.
-spec new(list(), lager:log_level(), [tuple()], list()) -> lager_msg().
new(_Msg, none, _Metadata, _Destinations) ->
error(nosup);
new(Msg, Level, Metadata, _Destinations) ->
Time = logger:timestamp(),
Meta = maps:put(time, Time, maps:from_list(Metadata)),
#{level => Level, msg => {string, Msg}, meta => Meta}.
-spec message(lager_msg()) -> list().
message(#{msg := Msg, meta := Meta}) ->
unicode:characters_to_list(normalize_msg(Msg, Meta)).
normalize_msg({report, Report}, #{report_cb := Cb}) when is_function(Cb, 1) ->
Cb(Report);
normalize_msg({report, Report}, #{report_cb := Cb}) when is_function(Cb, 2) ->
Cb(Report, #{});
normalize_msg({report, Report}, _Meta) ->
[io_lib:fwrite("~w=~w", [Key, Value])
|| {Key, Value} <- maps:to_list(Report)];
normalize_msg({string, String}, _Meta) ->
String;
normalize_msg({Format, Data}, _Meta)
when is_atom(Format); is_list(Format); is_binary(Format) ->
io_lib:fwrite(Format, Data).
-spec timestamp(lager_msg()) -> erlang:timestamp().
timestamp(#{meta := #{time := Timestamp}}) ->
MicroSecs = Timestamp rem ?MEGA,
Secs = Timestamp div ?MEGA,
MegaSecs = Secs div ?MEGA,
{MegaSecs, Secs rem ?MEGA, MicroSecs}.
-spec datetime(lager_msg()) -> {string(), string()}.
datetime(#{meta := #{time := Timestamp}}) ->
{{Y, Mo, D}, {H, Mi, S}} = calendar:system_time_to_universal_time(Timestamp, millisecond),
DateStr = io_lib:format("~4..0B-~2..0B-~2..0B", [Y, Mo, D]),
TimeStr = io_lib:format("~2..0B:~2..0B:~2..0B", [H, Mi, S]),
{DateStr, TimeStr}.
-spec severity(lager_msg()) -> lager:log_level().
severity(#{level := Level}) ->
Level.
-spec severity_as_int(lager_msg()) -> lager:log_level_number().
severity_as_int(#{level := Level}) ->
?LEVEL2NUM(Level).
-spec metadata(lager_msg()) -> [tuple()].
metadata(#{meta := Meta}) ->
maps:to_list(Meta).
-spec destinations(lager_msg()) -> no_return().
-dialyzer({nowarn_function, destinations/1}).
destinations(_Msg) ->
error(nosup).
|
74aadfbb71bacf872f04214049fd143b93a1e2d5a0f29bd2ddad7561e889b23f | aiya000/haskell-examples | DeclareFunction.hs | # LANGUAGE TemplateHaskell #
module DeclareFunction where
import Language.Haskell.TH (Q, DecsQ, Dec(FunD), Clause(Clause), Pat(VarP, WildP), Exp(VarE, LitE), Body(NormalB), Lit(IntegerL), mkName)
-- | Create a function simply
declareFunc :: Q [Dec]
declareFunc = do
let id' = mkName "id'" -- the name of the function
x = mkName "x" -- the name of "id'"'s an argument
return [FunD id' [Clause [VarP x] (NormalB $ VarE x) []]]
-- | The const function in the compile time
DecsQ is a type synonym of Q [ Dec ]
metaConst x = do
let constX = mkName "constX"
litX = LitE . IntegerL $ fromIntegral x
return [FunD constX [Clause [WildP] (NormalB $ litX) []]]
| null | https://raw.githubusercontent.com/aiya000/haskell-examples/a337ba0e86be8bb1333e7eea852ba5fa1d177d8a/Language/Haskell/TH/DeclareFunction.hs | haskell | | Create a function simply
the name of the function
the name of "id'"'s an argument
| The const function in the compile time | # LANGUAGE TemplateHaskell #
module DeclareFunction where
import Language.Haskell.TH (Q, DecsQ, Dec(FunD), Clause(Clause), Pat(VarP, WildP), Exp(VarE, LitE), Body(NormalB), Lit(IntegerL), mkName)
declareFunc :: Q [Dec]
declareFunc = do
return [FunD id' [Clause [VarP x] (NormalB $ VarE x) []]]
DecsQ is a type synonym of Q [ Dec ]
metaConst x = do
let constX = mkName "constX"
litX = LitE . IntegerL $ fromIntegral x
return [FunD constX [Clause [WildP] (NormalB $ litX) []]]
|
38376fe90e1925906e27003320243479ba98c0f6364c4d0f979d13235b5f27fa | alphaHeavy/consul-haskell | Import.hs | -- | TODO: Document module
module Import
( module Control.Concurrent
, module Control.Monad.IO.Class
, module Control.Retry
ByteString
ByteString . Lazy
Hashmap . Strict
--, module T -- Text
, module TR -- Text.Read
, module Data.Word
, module V -- Vector
, module Network.Consul.Internal
, module Network.Consul.Types
, module Network.HTTP.Client -- (method, Manager, responseBody)
, module Network.HTTP.Types
-- functions and data types
Control . Monad
, forever
-- Data.Aeson
, Value(..)
, decode
, encode
-- Data.Text
, Text
Control . . Catch
, MonadMask
-- Data.Maybe
, catMaybes
, isJust
, listToMaybe
-- Data.Monoid
, (<>)
Network . HTTP.Client . TLS
, newTlsManager
, newTlsManagerWith
, tlsManagerSettings
-- Network.Socket
, PortNumber
-- UnliftIO
, MonadUnliftIO
, async
, cancel
, finally
, wait
, waitAnyCancel
, withAsync
) where
import Control.Concurrent hiding (killThread)
import Control.Monad (forever)
import Control.Monad.IO.Class
import Control.Monad.Catch (MonadMask)
import Control.Retry
import Data.Aeson (Value(..), decode,encode)
import Data.ByteString as B (concat)
import Data.ByteString.Lazy as BL (toStrict, fromStrict)
import Data.HashMap.Strict as H (toList)
import Data.Maybe (catMaybes, isJust, listToMaybe)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data . Text as T -- ( concat )
import Data.Text.Read as TR
import Data.Word
import Data.Vector as V (elem)
import Network.Consul.Types
import Network.HTTP.Client -- (method, Manager, responseBody)
import Network.HTTP.Client.TLS (newTlsManager, newTlsManagerWith, tlsManagerSettings)
import Network.HTTP.Types
import Network.Socket (PortNumber)
import UnliftIO (MonadUnliftIO, async, cancel, finally, wait, waitAnyCancel, withAsync)
import Network.Consul.Internal
| null | https://raw.githubusercontent.com/alphaHeavy/consul-haskell/ca39b39df7ad327b0c97536145aa658d46028a9f/src/Import.hs | haskell | | TODO: Document module
, module T -- Text
Text.Read
Vector
(method, Manager, responseBody)
functions and data types
Data.Aeson
Data.Text
Data.Maybe
Data.Monoid
Network.Socket
UnliftIO
( concat )
(method, Manager, responseBody) | module Import
( module Control.Concurrent
, module Control.Monad.IO.Class
, module Control.Retry
ByteString
ByteString . Lazy
Hashmap . Strict
, module Data.Word
, module Network.Consul.Internal
, module Network.Consul.Types
, module Network.HTTP.Types
Control . Monad
, forever
, Value(..)
, decode
, encode
, Text
Control . . Catch
, MonadMask
, catMaybes
, isJust
, listToMaybe
, (<>)
Network . HTTP.Client . TLS
, newTlsManager
, newTlsManagerWith
, tlsManagerSettings
, PortNumber
, MonadUnliftIO
, async
, cancel
, finally
, wait
, waitAnyCancel
, withAsync
) where
import Control.Concurrent hiding (killThread)
import Control.Monad (forever)
import Control.Monad.IO.Class
import Control.Monad.Catch (MonadMask)
import Control.Retry
import Data.Aeson (Value(..), decode,encode)
import Data.ByteString as B (concat)
import Data.ByteString.Lazy as BL (toStrict, fromStrict)
import Data.HashMap.Strict as H (toList)
import Data.Maybe (catMaybes, isJust, listToMaybe)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Text.Read as TR
import Data.Word
import Data.Vector as V (elem)
import Network.Consul.Types
import Network.HTTP.Client.TLS (newTlsManager, newTlsManagerWith, tlsManagerSettings)
import Network.HTTP.Types
import Network.Socket (PortNumber)
import UnliftIO (MonadUnliftIO, async, cancel, finally, wait, waitAnyCancel, withAsync)
import Network.Consul.Internal
|
9f30084cc05528757777e3930a49a461649010ed0b0e457549f58f758d8cf30a | haskell/cabal | cabal.test.hs | import Test.Cabal.Prelude
main = cabalTest $ do
tmpdir <- fmap testTmpDir getTestEnv
cabal "v2-sdist" ["--list-only", "--output-directory", tmpdir, "t7028"]
| null | https://raw.githubusercontent.com/haskell/cabal/d2bff20fae387cffef4425820b6c1975f1821188/cabal-testsuite/PackageTests/SDist/T7028/cabal.test.hs | haskell | import Test.Cabal.Prelude
main = cabalTest $ do
tmpdir <- fmap testTmpDir getTestEnv
cabal "v2-sdist" ["--list-only", "--output-directory", tmpdir, "t7028"]
|
|
ca5b05d0a1f3311511858c8a92716f33d5b2b23acf6a0e13c4cebc1d01889233 | spell-music/csound-expression | Cab.hs | module Csound.Typed.Gui.Cab(
Cab, CabProp, Col(..), cabbage,
-- * Widgets
button, filebutton, infobutton, checkbox, combobox, csoundoutput, encoder, gentable,
hrange, vrange, form, groupbox, image, keyboard, label, hslider, vslider,
rslider, soundfiler, signaldisplay, textbox, texteditor, xypad,
-- * Properties
bounds, channel, text1, text2, value, colour, colour0, colour1, backgroundcolour, textcolour, trackercolour, outlinecolour,
fontcolour, fontcolour0, fontcolour1, latched, identchannel, rotate, alpha, visible, caption, widgetarray, popuptext,
active, svgfile, populate, mode, file, shape, corners, channeltype, align, sliderincr, max, min, textbox', trackerthickness,
linethickness, range, range2, size, pluginid, guirefresh, plant, child, show, middlec, keywidth, scrollbars, fontstyle,
scrubberpos, zoom, displaytype, updaterate, wrap
) where
import Prelude hiding (show, min, max)
import Csound.Typed.Gui.Cabbage.Cabbage
import qualified Csound.Typed.GlobalState as G
import Csound.Typed.GlobalState(SE)
cabbage :: Cab -> SE ()
cabbage = G.geToSe . G.cabbage
| null | https://raw.githubusercontent.com/spell-music/csound-expression/29c1611172153347b16d0b6b133e4db61a7218d5/csound-expression-typed/src/Csound/Typed/Gui/Cab.hs | haskell | * Widgets
* Properties | module Csound.Typed.Gui.Cab(
Cab, CabProp, Col(..), cabbage,
button, filebutton, infobutton, checkbox, combobox, csoundoutput, encoder, gentable,
hrange, vrange, form, groupbox, image, keyboard, label, hslider, vslider,
rslider, soundfiler, signaldisplay, textbox, texteditor, xypad,
bounds, channel, text1, text2, value, colour, colour0, colour1, backgroundcolour, textcolour, trackercolour, outlinecolour,
fontcolour, fontcolour0, fontcolour1, latched, identchannel, rotate, alpha, visible, caption, widgetarray, popuptext,
active, svgfile, populate, mode, file, shape, corners, channeltype, align, sliderincr, max, min, textbox', trackerthickness,
linethickness, range, range2, size, pluginid, guirefresh, plant, child, show, middlec, keywidth, scrollbars, fontstyle,
scrubberpos, zoom, displaytype, updaterate, wrap
) where
import Prelude hiding (show, min, max)
import Csound.Typed.Gui.Cabbage.Cabbage
import qualified Csound.Typed.GlobalState as G
import Csound.Typed.GlobalState(SE)
cabbage :: Cab -> SE ()
cabbage = G.geToSe . G.cabbage
|
f141f3402a9242c7ac628994ed1283a4999f80cc314e0590472eeafa21d28020 | madnificent/SEXML | cl-attribs.lisp | cl-attribs.lisp
(in-package #:cl-attribs)
(defun pairup-list (list)
"returns a list containing lists with length of 2, made from the original list"
(loop
with results = nil
with counter = 0
while (<= counter (- (length list) 2))
do
(push (list (nth counter list) (nth (1+ counter) list)) results)
(setf counter (+ counter 2))
finally (return (reverse results))))
(defclass attributed-direct-slot (closer-mop:standard-direct-slot-definition)
((attributes :accessor attributes :initarg :attributes :initform nil)))
(defclass attributed-effective-slot (closer-mop:standard-effective-slot-definition)
((attributes :accessor attributes :initarg :attributes :initform nil)))
(defclass attributes-class (standard-class)
()
(:documentation "This is the metaclass used for managing attributes-object"))
(defmethod closer-mop:validate-superclass ((c attributes-class) (sc standard-class)) t)
(defmethod closer-mop:direct-slot-definition-class ((class attributes-class) &rest initargs)
(declare (ignore initargs))
(find-class 'attributed-direct-slot))
(defun attributed-slot-p (slot)
(or (typep slot 'attributed-direct-slot)
(typep slot 'attributed-effective-slot)))
(defmethod closer-mop:compute-effective-slot-definition ((class attributes-class) name direct-slots)
(if (every #'attributed-slot-p direct-slots)
(let ((normal-slot (call-next-method))
(all-attributes (compute-attribute-inheritance direct-slots)))
(setf all-attributes (append (mapcar #'eval all-attributes)))
(make-instance 'attributed-effective-slot
:attributes (copy-tree all-attributes)
:allocation-class class
:allocation (closer-mop:slot-definition-allocation normal-slot)
:class class
:documentation (documentation normal-slot t)
:initargs (closer-mop:slot-definition-initargs normal-slot)
:writers (closer-mop:slot-definition-writers normal-slot)
:readers (closer-mop:slot-definition-readers normal-slot)
:initfunction (closer-mop:slot-definition-initfunction normal-slot)
:initform (closer-mop:slot-definition-initform normal-slot)
:name name))
(call-next-method)))
(defun compute-attribute-inheritance (direct-slots)
"removes duplicated attributes from the list with the latest value for each attribute"
(let* ((all-attribs (reduce #'append (reverse direct-slots) :key 'attributes))
(filtered-pairs nil)
(attrib-pairs (pairup-list all-attribs)))
(loop for item in attrib-pairs
do (if (find (car item) filtered-pairs :key #'car)
(setf (cadr (assoc (car item) filtered-pairs)) (cadr item))
(push item filtered-pairs))
finally (return (reduce #'append filtered-pairs)))))
(defmethod closer-mop:compute-slots ((class attributes-class))
(let* ((slots (call-next-method))
(attributes (copy-tree (loop for slot in slots
if (attributed-slot-p slot)
collect (cons (closer-mop:slot-definition-name slot) (list (attributes slot)))))))
(cons (make-instance 'closer-mop:standard-effective-slot-definition
:name '%all-attributes
:initform attributes
:initfunction (lambda () attributes))
slots)))
(defclass attributes-object ()
()
(:metaclass attributes-class)
(:documentation "This is the class that provides functionality for getting and setting attributes of its slots.
use this format to set slot-value and slot-attribs at once:
(setf (some-slot object) '(:value/attribs (value (:attrib-a attrib-a-value))))"))
(defgeneric slot-attrib (object slot-name attrib-keyword)
(:documentation "Returns the attribute value of the given slot of the given object"))
(defgeneric slot-attribs (object slot-name)
(:documentation "Returns a list of all attributes and values of the given slot from the object"))
(defmethod slot-attrib ((object attributes-object) (slot-name symbol) (attrib-keyword symbol))
(getf (cadr (assoc slot-name (slot-value object '%all-attributes))) attrib-keyword))
(defmethod slot-attribs ((object attributes-object) (slot-name symbol))
(cadr (assoc slot-name (slot-value object '%all-attributes))))
(defun find-slot-initarg-by-name (object slot-name)
(let ((all-slots (closer-mop:class-slots (class-of object))))
(dolist (slot-def all-slots)
(if (eq (closer-mop:slot-definition-name slot-def) slot-name)
(return-from find-slot-initarg-by-name (car (closer-mop:slot-definition-initargs slot-def)))))))
(defgeneric (setf slot-attrib) (new-value object slot-name attrib-name))
(defgeneric (setf slot-attribs) (new-value object slot-name))
(defmethod (setf slot-attrib) (new-value (object attributes-object) (slot-name symbol) (attrib-keyword symbol))
(if (slot-boundp object '%all-attributes)
(setf (getf (cadr (assoc slot-name (slot-value object '%all-attributes))) attrib-keyword) new-value)))
(defmethod (setf slot-attribs) (new-value (object attributes-object) (slot-name symbol))
(if (slot-boundp object '%all-attributes)
(setf (cadr (assoc slot-name (slot-value object '%all-attributes))) (rest new-value))))
(defmethod (setf closer-mop:slot-value-using-class) :around (new-value (class attributes-class) (object attributes-object) (slotd attributed-effective-slot))
(if (and (slot-boundp object '%all-attributes) (listp new-value) (equal (car new-value) :value/attribs))
(let ((value (second new-value))
(attrib-pairs (pairup-list (third new-value)))
(slot-name (closer-mop:slot-definition-name slotd)))
(setf (slot-value object slot-name) value)
(dolist (pair attrib-pairs)
(setf (slot-attrib object slot-name (car pair)) (second pair)))
new-value)
(call-next-method)))
(defmethod initialize-instance :around ((object attributes-object) &rest initargs)
(declare (ignore initargs))
(let* ((all-slots (closer-mop:class-slots (class-of object)))
(attributes-slot (find '%all-attributes all-slots :key #'closer-mop:slot-definition-name))
(inherited-attributes (copy-tree (funcall (closer-mop:slot-definition-initfunction attributes-slot)))))
(setf (slot-value object '%all-attributes) inherited-attributes)
(call-next-method)))
| null | https://raw.githubusercontent.com/madnificent/SEXML/c4db46adb60674e81273adbaac7b5f54dd79a438/contrib/sexml-objects/cl-attribs/cl-attribs.lisp | lisp | cl-attribs.lisp
(in-package #:cl-attribs)
(defun pairup-list (list)
"returns a list containing lists with length of 2, made from the original list"
(loop
with results = nil
with counter = 0
while (<= counter (- (length list) 2))
do
(push (list (nth counter list) (nth (1+ counter) list)) results)
(setf counter (+ counter 2))
finally (return (reverse results))))
(defclass attributed-direct-slot (closer-mop:standard-direct-slot-definition)
((attributes :accessor attributes :initarg :attributes :initform nil)))
(defclass attributed-effective-slot (closer-mop:standard-effective-slot-definition)
((attributes :accessor attributes :initarg :attributes :initform nil)))
(defclass attributes-class (standard-class)
()
(:documentation "This is the metaclass used for managing attributes-object"))
(defmethod closer-mop:validate-superclass ((c attributes-class) (sc standard-class)) t)
(defmethod closer-mop:direct-slot-definition-class ((class attributes-class) &rest initargs)
(declare (ignore initargs))
(find-class 'attributed-direct-slot))
(defun attributed-slot-p (slot)
(or (typep slot 'attributed-direct-slot)
(typep slot 'attributed-effective-slot)))
(defmethod closer-mop:compute-effective-slot-definition ((class attributes-class) name direct-slots)
(if (every #'attributed-slot-p direct-slots)
(let ((normal-slot (call-next-method))
(all-attributes (compute-attribute-inheritance direct-slots)))
(setf all-attributes (append (mapcar #'eval all-attributes)))
(make-instance 'attributed-effective-slot
:attributes (copy-tree all-attributes)
:allocation-class class
:allocation (closer-mop:slot-definition-allocation normal-slot)
:class class
:documentation (documentation normal-slot t)
:initargs (closer-mop:slot-definition-initargs normal-slot)
:writers (closer-mop:slot-definition-writers normal-slot)
:readers (closer-mop:slot-definition-readers normal-slot)
:initfunction (closer-mop:slot-definition-initfunction normal-slot)
:initform (closer-mop:slot-definition-initform normal-slot)
:name name))
(call-next-method)))
(defun compute-attribute-inheritance (direct-slots)
"removes duplicated attributes from the list with the latest value for each attribute"
(let* ((all-attribs (reduce #'append (reverse direct-slots) :key 'attributes))
(filtered-pairs nil)
(attrib-pairs (pairup-list all-attribs)))
(loop for item in attrib-pairs
do (if (find (car item) filtered-pairs :key #'car)
(setf (cadr (assoc (car item) filtered-pairs)) (cadr item))
(push item filtered-pairs))
finally (return (reduce #'append filtered-pairs)))))
(defmethod closer-mop:compute-slots ((class attributes-class))
(let* ((slots (call-next-method))
(attributes (copy-tree (loop for slot in slots
if (attributed-slot-p slot)
collect (cons (closer-mop:slot-definition-name slot) (list (attributes slot)))))))
(cons (make-instance 'closer-mop:standard-effective-slot-definition
:name '%all-attributes
:initform attributes
:initfunction (lambda () attributes))
slots)))
(defclass attributes-object ()
()
(:metaclass attributes-class)
(:documentation "This is the class that provides functionality for getting and setting attributes of its slots.
use this format to set slot-value and slot-attribs at once:
(setf (some-slot object) '(:value/attribs (value (:attrib-a attrib-a-value))))"))
(defgeneric slot-attrib (object slot-name attrib-keyword)
(:documentation "Returns the attribute value of the given slot of the given object"))
(defgeneric slot-attribs (object slot-name)
(:documentation "Returns a list of all attributes and values of the given slot from the object"))
(defmethod slot-attrib ((object attributes-object) (slot-name symbol) (attrib-keyword symbol))
(getf (cadr (assoc slot-name (slot-value object '%all-attributes))) attrib-keyword))
(defmethod slot-attribs ((object attributes-object) (slot-name symbol))
(cadr (assoc slot-name (slot-value object '%all-attributes))))
(defun find-slot-initarg-by-name (object slot-name)
(let ((all-slots (closer-mop:class-slots (class-of object))))
(dolist (slot-def all-slots)
(if (eq (closer-mop:slot-definition-name slot-def) slot-name)
(return-from find-slot-initarg-by-name (car (closer-mop:slot-definition-initargs slot-def)))))))
(defgeneric (setf slot-attrib) (new-value object slot-name attrib-name))
(defgeneric (setf slot-attribs) (new-value object slot-name))
(defmethod (setf slot-attrib) (new-value (object attributes-object) (slot-name symbol) (attrib-keyword symbol))
(if (slot-boundp object '%all-attributes)
(setf (getf (cadr (assoc slot-name (slot-value object '%all-attributes))) attrib-keyword) new-value)))
(defmethod (setf slot-attribs) (new-value (object attributes-object) (slot-name symbol))
(if (slot-boundp object '%all-attributes)
(setf (cadr (assoc slot-name (slot-value object '%all-attributes))) (rest new-value))))
(defmethod (setf closer-mop:slot-value-using-class) :around (new-value (class attributes-class) (object attributes-object) (slotd attributed-effective-slot))
(if (and (slot-boundp object '%all-attributes) (listp new-value) (equal (car new-value) :value/attribs))
(let ((value (second new-value))
(attrib-pairs (pairup-list (third new-value)))
(slot-name (closer-mop:slot-definition-name slotd)))
(setf (slot-value object slot-name) value)
(dolist (pair attrib-pairs)
(setf (slot-attrib object slot-name (car pair)) (second pair)))
new-value)
(call-next-method)))
(defmethod initialize-instance :around ((object attributes-object) &rest initargs)
(declare (ignore initargs))
(let* ((all-slots (closer-mop:class-slots (class-of object)))
(attributes-slot (find '%all-attributes all-slots :key #'closer-mop:slot-definition-name))
(inherited-attributes (copy-tree (funcall (closer-mop:slot-definition-initfunction attributes-slot)))))
(setf (slot-value object '%all-attributes) inherited-attributes)
(call-next-method)))
|
|
762e65b469c8cead5285c649230d6b8486878bfe223371eded3641ed163468de | raffy2010/grand-slam | video_thumbnail.cljs | (ns ui.component.video-thumbnail
(:require [cljs.core.match :refer-macros [match]]
[cljs-react-material-ui.reagent :as ui]
[cljs-react-material-ui.icons :as ic]
[ui.ffmpeg :refer [preview-src]]
[ui.state :refer [active-files]]))
(defn- handle-video-move
"video move handler"
[video event]
(let [file-id (:id video)
img (.-target event)
pos (.getBoundingClientRect img)
mouse-pos (.-clientX event)
percent (/ (- mouse-pos (.-left pos))
(.-width pos))
preview-index (->> percent
(* 160)
(.round js/Math)
inc)]
(preview-src file-id preview-index)))
(defn debounce
[func span]
(let [last-time (atom (.now js/Date))]
(fn [& args]
(let [now (.now js/Date)]
(if (< span (- now @last-time))
(apply func args)
(reset! last-time now))))))
(defn video-thumbnail
[video]
(if-let [preview-src (:preview-src video)]
[:img {:class "video-preview"
:src preview-src
:on-mouse-move (partial handle-video-move video)}]
[:div {:class "video-preview"}
[ui/refresh-indicator {:status "loading"
:left 60
:top 25}]]))
| null | https://raw.githubusercontent.com/raffy2010/grand-slam/752984d606f4e201b305c6ac931dd0d03a12f4b4/ui_src/ui/component/video_thumbnail.cljs | clojure | (ns ui.component.video-thumbnail
(:require [cljs.core.match :refer-macros [match]]
[cljs-react-material-ui.reagent :as ui]
[cljs-react-material-ui.icons :as ic]
[ui.ffmpeg :refer [preview-src]]
[ui.state :refer [active-files]]))
(defn- handle-video-move
"video move handler"
[video event]
(let [file-id (:id video)
img (.-target event)
pos (.getBoundingClientRect img)
mouse-pos (.-clientX event)
percent (/ (- mouse-pos (.-left pos))
(.-width pos))
preview-index (->> percent
(* 160)
(.round js/Math)
inc)]
(preview-src file-id preview-index)))
(defn debounce
[func span]
(let [last-time (atom (.now js/Date))]
(fn [& args]
(let [now (.now js/Date)]
(if (< span (- now @last-time))
(apply func args)
(reset! last-time now))))))
(defn video-thumbnail
[video]
(if-let [preview-src (:preview-src video)]
[:img {:class "video-preview"
:src preview-src
:on-mouse-move (partial handle-video-move video)}]
[:div {:class "video-preview"}
[ui/refresh-indicator {:status "loading"
:left 60
:top 25}]]))
|
|
a1e08fc0279a185c1382ae2af178a97d6b322fed5ed04fc1ce73cfb1b08a3689 | apache/couchdb-rebar | test_SUITE.erl | -module(test_SUITE).
-export([all/0, simple_test/1, app_config_file_test/1]).
-include_lib("ct.hrl").
all() ->
[simple_test,
app_config_file_test].
simple_test(Config) ->
io:format("Test: ~p\n" [Config]).
app_config_file_test(_Config) ->
application:start(a1),
{ok, bar} = application:get_env(a1, foo),
application:stop(a1).
| null | https://raw.githubusercontent.com/apache/couchdb-rebar/8578221c20d0caa3deb724e5622a924045ffa8bf/inttest/ct_make_fails/test_SUITE.erl | erlang | -module(test_SUITE).
-export([all/0, simple_test/1, app_config_file_test/1]).
-include_lib("ct.hrl").
all() ->
[simple_test,
app_config_file_test].
simple_test(Config) ->
io:format("Test: ~p\n" [Config]).
app_config_file_test(_Config) ->
application:start(a1),
{ok, bar} = application:get_env(a1, foo),
application:stop(a1).
|
|
1c32242ad8a12c9f83ff50b54fa2c424032c344bed65f069f888b7ac170da30b | kolmodin/spdy | Utils.hs | module Network.SPDY.Utils where
import Control.Monad hiding (join)
import Data.Char
import Data.List
import Numeric
import Data.Word
import qualified Data.ByteString.Lazy as L
w2c :: Word8 -> Char
w2c = chr . fromIntegral
printHexBS :: L.ByteString -> IO ()
printHexBS = printHex . map w2c . L.unpack
printHex :: String -> IO ()
printHex str0 = do
putStr (join "" $ map (toHex' "") [0..lineLength-1])
putStr " "
putStrLn $ join " " $ map (toHex' " ") [0..lineLength-1]
loop str0
where
loop str = do
let (line, rest) = splitAt lineLength str
if null line
then return ()
else do
printHexLine line
loop rest
lineLength = 4
nicify :: String -> String
nicify = map (\x -> if isAlphaNum x || x `elem` "/+-:!#$%^&*(){\"'<>, " then x else '.')
join glue lst = concat $ intersperse glue lst
printHexLine :: String -> IO ()
printHexLine line = do
putStr (nicify line)
replicateM (lineLength - length line) (putStr " ")
putStr " "
putStrLn $ join " " (map (toHex . ord) line)
toHex :: Int -> String
toHex i = toHex' "0" i
toHex' :: String -> Int -> String
toHex' filling w =
case showHex w "" of
w1:w2:[] -> w1:w2:[]
w2:[] -> filling ++ w2:[]
_ -> error "showHex returned weird stuff"
vim : set ts=2 sw=2 tw=72 et ft = haskell :
| null | https://raw.githubusercontent.com/kolmodin/spdy/e81cb708695e2f08426a2fe8f2dc30de89e7a6db/Network/SPDY/Utils.hs | haskell | module Network.SPDY.Utils where
import Control.Monad hiding (join)
import Data.Char
import Data.List
import Numeric
import Data.Word
import qualified Data.ByteString.Lazy as L
w2c :: Word8 -> Char
w2c = chr . fromIntegral
printHexBS :: L.ByteString -> IO ()
printHexBS = printHex . map w2c . L.unpack
printHex :: String -> IO ()
printHex str0 = do
putStr (join "" $ map (toHex' "") [0..lineLength-1])
putStr " "
putStrLn $ join " " $ map (toHex' " ") [0..lineLength-1]
loop str0
where
loop str = do
let (line, rest) = splitAt lineLength str
if null line
then return ()
else do
printHexLine line
loop rest
lineLength = 4
nicify :: String -> String
nicify = map (\x -> if isAlphaNum x || x `elem` "/+-:!#$%^&*(){\"'<>, " then x else '.')
join glue lst = concat $ intersperse glue lst
printHexLine :: String -> IO ()
printHexLine line = do
putStr (nicify line)
replicateM (lineLength - length line) (putStr " ")
putStr " "
putStrLn $ join " " (map (toHex . ord) line)
toHex :: Int -> String
toHex i = toHex' "0" i
toHex' :: String -> Int -> String
toHex' filling w =
case showHex w "" of
w1:w2:[] -> w1:w2:[]
w2:[] -> filling ++ w2:[]
_ -> error "showHex returned weird stuff"
vim : set ts=2 sw=2 tw=72 et ft = haskell :
|
|
09fafbde89c23cce9913f3b9dca423ba4bbddb7dfeb37eaccff3561358b0df02 | iu-parfunc/HSBencher | Main.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE NamedFieldPuns #
-- |
-- Seeded from code by:
Copyright : [ 2014 ]
module Main where
-- Friends:
import HSBencher
import HSBencher.Internal.Config (augmentResultWithConfig, getConfig)
import HSBencher.Backend.Fusion
import Network.Google.OAuth2 (OAuth2Client(..))
import Network.Google.FusionTables(CellType(..))
-- Standard:
import Control.Monad
import Control.Monad.Reader
import Data.Default
import Data.List as L
import Data.Maybe (fromJust)
import System.Console.GetOpt (getOpt, getOpt', ArgOrder(Permute), OptDescr(Option), ArgDescr(..), usageInfo)
import System.Environment (getArgs)
import System.Exit
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Version (showVersion)
import Paths_hsbencher_fusion (version)
import Text.CSV
this_progname :: String
this_progname = "hsbencher-fusion-upload-csv"
----------------------------------------------------------------------------------------------------
data ExtraFlag = TableName String
| PrintHelp
| NoUpload
| MatchServerOrder
| OutFile FilePath
deriving (Eq,Ord,Show,Read)
extra_cli_options :: [OptDescr ExtraFlag]
extra_cli_options = [ Option ['h'] ["help"] (NoArg PrintHelp)
"Show this help message and exit."
, Option [] ["name"] (ReqArg TableName "NAME")
"Name for the fusion table to which we upload (discovered or created)."
, Option ['o'] ["out"] (ReqArg OutFile "FILE")
"Write the augmented CSV data out to FILE."
, Option [] ["noupload"] (NoArg NoUpload)
"Don't actually upload to the fusion table (but still possible write to disk)."
, Option [] ["matchserver"] (NoArg MatchServerOrder)
"Even if not uploading, retrieve the order of columns from the server and use it."
]
plug :: FusionPlug
plug = defaultFusionPlugin
main :: IO ()
main = do
cli_args <- getArgs
let (help,fusion_cli_options) = plugCmdOpts plug
let (opts1,plainargs,unrec,errs1) = getOpt' Permute extra_cli_options cli_args
let (opts2,_,errs2) = getOpt Permute fusion_cli_options unrec
let errs = errs1 ++ errs2
when (L.elem PrintHelp opts1 || not (null errs)) $ do
putStrLn $
this_progname++": "++showVersion version++"\n"++
"USAGE: "++this_progname++" [options] CSVFILE\n\n"++
"Upload pre-existing CSV, e.g. data as gathered by the 'dribble' plugin.\n"++
"\n"++
(usageInfo "Options:" extra_cli_options)++"\n"++
(usageInfo help fusion_cli_options)
if null errs then exitSuccess else exitFailure
let name = case [ n | TableName n <- opts1 ] of
[] -> error "Must supply a table name!"
[n] -> n
ls -> error $ "Multiple table names supplied!: "++show ls
This bit could be abstracted nicely by the HSBencher lib :
------------------------------------------------------------
-- Gather info about the benchmark platform:
gconf0 <- getConfig [] []
let gconf1 = gconf0 { benchsetName = Just name }
let fconf0 = getMyConf plug gconf1
let fconf1 = foldFlags plug opts2 fconf0
let gconf2 = setMyConf plug fconf1 gconf1
gconf3 <- if L.elem NoUpload opts1 &&
not (L.elem MatchServerOrder opts1)
then return gconf2
else plugInitialize plug gconf2
let outFiles = [ f | OutFile f <- opts1 ]
------------------------------------------------------------
case plainargs of
[] -> error "No file given to upload!"
reports -> do
allFiles <- fmap (L.map (L.filter goodLine)) $
mapM loadCSV reports
let headers = map head allFiles
distinctHeaders = S.fromList headers
combined = head headers : (L.concatMap tail allFiles)
unless (S.size distinctHeaders == 1) $
error $ unlines ("Not all the headers in CSV files matched: " :
(L.map show (S.toList distinctHeaders)))
putStrLn$ " ["++this_progname++"] File lengths: "++show (map length allFiles)
-- putStrLn$ " ["++this_progname++"] File headers:\n"++unlines (map show headers)
-- putStrLn$ "DEBUG:\n "++unlines (map show combined)
unless (null outFiles) $ do
putStrLn$ " ["++this_progname++"] First, write out CSVs to disk: "++show outFiles
putStrLn$ " ["++this_progname++"] Match server side schema?: "++ show(L.elem MatchServerOrder opts1)
let hdr = head combined
serverSchema <-
if L.elem MatchServerOrder opts1
then do s <- ensureMyColumns gconf3 hdr
putStrLn$ " ["++this_progname++"] Retrieved schema from server, using for CSV output:\n "++show s
return s
else return hdr
forM_ outFiles $ \f ->
writeOutFile gconf3 serverSchema f combined
if L.elem NoUpload opts1
then putStrLn$ " ["++this_progname++"] Skipping fusion table upload due to --noupload "
else doupload gconf3 combined
case ( reports , outFiles ) of
( _ , [ ] ) - > forM _ reports loadCSV f > > =
-- ([inp],[out]) ->
do c inp
-- writeOutFile gconf3 out c
doupload gconf3 c
-- ([inp],ls) -> error $ "Given multiple CSV output files: "++show ls
( ls1,ls2 ) - > error $ " Given multiple input files but also asked to output to a CSV file . "
writeOutFile :: Config -> [String] -> FilePath -> CSV -> IO ()
writeOutFile _ _ _ [] = error $ "Bad CSV file, not even a header line."
writeOutFile confs serverSchema path (hdr:rst) = do
augmented <- augmentRows confs serverSchema hdr rst
putStrLn$ " ["++this_progname++"] Writing out CSV with schema:\n "++show serverSchema
let untuple :: [[(String,String)]] -> [[String]]
= map fst ( head tups ) : map ( map snd ) tups
Convert while using the ordering from :
untuple tups = serverSchema :
[ [ fJ (lookup k tup)
| k <- serverSchema
, let fJ (Just x) = x
fJ Nothing = "" -- Custom fields!
error$ " field " + + k++ " in server schema missing in tuple:\n"++unlines ( map show tup )
]
| tup <- tups ]
writeFile path $ printCSV $
untuple $ map resultToTuple augmented
putStrLn$ " ["++this_progname++"] Successfully wrote file: "++path
goodLine :: [String] -> Bool
goodLine [] = False
goodLine [""] = False -- Why does Text.CSV produce these?
goodLine _ = True
loadCSV :: FilePath -> IO CSV
loadCSV f = do
x <- parseCSVFromFile f
case x of
Left err -> error $ "Failed to read CSV file: \n"++show err
Right [] -> error $ "Bad CSV file, not even a header line: "++ f
Right v -> return v
doupload :: Config -> CSV -> IO ()
doupload confs x = do
case x of
[] -> error $ "Bad CSV file, not even a header line."
(hdr:rst) -> do
checkHeader hdr
putStrLn$ " ["++this_progname++"] Beginning upload CSV data with Schema: "++show hdr
FIXUP server schema .
putStrLn$ " ["++this_progname++"] Uploading "++show (length rst)++" rows of CSV data..."
putStrLn "================================================================================"
augmented <- augmentRows confs serverSchema hdr rst
prepped <- prepRows serverSchema augmented
fusionUploader prepped confs
-- TODO: Add checking to see if the rows are already there. However
that would be expensive if we do one query per row . The ideal
-- implementation would examine the structure of the rowset and make
-- fewer queries.
-- | Perform the actual upload of N rows
-- uprows :: Config -> [String] -> [String] -> [[String]] -> Uploader -> IO ()
augmentRows :: Config -> [String] -> [String] -> [[String]] -> IO [BenchmarkResult]
augmentRows confs serverSchema hdr rst = do
let missing = S.difference (S.fromList serverSchema) (S.fromList hdr)
-- Compute a base benchResult to fill in our missing fields:
base <- if S.null missing then
return def -- Don't bother computing it, nothing missing.
else do
putStrLn $ "\n\n ["++this_progname++"] Fields missing, filling in defaults: "++show (S.toList missing)
-- Start with the empty tuple, augmented with environmental info:
x <- augmentResultWithConfig confs def
return $ x { _WHO = "" } -- Don't use who output from the point in time where we run THIS command.
let tuples = map (zip hdr) rst
augmented = map (`unionBR` base) tuples
return augmented
prepRows :: Schema -> [BenchmarkResult] -> IO [PreppedTuple]
prepRows serverSchema augmented = do
let prepped = map (prepBenchResult serverSchema) augmented
putStrLn$ " ["++this_progname++"] Tuples prepped. Here's the first one: "++ show (head prepped)
-- Layer on what we have.
return prepped
fusionUploader :: [PreppedTuple] -> Config -> IO ()
fusionUploader prepped confs = do
flg <- runReaderT (uploadRows prepped) confs
unless flg $ error $ this_progname++"/uprows: failed to upload rows."
| Union a tuple with a BenchmarkResult . Any unmentioned keys in
the tuple retain their value from the input BenchmarkResult .
unionBR :: [(String,String)] -> BenchmarkResult -> BenchmarkResult
unionBR tuple br1 =
tupleToResult (M.toList (M.union (M.fromList tuple)
(M.fromList (resultToTuple br1))))
FIXUP : FusionConfig does n't document our additional CUSTOM columns .
-- During initialization it ensures the table has the core schema, but that's it.
-- Thus we need to make sure ALL columns are present.
ensureMyColumns :: Config -> [String] -> IO Schema
ensureMyColumns confs hdr = do
let FusionConfig{fusionTableID,fusionClientID,fusionClientSecret,serverColumns} = getMyConf plug confs
(Just tid, Just cid, Just sec) = (fusionTableID, fusionClientID, fusionClientSecret)
auth = OAuth2Client { clientId=cid, clientSecret=sec }
missing = S.difference (S.fromList hdr) (S.fromList serverColumns)
-- HACK: we pretend everything in a STRING here... we should probably look at the data in the CSV
-- and guess if its a number. However, if columns already exist we DONT change their type, so it
-- can always be done manually on the server.
schema = [ case lookup nm fusionSchema of
Nothing -> (nm,STRING)
Just t -> (nm,t)
| nm <- serverColumns ++ S.toList missing ]
if S.null missing
then putStrLn$ " ["++this_progname++"] Server has all the columns appearing in the CSV file. Good."
else putStrLn$ " ["++this_progname++"] Adding missing columns: "++show missing
res <- runReaderT (ensureColumns auth tid schema) confs
putStrLn$ " ["++this_progname++"] Done adding, final server schema:"++show res
return res
checkHeader :: Record -> IO ()
checkHeader hdr
| L.elem "PROGNAME" hdr = return ()
| otherwise = error $ "Bad HEADER line on CSV file. Expecting at least PROGNAME to be present: "++show hdr
| null | https://raw.githubusercontent.com/iu-parfunc/HSBencher/76782b75b3a4b276c45a2c159e0b4cb6bd8a2360/hsbencher-fusion/CSVUploader/Main.hs | haskell | # LANGUAGE OverloadedStrings #
|
Seeded from code by:
Friends:
Standard:
--------------------------------------------------------------------------------------------------
----------------------------------------------------------
Gather info about the benchmark platform:
----------------------------------------------------------
putStrLn$ " ["++this_progname++"] File headers:\n"++unlines (map show headers)
putStrLn$ "DEBUG:\n "++unlines (map show combined)
([inp],[out]) ->
writeOutFile gconf3 out c
([inp],ls) -> error $ "Given multiple CSV output files: "++show ls
Custom fields!
Why does Text.CSV produce these?
TODO: Add checking to see if the rows are already there. However
implementation would examine the structure of the rowset and make
fewer queries.
| Perform the actual upload of N rows
uprows :: Config -> [String] -> [String] -> [[String]] -> Uploader -> IO ()
Compute a base benchResult to fill in our missing fields:
Don't bother computing it, nothing missing.
Start with the empty tuple, augmented with environmental info:
Don't use who output from the point in time where we run THIS command.
Layer on what we have.
During initialization it ensures the table has the core schema, but that's it.
Thus we need to make sure ALL columns are present.
HACK: we pretend everything in a STRING here... we should probably look at the data in the CSV
and guess if its a number. However, if columns already exist we DONT change their type, so it
can always be done manually on the server. | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE NamedFieldPuns #
Copyright : [ 2014 ]
module Main where
import HSBencher
import HSBencher.Internal.Config (augmentResultWithConfig, getConfig)
import HSBencher.Backend.Fusion
import Network.Google.OAuth2 (OAuth2Client(..))
import Network.Google.FusionTables(CellType(..))
import Control.Monad
import Control.Monad.Reader
import Data.Default
import Data.List as L
import Data.Maybe (fromJust)
import System.Console.GetOpt (getOpt, getOpt', ArgOrder(Permute), OptDescr(Option), ArgDescr(..), usageInfo)
import System.Environment (getArgs)
import System.Exit
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Version (showVersion)
import Paths_hsbencher_fusion (version)
import Text.CSV
this_progname :: String
this_progname = "hsbencher-fusion-upload-csv"
data ExtraFlag = TableName String
| PrintHelp
| NoUpload
| MatchServerOrder
| OutFile FilePath
deriving (Eq,Ord,Show,Read)
extra_cli_options :: [OptDescr ExtraFlag]
extra_cli_options = [ Option ['h'] ["help"] (NoArg PrintHelp)
"Show this help message and exit."
, Option [] ["name"] (ReqArg TableName "NAME")
"Name for the fusion table to which we upload (discovered or created)."
, Option ['o'] ["out"] (ReqArg OutFile "FILE")
"Write the augmented CSV data out to FILE."
, Option [] ["noupload"] (NoArg NoUpload)
"Don't actually upload to the fusion table (but still possible write to disk)."
, Option [] ["matchserver"] (NoArg MatchServerOrder)
"Even if not uploading, retrieve the order of columns from the server and use it."
]
plug :: FusionPlug
plug = defaultFusionPlugin
main :: IO ()
main = do
cli_args <- getArgs
let (help,fusion_cli_options) = plugCmdOpts plug
let (opts1,plainargs,unrec,errs1) = getOpt' Permute extra_cli_options cli_args
let (opts2,_,errs2) = getOpt Permute fusion_cli_options unrec
let errs = errs1 ++ errs2
when (L.elem PrintHelp opts1 || not (null errs)) $ do
putStrLn $
this_progname++": "++showVersion version++"\n"++
"USAGE: "++this_progname++" [options] CSVFILE\n\n"++
"Upload pre-existing CSV, e.g. data as gathered by the 'dribble' plugin.\n"++
"\n"++
(usageInfo "Options:" extra_cli_options)++"\n"++
(usageInfo help fusion_cli_options)
if null errs then exitSuccess else exitFailure
let name = case [ n | TableName n <- opts1 ] of
[] -> error "Must supply a table name!"
[n] -> n
ls -> error $ "Multiple table names supplied!: "++show ls
This bit could be abstracted nicely by the HSBencher lib :
gconf0 <- getConfig [] []
let gconf1 = gconf0 { benchsetName = Just name }
let fconf0 = getMyConf plug gconf1
let fconf1 = foldFlags plug opts2 fconf0
let gconf2 = setMyConf plug fconf1 gconf1
gconf3 <- if L.elem NoUpload opts1 &&
not (L.elem MatchServerOrder opts1)
then return gconf2
else plugInitialize plug gconf2
let outFiles = [ f | OutFile f <- opts1 ]
case plainargs of
[] -> error "No file given to upload!"
reports -> do
allFiles <- fmap (L.map (L.filter goodLine)) $
mapM loadCSV reports
let headers = map head allFiles
distinctHeaders = S.fromList headers
combined = head headers : (L.concatMap tail allFiles)
unless (S.size distinctHeaders == 1) $
error $ unlines ("Not all the headers in CSV files matched: " :
(L.map show (S.toList distinctHeaders)))
putStrLn$ " ["++this_progname++"] File lengths: "++show (map length allFiles)
unless (null outFiles) $ do
putStrLn$ " ["++this_progname++"] First, write out CSVs to disk: "++show outFiles
putStrLn$ " ["++this_progname++"] Match server side schema?: "++ show(L.elem MatchServerOrder opts1)
let hdr = head combined
serverSchema <-
if L.elem MatchServerOrder opts1
then do s <- ensureMyColumns gconf3 hdr
putStrLn$ " ["++this_progname++"] Retrieved schema from server, using for CSV output:\n "++show s
return s
else return hdr
forM_ outFiles $ \f ->
writeOutFile gconf3 serverSchema f combined
if L.elem NoUpload opts1
then putStrLn$ " ["++this_progname++"] Skipping fusion table upload due to --noupload "
else doupload gconf3 combined
case ( reports , outFiles ) of
( _ , [ ] ) - > forM _ reports loadCSV f > > =
do c inp
doupload gconf3 c
( ls1,ls2 ) - > error $ " Given multiple input files but also asked to output to a CSV file . "
writeOutFile :: Config -> [String] -> FilePath -> CSV -> IO ()
writeOutFile _ _ _ [] = error $ "Bad CSV file, not even a header line."
writeOutFile confs serverSchema path (hdr:rst) = do
augmented <- augmentRows confs serverSchema hdr rst
putStrLn$ " ["++this_progname++"] Writing out CSV with schema:\n "++show serverSchema
let untuple :: [[(String,String)]] -> [[String]]
= map fst ( head tups ) : map ( map snd ) tups
Convert while using the ordering from :
untuple tups = serverSchema :
[ [ fJ (lookup k tup)
| k <- serverSchema
, let fJ (Just x) = x
error$ " field " + + k++ " in server schema missing in tuple:\n"++unlines ( map show tup )
]
| tup <- tups ]
writeFile path $ printCSV $
untuple $ map resultToTuple augmented
putStrLn$ " ["++this_progname++"] Successfully wrote file: "++path
goodLine :: [String] -> Bool
goodLine [] = False
goodLine _ = True
loadCSV :: FilePath -> IO CSV
loadCSV f = do
x <- parseCSVFromFile f
case x of
Left err -> error $ "Failed to read CSV file: \n"++show err
Right [] -> error $ "Bad CSV file, not even a header line: "++ f
Right v -> return v
doupload :: Config -> CSV -> IO ()
doupload confs x = do
case x of
[] -> error $ "Bad CSV file, not even a header line."
(hdr:rst) -> do
checkHeader hdr
putStrLn$ " ["++this_progname++"] Beginning upload CSV data with Schema: "++show hdr
FIXUP server schema .
putStrLn$ " ["++this_progname++"] Uploading "++show (length rst)++" rows of CSV data..."
putStrLn "================================================================================"
augmented <- augmentRows confs serverSchema hdr rst
prepped <- prepRows serverSchema augmented
fusionUploader prepped confs
that would be expensive if we do one query per row . The ideal
augmentRows :: Config -> [String] -> [String] -> [[String]] -> IO [BenchmarkResult]
augmentRows confs serverSchema hdr rst = do
let missing = S.difference (S.fromList serverSchema) (S.fromList hdr)
base <- if S.null missing then
else do
putStrLn $ "\n\n ["++this_progname++"] Fields missing, filling in defaults: "++show (S.toList missing)
x <- augmentResultWithConfig confs def
let tuples = map (zip hdr) rst
augmented = map (`unionBR` base) tuples
return augmented
prepRows :: Schema -> [BenchmarkResult] -> IO [PreppedTuple]
prepRows serverSchema augmented = do
let prepped = map (prepBenchResult serverSchema) augmented
putStrLn$ " ["++this_progname++"] Tuples prepped. Here's the first one: "++ show (head prepped)
return prepped
fusionUploader :: [PreppedTuple] -> Config -> IO ()
fusionUploader prepped confs = do
flg <- runReaderT (uploadRows prepped) confs
unless flg $ error $ this_progname++"/uprows: failed to upload rows."
| Union a tuple with a BenchmarkResult . Any unmentioned keys in
the tuple retain their value from the input BenchmarkResult .
unionBR :: [(String,String)] -> BenchmarkResult -> BenchmarkResult
unionBR tuple br1 =
tupleToResult (M.toList (M.union (M.fromList tuple)
(M.fromList (resultToTuple br1))))
FIXUP : FusionConfig does n't document our additional CUSTOM columns .
ensureMyColumns :: Config -> [String] -> IO Schema
ensureMyColumns confs hdr = do
let FusionConfig{fusionTableID,fusionClientID,fusionClientSecret,serverColumns} = getMyConf plug confs
(Just tid, Just cid, Just sec) = (fusionTableID, fusionClientID, fusionClientSecret)
auth = OAuth2Client { clientId=cid, clientSecret=sec }
missing = S.difference (S.fromList hdr) (S.fromList serverColumns)
schema = [ case lookup nm fusionSchema of
Nothing -> (nm,STRING)
Just t -> (nm,t)
| nm <- serverColumns ++ S.toList missing ]
if S.null missing
then putStrLn$ " ["++this_progname++"] Server has all the columns appearing in the CSV file. Good."
else putStrLn$ " ["++this_progname++"] Adding missing columns: "++show missing
res <- runReaderT (ensureColumns auth tid schema) confs
putStrLn$ " ["++this_progname++"] Done adding, final server schema:"++show res
return res
checkHeader :: Record -> IO ()
checkHeader hdr
| L.elem "PROGNAME" hdr = return ()
| otherwise = error $ "Bad HEADER line on CSV file. Expecting at least PROGNAME to be present: "++show hdr
|
95378533a80c6bfca6db1264798420ffa1b1031b5ad06e8b5e0c101a55f20a93 | theodormoroianu/SecondYearCourses | HaskellChurch_20210415163652.hs | {-# LANGUAGE RankNTypes #-}
module HaskellChurch where
A boolean is any way to choose between two alternatives
newtype CBool = CBool {cIf :: forall t. t -> t -> t}
An instance to show as regular Booleans
instance Show CBool where
show b = "cBool " <> show (cIf b True False)
The boolean constant true always chooses the first alternative
cTrue :: CBool
cTrue = undefined
The boolean constant false always chooses the second alternative
cFalse :: CBool
cFalse = undefined
cBool :: Bool -> CBool
cBool True = cTrue
cBool False = cFalse
--The boolean negation switches the alternatives
cNot :: CBool -> CBool
cNot = undefined
--The boolean conjunction can be built as a conditional
(&&:) :: CBool -> CBool -> CBool
(&&:) = undefined
infixr 3 &&:
--The boolean disjunction can be built as a conditional
(||:) :: CBool -> CBool -> CBool
(||:) = undefined
infixr 2 ||:
-- a pair is a way to compute something based on the values
-- contained within the pair.
newtype CPair a b = CPair { cOn :: forall c . (a -> b -> c) -> c }
An instance to show CPairs as regular pairs .
instance (Show a, Show b) => Show (CPair a b) where
show p = "cPair " <> show (cOn p (,))
builds a pair out of two values as an object which , when given
--a function to be applied on the values, it will apply it on them.
cPair :: a -> b -> CPair a b
cPair = undefined
first projection uses the function selecting first component on a pair
cFst :: CPair a b -> a
cFst = undefined
second projection
cSnd :: CPair a b -> b
cSnd = undefined
-- A natural number is any way to iterate a function s a number of times
-- over an initial value z
newtype CNat = CNat { cFor :: forall t. (t -> t) -> t -> t }
-- An instance to show CNats as regular natural numbers
instance Show CNat where
show n = show $ cFor n (1 +) (0 :: Integer)
--0 will iterate the function s 0 times over z, producing z
c0 :: CNat
c0 = undefined
1 is the the function s iterated 1 times over z , that is , z
c1 :: CNat
c1 = undefined
--Successor n either
- applies s one more time in addition to what n does
-- - iterates s n times over (s z)
cS :: CNat -> CNat
cS = undefined
--Addition of m and n is done by iterating s n times over m
(+:) :: CNat -> CNat -> CNat
(+:) = undefined
infixl 6 +:
--Multiplication of m and n can be done by composing n and m
(*:) :: CNat -> CNat -> CNat
(*:) = \n m -> CNat $ cFor n . cFor m
infixl 7 *:
--Exponentiation of m and n can be done by applying n to m
(^:) :: CNat -> CNat -> CNat
(^:) = \m n -> CNat $ cFor n (cFor m)
infixr 8 ^:
--Testing whether a value is 0 can be done through iteration
-- using a function constantly false and an initial value true
cIs0 :: CNat -> CBool
cIs0 = \n -> cFor n (\_ -> cFalse) cTrue
Predecessor ( evaluating to 0 for 0 ) can be defined iterating
over pairs , starting from an initial value ( 0 , 0 )
cPred :: CNat -> CNat
cPred = undefined
substraction from m n ( evaluating to 0 if m < n ) is repeated application
-- of the predeccesor function
(-:) :: CNat -> CNat -> CNat
(-:) = \m n -> cFor n cPred m
Transform a value into a CNat ( should yield c0 for nums < = 0 )
cNat :: (Ord p, Num p) => p -> CNat
cNat n = undefined
We can define an instance Num CNat which will allow us to see any
integer constant as a CNat ( e.g. 12 : : CNat ) and also use regular
-- arithmetic
instance Num CNat where
(+) = (+:)
(*) = (*:)
(-) = (-:)
abs = id
signum n = cIf (cIs0 n) 0 1
fromInteger = cNat
-- m is less than (or equal to) n if when substracting n from m we get 0
(<=:) :: CNat -> CNat -> CBool
(<=:) = undefined
infix 4 <=:
(>=:) :: CNat -> CNat -> CBool
(>=:) = \m n -> n <=: m
infix 4 >=:
(<:) :: CNat -> CNat -> CBool
(<:) = \m n -> cNot (m >=: n)
infix 4 <:
(>:) :: CNat -> CNat -> CBool
(>:) = \m n -> n <: m
infix 4 >:
-- equality on naturals can be defined my means of comparisons
(==:) :: CNat -> CNat -> CBool
(==:) = undefined
--Fun with arithmetic and pairs
--Define factorial. You can iterate over a pair to contain the current index and so far factorial
cFactorial :: CNat -> CNat
cFactorial = undefined
Define Fibonacci . You can iterate over a pair to contain two consecutive numbers in the sequence
cFibonacci :: CNat -> CNat
cFibonacci = undefined
--Given m and n, compute q and r satisfying m = q * n + r. If n is not 0 then r should be less than n.
--hint repeated substraction, iterated for at most m times.
cDivMod :: CNat -> CNat -> CPair CNat CNat
cDivMod = undefined
-- a list is a way to aggregate a sequence of elements given an aggregation function and an initial value.
newtype CList a = CList { cFoldR :: forall b. (a -> b -> b) -> b -> b }
make CList an instance of Foldable
instance Foldable CList where
--An instance to show CLists as regular lists.
instance (Show a) => Show (CList a) where
show l = "cList " <> (show $ toList l)
-- The empty list is that which when aggregated it will always produce the initial value
cNil :: CList a
cNil = undefined
-- Adding an element to a list means that, when aggregating the list, the newly added
-- element will be aggregated with the result obtained by aggregating the remainder of the list
(.:) :: a -> CList a -> CList a
(.:) = undefined
we can obtain a CList from a regular list by replacing : with . : and
cList :: [a] -> CList a
cList = undefined
churchNatList :: [Integer] -> Term
churchNatList = churchList . map churchNat
cNatList :: [Integer] -> CList CNat
cNatList = cList . map cNat
churchSum :: Term
churchSum = lam "l" (v "l" $$ churchPlus $$ church0)
cSum :: CList CNat -> CNat
since CList is an instance of Foldable ; otherwise : \l - > cFoldR l ( + ) 0
churchIsNil :: Term
churchIsNil = lam "l" (v "l" $$ lams ["x", "a"] churchFalse $$ churchTrue)
cIsNil :: CList a -> CBool
cIsNil = \l -> cFoldR l (\_ _ -> cFalse) cTrue
churchHead :: Term
churchHead = lams ["l", "default"] (v "l" $$ lams ["x", "a"] (v "x") $$ v "default")
cHead :: CList a -> a -> a
cHead = \l d -> cFoldR l (\x _ -> x) d
churchTail :: Term
churchTail = lam "l" (churchFst $$
(v "l"
$$ lams ["x","p"] (lam "t" (churchPair $$ v "t" $$ (churchCons $$ v "x" $$ v "t"))
$$ (churchSnd $$ v "p"))
$$ (churchPair $$ churchNil $$ churchNil)
))
cTail :: CList a -> CList a
cTail = \l -> cFst $ cFoldR l (\x p -> (\t -> cPair t (x .: t)) (cSnd p)) (cPair cNil cNil)
cLength :: CList a -> CNat
cLength = \l -> cFoldR l (\_ n -> cS n) 0
fix :: Term
fix = lam "f" (lam "x" (v "f" $$ (v "x" $$ v "x")) $$ lam "x" (v "f" $$ (v "x" $$ v "x")))
divmod :: (Enum a, Num a, Ord b, Num b) => b -> b -> (a, b)
divmod m n = divmod' (0, 0)
where
divmod' (x, y)
| x' <= m = divmod' (x', succ y)
| otherwise = (y, m - x)
where x' = x + n
divmod' m n =
if n == 0 then (0, m)
else
Function.fix
(\f p ->
(\x' ->
if x' > 0 then f ((,) (succ (fst p)) x')
else if (<=) n (snd p) then ((,) (succ (fst p)) 0)
else p)
((-) (snd p) n))
(0, m)
churchDivMod' :: Term
churchDivMod' = lams ["m", "n"]
(churchIs0 $$ v "n"
$$ (churchPair $$ church0 $$ v "m")
$$ (fix
$$ lams ["f", "p"]
(lam "x"
(churchIs0 $$ v "x"
$$ (churchLte $$ v "n" $$ (churchSnd $$ v "p")
$$ (churchPair $$ (churchS $$ (churchFst $$ v "p")) $$ church0)
$$ v "p"
)
$$ (v "f" $$ (churchPair $$ (churchS $$ (churchFst $$ v "p")) $$ v "x"))
)
$$ (churchSub $$ (churchSnd $$ v "p") $$ v "n")
)
$$ (churchPair $$ church0 $$ v "m")
)
)
churchSudan :: Term
churchSudan = fix $$ lam "f" (lams ["n", "x", "y"]
(churchIs0 $$ v "n"
$$ (churchPlus $$ v "x" $$ v "y")
$$ (churchIs0 $$ v "y"
$$ v "x"
$$ (lam "fnpy"
(v "f" $$ (churchPred $$ v "n")
$$ v "fnpy"
$$ (churchPlus $$ v "fnpy" $$ v "y")
)
$$ (v "f" $$ v "n" $$ v "x" $$ (churchPred $$ v "y"))
)
)
))
churchAckermann :: Term
churchAckermann = fix $$ lam "A" (lams ["m", "n"]
(churchIs0 $$ v "m"
$$ (churchS $$ v "n")
$$ (churchIs0 $$ v "n"
$$ (v "A" $$ (churchPred $$ v "m") $$ church1)
$$ (v "A" $$ (churchPred $$ v "m")
$$ (v "A" $$ v "m" $$ (churchPred $$ v "n")))
)
)
)
| null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/5e359e6a7cf588a527d27209bf53b4ce6b8d5e83/FLP/Laboratoare/Lab%209/.history/HaskellChurch_20210415163652.hs | haskell | # LANGUAGE RankNTypes #
The boolean negation switches the alternatives
The boolean conjunction can be built as a conditional
The boolean disjunction can be built as a conditional
a pair is a way to compute something based on the values
contained within the pair.
a function to be applied on the values, it will apply it on them.
A natural number is any way to iterate a function s a number of times
over an initial value z
An instance to show CNats as regular natural numbers
0 will iterate the function s 0 times over z, producing z
Successor n either
- iterates s n times over (s z)
Addition of m and n is done by iterating s n times over m
Multiplication of m and n can be done by composing n and m
Exponentiation of m and n can be done by applying n to m
Testing whether a value is 0 can be done through iteration
using a function constantly false and an initial value true
of the predeccesor function
arithmetic
m is less than (or equal to) n if when substracting n from m we get 0
equality on naturals can be defined my means of comparisons
Fun with arithmetic and pairs
Define factorial. You can iterate over a pair to contain the current index and so far factorial
Given m and n, compute q and r satisfying m = q * n + r. If n is not 0 then r should be less than n.
hint repeated substraction, iterated for at most m times.
a list is a way to aggregate a sequence of elements given an aggregation function and an initial value.
An instance to show CLists as regular lists.
The empty list is that which when aggregated it will always produce the initial value
Adding an element to a list means that, when aggregating the list, the newly added
element will be aggregated with the result obtained by aggregating the remainder of the list | module HaskellChurch where
A boolean is any way to choose between two alternatives
newtype CBool = CBool {cIf :: forall t. t -> t -> t}
An instance to show as regular Booleans
instance Show CBool where
show b = "cBool " <> show (cIf b True False)
The boolean constant true always chooses the first alternative
cTrue :: CBool
cTrue = undefined
The boolean constant false always chooses the second alternative
cFalse :: CBool
cFalse = undefined
cBool :: Bool -> CBool
cBool True = cTrue
cBool False = cFalse
cNot :: CBool -> CBool
cNot = undefined
(&&:) :: CBool -> CBool -> CBool
(&&:) = undefined
infixr 3 &&:
(||:) :: CBool -> CBool -> CBool
(||:) = undefined
infixr 2 ||:
newtype CPair a b = CPair { cOn :: forall c . (a -> b -> c) -> c }
An instance to show CPairs as regular pairs .
instance (Show a, Show b) => Show (CPair a b) where
show p = "cPair " <> show (cOn p (,))
builds a pair out of two values as an object which , when given
cPair :: a -> b -> CPair a b
cPair = undefined
first projection uses the function selecting first component on a pair
cFst :: CPair a b -> a
cFst = undefined
second projection
cSnd :: CPair a b -> b
cSnd = undefined
newtype CNat = CNat { cFor :: forall t. (t -> t) -> t -> t }
instance Show CNat where
show n = show $ cFor n (1 +) (0 :: Integer)
c0 :: CNat
c0 = undefined
1 is the the function s iterated 1 times over z , that is , z
c1 :: CNat
c1 = undefined
- applies s one more time in addition to what n does
cS :: CNat -> CNat
cS = undefined
(+:) :: CNat -> CNat -> CNat
(+:) = undefined
infixl 6 +:
(*:) :: CNat -> CNat -> CNat
(*:) = \n m -> CNat $ cFor n . cFor m
infixl 7 *:
(^:) :: CNat -> CNat -> CNat
(^:) = \m n -> CNat $ cFor n (cFor m)
infixr 8 ^:
cIs0 :: CNat -> CBool
cIs0 = \n -> cFor n (\_ -> cFalse) cTrue
Predecessor ( evaluating to 0 for 0 ) can be defined iterating
over pairs , starting from an initial value ( 0 , 0 )
cPred :: CNat -> CNat
cPred = undefined
substraction from m n ( evaluating to 0 if m < n ) is repeated application
(-:) :: CNat -> CNat -> CNat
(-:) = \m n -> cFor n cPred m
Transform a value into a CNat ( should yield c0 for nums < = 0 )
cNat :: (Ord p, Num p) => p -> CNat
cNat n = undefined
We can define an instance Num CNat which will allow us to see any
integer constant as a CNat ( e.g. 12 : : CNat ) and also use regular
instance Num CNat where
(+) = (+:)
(*) = (*:)
(-) = (-:)
abs = id
signum n = cIf (cIs0 n) 0 1
fromInteger = cNat
(<=:) :: CNat -> CNat -> CBool
(<=:) = undefined
infix 4 <=:
(>=:) :: CNat -> CNat -> CBool
(>=:) = \m n -> n <=: m
infix 4 >=:
(<:) :: CNat -> CNat -> CBool
(<:) = \m n -> cNot (m >=: n)
infix 4 <:
(>:) :: CNat -> CNat -> CBool
(>:) = \m n -> n <: m
infix 4 >:
(==:) :: CNat -> CNat -> CBool
(==:) = undefined
cFactorial :: CNat -> CNat
cFactorial = undefined
Define Fibonacci . You can iterate over a pair to contain two consecutive numbers in the sequence
cFibonacci :: CNat -> CNat
cFibonacci = undefined
cDivMod :: CNat -> CNat -> CPair CNat CNat
cDivMod = undefined
newtype CList a = CList { cFoldR :: forall b. (a -> b -> b) -> b -> b }
make CList an instance of Foldable
instance Foldable CList where
instance (Show a) => Show (CList a) where
show l = "cList " <> (show $ toList l)
cNil :: CList a
cNil = undefined
(.:) :: a -> CList a -> CList a
(.:) = undefined
we can obtain a CList from a regular list by replacing : with . : and
cList :: [a] -> CList a
cList = undefined
churchNatList :: [Integer] -> Term
churchNatList = churchList . map churchNat
cNatList :: [Integer] -> CList CNat
cNatList = cList . map cNat
churchSum :: Term
churchSum = lam "l" (v "l" $$ churchPlus $$ church0)
cSum :: CList CNat -> CNat
since CList is an instance of Foldable ; otherwise : \l - > cFoldR l ( + ) 0
churchIsNil :: Term
churchIsNil = lam "l" (v "l" $$ lams ["x", "a"] churchFalse $$ churchTrue)
cIsNil :: CList a -> CBool
cIsNil = \l -> cFoldR l (\_ _ -> cFalse) cTrue
churchHead :: Term
churchHead = lams ["l", "default"] (v "l" $$ lams ["x", "a"] (v "x") $$ v "default")
cHead :: CList a -> a -> a
cHead = \l d -> cFoldR l (\x _ -> x) d
churchTail :: Term
churchTail = lam "l" (churchFst $$
(v "l"
$$ lams ["x","p"] (lam "t" (churchPair $$ v "t" $$ (churchCons $$ v "x" $$ v "t"))
$$ (churchSnd $$ v "p"))
$$ (churchPair $$ churchNil $$ churchNil)
))
cTail :: CList a -> CList a
cTail = \l -> cFst $ cFoldR l (\x p -> (\t -> cPair t (x .: t)) (cSnd p)) (cPair cNil cNil)
cLength :: CList a -> CNat
cLength = \l -> cFoldR l (\_ n -> cS n) 0
fix :: Term
fix = lam "f" (lam "x" (v "f" $$ (v "x" $$ v "x")) $$ lam "x" (v "f" $$ (v "x" $$ v "x")))
divmod :: (Enum a, Num a, Ord b, Num b) => b -> b -> (a, b)
divmod m n = divmod' (0, 0)
where
divmod' (x, y)
| x' <= m = divmod' (x', succ y)
| otherwise = (y, m - x)
where x' = x + n
divmod' m n =
if n == 0 then (0, m)
else
Function.fix
(\f p ->
(\x' ->
if x' > 0 then f ((,) (succ (fst p)) x')
else if (<=) n (snd p) then ((,) (succ (fst p)) 0)
else p)
((-) (snd p) n))
(0, m)
churchDivMod' :: Term
churchDivMod' = lams ["m", "n"]
(churchIs0 $$ v "n"
$$ (churchPair $$ church0 $$ v "m")
$$ (fix
$$ lams ["f", "p"]
(lam "x"
(churchIs0 $$ v "x"
$$ (churchLte $$ v "n" $$ (churchSnd $$ v "p")
$$ (churchPair $$ (churchS $$ (churchFst $$ v "p")) $$ church0)
$$ v "p"
)
$$ (v "f" $$ (churchPair $$ (churchS $$ (churchFst $$ v "p")) $$ v "x"))
)
$$ (churchSub $$ (churchSnd $$ v "p") $$ v "n")
)
$$ (churchPair $$ church0 $$ v "m")
)
)
churchSudan :: Term
churchSudan = fix $$ lam "f" (lams ["n", "x", "y"]
(churchIs0 $$ v "n"
$$ (churchPlus $$ v "x" $$ v "y")
$$ (churchIs0 $$ v "y"
$$ v "x"
$$ (lam "fnpy"
(v "f" $$ (churchPred $$ v "n")
$$ v "fnpy"
$$ (churchPlus $$ v "fnpy" $$ v "y")
)
$$ (v "f" $$ v "n" $$ v "x" $$ (churchPred $$ v "y"))
)
)
))
churchAckermann :: Term
churchAckermann = fix $$ lam "A" (lams ["m", "n"]
(churchIs0 $$ v "m"
$$ (churchS $$ v "n")
$$ (churchIs0 $$ v "n"
$$ (v "A" $$ (churchPred $$ v "m") $$ church1)
$$ (v "A" $$ (churchPred $$ v "m")
$$ (v "A" $$ v "m" $$ (churchPred $$ v "n")))
)
)
)
|
4345e76c3e7d8eddb8bce32a6ef87d698565569edebcef22d3c441e2a18b3a1a | dmillett/political-canvas | constituent.clj | (ns political-canvas.shared.example.constituent
(:require [political-canvas.shared.example.ward :as local]))
;;
; A mock list of consituents for local/regional/federal districts
;
(def mock_constituents
[{:id 1
:name "Joe Plumber"
:aliases []
:districts [local/ward1]
:contact {[:address {} :email {} :phone {} :date]} ; because districs can change (boo?), people move
:affiliations [{}]
:admin_history [{}] ; mostly unused, track warnings from forum moderators
},
{:id 2
:name "Maryann Perez"
:aliases []
:districts [local/ward1]
:contact {[:address {} :email {} :phone {} :date]}
:affiliations [{}]
:admin_history [{}]
}
])
| null | https://raw.githubusercontent.com/dmillett/political-canvas/ec59a065b832277ec06f80e67977eee196a6a194/src/political_canvas/shared/example/constituent.clj | clojure |
A mock list of consituents for local/regional/federal districts
because districs can change (boo?), people move
mostly unused, track warnings from forum moderators | (ns political-canvas.shared.example.constituent
(:require [political-canvas.shared.example.ward :as local]))
(def mock_constituents
[{:id 1
:name "Joe Plumber"
:aliases []
:districts [local/ward1]
:affiliations [{}]
},
{:id 2
:name "Maryann Perez"
:aliases []
:districts [local/ward1]
:contact {[:address {} :email {} :phone {} :date]}
:affiliations [{}]
:admin_history [{}]
}
])
|
b425e559072c8046770c0d1ef66ceaf67f3618ebb9424c69c0a549a3a26d4085 | cxphoe/SICP-solutions | 4.04.rkt | (define (eval exp env)
(cond ((self-evaluating? exp) exp)
((variable? exp) (lookup-variable-value exp env))
((quoted? exp) (text-of-quotation exp))
((assignment? exp) (eval-assignment exp env))
((definition? exp) (eval-definition exp env))
((if? exp) (eval-if exp env))
((lambda? exp)
(make-procedure (lambda-parameters exp)
(lambda-body exp)
env))
((begin? exp)
(eval-sequence (begin-actions exp) env))
((cond? exp) (eval (cond->if exp) env))
((and? exp) (eval (and->if exp) env))
((or? exp) (eval (or->if exp) env))
((application? exp)
(apply (eval (operator exp) env)
(list-of-values (operands exp) env)))
(else
(error "Unknown expression type -- EVAL" exp))))
;;and: (and <predicate1> <predicate2> ... <predicaten>)
(define (and? exp)
(tagged-list? exp 'and))
(define (and-predicates exp) (cdr exp))
(define (and->if exp)
(expand-and-predicates (and-predicates exp)))
(define (expand-and-predicates preds)
(if (null? preds)
'true
(let ((first (car preds))
(rest (cdr preds)))
(if (null? rest)
(make-if first first 'false)
(make-if first
(expand-and-predicates rest)
'false))))
;;or: (or <predicate1> <predicate2> ... <predicaten>)
(define (or? exp)
(tagged-list? exp 'or))
(define (or-predicates exp) (cdr exp))
(define (or->if exp)
(expand-or-predicates (or-predicates exp)))
(define (expand-or-predicates preds)
(if (null? preds)
'false
(make-if (car preds)
(car preds)
(expand-or-predicates (cdr preds))))) | null | https://raw.githubusercontent.com/cxphoe/SICP-solutions/d35bb688db0320f6efb3b3bde1a14ce21da319bd/Chapter%204-Metalinguistic%20Abstraction/1.The%20Meta-cycle%20Evaluator/4.04.rkt | racket | and: (and <predicate1> <predicate2> ... <predicaten>)
or: (or <predicate1> <predicate2> ... <predicaten>) | (define (eval exp env)
(cond ((self-evaluating? exp) exp)
((variable? exp) (lookup-variable-value exp env))
((quoted? exp) (text-of-quotation exp))
((assignment? exp) (eval-assignment exp env))
((definition? exp) (eval-definition exp env))
((if? exp) (eval-if exp env))
((lambda? exp)
(make-procedure (lambda-parameters exp)
(lambda-body exp)
env))
((begin? exp)
(eval-sequence (begin-actions exp) env))
((cond? exp) (eval (cond->if exp) env))
((and? exp) (eval (and->if exp) env))
((or? exp) (eval (or->if exp) env))
((application? exp)
(apply (eval (operator exp) env)
(list-of-values (operands exp) env)))
(else
(error "Unknown expression type -- EVAL" exp))))
(define (and? exp)
(tagged-list? exp 'and))
(define (and-predicates exp) (cdr exp))
(define (and->if exp)
(expand-and-predicates (and-predicates exp)))
(define (expand-and-predicates preds)
(if (null? preds)
'true
(let ((first (car preds))
(rest (cdr preds)))
(if (null? rest)
(make-if first first 'false)
(make-if first
(expand-and-predicates rest)
'false))))
(define (or? exp)
(tagged-list? exp 'or))
(define (or-predicates exp) (cdr exp))
(define (or->if exp)
(expand-or-predicates (or-predicates exp)))
(define (expand-or-predicates preds)
(if (null? preds)
'false
(make-if (car preds)
(car preds)
(expand-or-predicates (cdr preds))))) |
1f4e9bd0eed2e2482dde8451c7bff42318436307acfeb01b0e502e0b50a6759c | 40ants/reblocks-text-editor | html.lisp | (uiop:define-package #:reblocks-text-editor/typed-pieces/html
(:use #:cl)
(:import-from #:reblocks-text-editor/typed-pieces/base
#:typed-piece))
(in-package #:reblocks-text-editor/typed-pieces/html)
(defclass html-piece (typed-piece)
())
(defun make-html-piece (string caret)
(check-type string string)
(check-type caret integer)
(make-instance 'html-piece
:document string
:caret caret))
| null | https://raw.githubusercontent.com/40ants/reblocks-text-editor/b80a3fd75a527c7be8615c19ba4d6e2951c9d3e4/src/typed-pieces/html.lisp | lisp | (uiop:define-package #:reblocks-text-editor/typed-pieces/html
(:use #:cl)
(:import-from #:reblocks-text-editor/typed-pieces/base
#:typed-piece))
(in-package #:reblocks-text-editor/typed-pieces/html)
(defclass html-piece (typed-piece)
())
(defun make-html-piece (string caret)
(check-type string string)
(check-type caret integer)
(make-instance 'html-piece
:document string
:caret caret))
|
|
5d6dc97e43eda693032d367c753fe77eff457ece1fedba54e3ee04d99b2a26a3 | LesBoloss-es/sorting | arrays.ml | open Genlib.Genarray
open Sorting_array
let test_gen ~prep ~cmp ~sort ~gen ~nb ~len =
let rec aux = function
| 0 -> ()
| n ->
let t = prep (gen len) in
let t' = Array.copy t in
Array.stable_sort cmp t';
sort cmp t;
if t <> t' then failwith "test_gen";
aux (n-1)
in
aux nb
let test ~sort ~gen ~nb ~len =
test_gen ~prep:Fun.id ~cmp:Int.compare ~sort ~gen ~nb ~len
let test_stable ~sort ~gen ~nb ~len =
let prep = Array.mapi (fun i x -> (i, x)) in
let cmp = fun (_, x) (_, y) -> Int.compare x y in
test_gen ~prep ~cmp ~sort ~gen ~nb ~len
let failure = ref false
let test_one sorter =
Format.printf "Checking that %s sorts correctly... @?" sorter.name;
try
let nb = 100 in
Format.printf "[0] @?";
test ~sort:sorter.sorter ~gen:gen_unif ~nb ~len:0;
for log2_len = 0 to 5 do
let len = 1 lsl (3 * log2_len) in
Format.printf "[%d] @?" len;
test ~sort:sorter.sorter ~gen:gen_unif ~nb ~len;
test ~sort:sorter.sorter ~gen:(gen_k_runs 5) ~nb ~len
done;
Format.printf "done.@."
with
Failure _ ->
Format.printf "it does NOT!@.";
failure := true
let test_one_stable sorter =
Format.printf "Checking that %s sorts in a stable way... @?" sorter.name;
try
let nb = 20 in
for log2_len = 1 to 5 do
let len = 1 lsl (3 * log2_len) in
Format.printf "[%d] @?" len;
test_stable ~sort:sorter.sorter ~gen:(gen_unif ~limit:(len / 4)) ~nb ~len;
done;
Format.printf "done.@."
with
Failure _ ->
Format.printf "it does NOT!@.";
failure := true
let () =
all_sorters |> List.iter @@ fun sorter ->
test_one sorter;
if sorter.stable then
test_one_stable sorter
| null | https://raw.githubusercontent.com/LesBoloss-es/sorting/40702b8ff99ac001261af424bd7e0834e2a26bf1/tests/arrays.ml | ocaml | open Genlib.Genarray
open Sorting_array
let test_gen ~prep ~cmp ~sort ~gen ~nb ~len =
let rec aux = function
| 0 -> ()
| n ->
let t = prep (gen len) in
let t' = Array.copy t in
Array.stable_sort cmp t';
sort cmp t;
if t <> t' then failwith "test_gen";
aux (n-1)
in
aux nb
let test ~sort ~gen ~nb ~len =
test_gen ~prep:Fun.id ~cmp:Int.compare ~sort ~gen ~nb ~len
let test_stable ~sort ~gen ~nb ~len =
let prep = Array.mapi (fun i x -> (i, x)) in
let cmp = fun (_, x) (_, y) -> Int.compare x y in
test_gen ~prep ~cmp ~sort ~gen ~nb ~len
let failure = ref false
let test_one sorter =
Format.printf "Checking that %s sorts correctly... @?" sorter.name;
try
let nb = 100 in
Format.printf "[0] @?";
test ~sort:sorter.sorter ~gen:gen_unif ~nb ~len:0;
for log2_len = 0 to 5 do
let len = 1 lsl (3 * log2_len) in
Format.printf "[%d] @?" len;
test ~sort:sorter.sorter ~gen:gen_unif ~nb ~len;
test ~sort:sorter.sorter ~gen:(gen_k_runs 5) ~nb ~len
done;
Format.printf "done.@."
with
Failure _ ->
Format.printf "it does NOT!@.";
failure := true
let test_one_stable sorter =
Format.printf "Checking that %s sorts in a stable way... @?" sorter.name;
try
let nb = 20 in
for log2_len = 1 to 5 do
let len = 1 lsl (3 * log2_len) in
Format.printf "[%d] @?" len;
test_stable ~sort:sorter.sorter ~gen:(gen_unif ~limit:(len / 4)) ~nb ~len;
done;
Format.printf "done.@."
with
Failure _ ->
Format.printf "it does NOT!@.";
failure := true
let () =
all_sorters |> List.iter @@ fun sorter ->
test_one sorter;
if sorter.stable then
test_one_stable sorter
|
|
7b0806108014e17a68ae696ec8afc73f6d0b9c74960e1b6dedf83d627419da7e | LexiFi/landmarks | mapper.ml | This file is released under the terms of an MIT - like license .
(* See the attached LICENSE file. *)
Copyright 2016 by LexiFi .
let default_auto, default_remove, default_threads =
match Sys.getenv "OCAML_LANDMARKS" with
| exception Not_found -> false, false, false
| env ->
let opts = String.split_on_char ',' env in
List.mem "auto" opts,
List.mem "remove" opts,
List.mem "threads" opts
let auto = ref default_auto
let remove = ref default_remove
let threads = ref default_threads
open Ppxlib
open Ast_helper
open Asttypes
open Parsetree
open Longident
open Location
let mkloc txt loc =
{ txt; loc }
let mknoloc txt =
mkloc txt !Ast_helper.default_loc
let digest x =
Digest.to_hex (Digest.string (Marshal.to_string x []))
let error loc code =
let open Printf in
let message = function
| `Too_many_attributes -> "too many attributes"
| `Expecting_payload l ->
sprintf "expecting payload in [%s]"
(String.concat "," (List.map (sprintf "\"%s\"") l))
| `Payload_not_a_string -> "payload is not a string"
| `Payload_not_an_expression -> "payload is not an expression"
| `Provide_a_name -> "this landmark annotation requires a name argument"
in
Location.Error.raise (Location.Error.make ~loc ~sub:[]
(Printf.sprintf "ppx_landmark: %s" (message code)))
let landmark_hash = ref ""
let landmark_id = ref 0
let landmarks_to_register = ref []
let has_name key {attr_name = {txt; _}; _} = txt = key
let remove_attribute key =
List.filter (fun x -> not (has_name key x))
let has_attribute ?(auto = false) key l =
if auto || List.exists (has_name key) l then
Some (remove_attribute key l)
else
None
type landmark =
| Constant of string
| Dynamic of Parsetree.expression
let get_payload key = function
{attr_name = {txt; _}; attr_payload = PStr [{pstr_desc = Pstr_eval ({
pexp_desc = Pexp_constant (Pconst_string (x, _, None)); _
}, _); _}]; _} when txt = key ->
Some (Some (Constant x))
| {attr_name = {txt; _}; attr_payload = PStr [{pstr_desc = Pstr_eval (expression, _); _}]; _} when txt = key ->
Some (Some (Dynamic expression))
| {attr_name = {txt; _}; attr_payload = PStr []; _} when txt = key -> Some None
| {attr_name = {txt; _}; attr_loc; _} when txt = key -> error attr_loc `Payload_not_an_expression
| _ -> None
let get_string_payload key ({attr_loc; _} as e) =
match get_payload key e with
| Some None -> Some None
| Some (Some (Constant x)) -> Some (Some x)
| Some (Some (Dynamic _)) -> error attr_loc `Payload_not_a_string
| None -> None
let has_landmark_attribute ?auto = has_attribute ?auto "landmark"
let payload_of_string x =
PStr [Str.eval (Exp.constant (Const.string x))]
let var x = Exp.ident (mknoloc (Longident.parse x))
let rec filter_map f = function
| [] -> []
| hd :: tl ->
match f hd with
| Some x -> x :: (filter_map f tl)
| None -> filter_map f tl
let string_of_loc (l : Location.t) =
let file = l.loc_start.pos_fname in
let line = l.loc_start.pos_lnum in
Printf.sprintf "%s:%d" file line
let enter_landmark lm =
let landmark_enter =
if !threads then "Landmark_threads.enter" else "Landmark.enter"
in
Exp.apply (var landmark_enter) [Nolabel, var lm]
let exit_landmark lm =
let landmark_exit =
if !threads then "Landmark_threads.exit" else "Landmark.exit"
in
Exp.apply (var landmark_exit) [Nolabel, var lm]
let register_landmark ?id name location =
let args = [ Labelled "location", Const.string location |> Exp.constant; Nolabel, name] in
Exp.apply (var "Landmark.register")
(match id with
| None -> args
| Some id -> (Labelled "id", Const.string id |> Exp.constant) :: args)
let register_constant_landmark ?id name location =
register_landmark ?id (Exp.constant (Const.string name)) location
let new_landmark landmark_name loc =
incr landmark_id;
let id = Printf.sprintf "%s_%d" !landmark_hash !landmark_id in
let landmark = "__generated_landmark_" ^ id in
let landmark_location = string_of_loc loc in
landmarks_to_register :=
(landmark, landmark_name, landmark_location, id) :: !landmarks_to_register;
landmark
let qualified ctx name = String.concat "." (List.rev (name :: ctx))
let raise_ident = "Landmark.raise"
let unit = Exp.construct (mknoloc (Longident.parse "()")) None
let wrap_landmark ctx landmark loc expr =
let generate landmark =
Exp.sequence (enter_landmark landmark)
(Exp.let_ Nonrecursive
[Vb.mk (Pat.var (mknoloc "r"))
(Exp.try_ expr
[Exp.case (Pat.var (mknoloc "e"))
(Exp.sequence
(exit_landmark landmark)
(Exp.apply (var raise_ident) [Nolabel, var "e"]))])]
(Exp.sequence
(exit_landmark landmark)
(var "r")))
in
match landmark with
| Constant landmark_name ->
let landmark_name = qualified ctx landmark_name in
let landmark = new_landmark landmark_name loc in
generate landmark
| Dynamic expression ->
let landmark = Printf.sprintf "__dynamic_landmark__%s" !landmark_hash in
(Exp.ifthenelse
(Exp.apply (var "Landmark.profiling") [Nolabel, unit])
(Exp.let_ Nonrecursive
[Vb.mk
(Pat.var
(mknoloc landmark)
)
(register_landmark expression (string_of_loc loc))
]
(generate landmark)
)
(Some expr)
)
let rec arity {pexp_desc; _} =
match pexp_desc with
| Pexp_fun (a, _, _, e) -> a :: arity e
| Pexp_function cases ->
let max_list l1 l2 =
if List.length l1 < List.length l2 then
l1
else
l2
in
Nolabel :: (List.fold_left
(fun acc {pc_rhs; _} -> max_list (arity pc_rhs) acc)
[] cases)
| Pexp_newtype (_, e) -> arity e
| Pexp_constraint (e, _) -> arity e
| Pexp_poly (e, _) -> arity e
| _ -> []
let rec wrap_landmark_method ctx landmark loc ({pexp_desc; _} as expr) =
match pexp_desc with
| Pexp_fun (label, def, pat, e) ->
{ expr with pexp_desc = Pexp_fun (label, def, pat, wrap_landmark_method ctx landmark loc e)}
| Pexp_poly (e, typ) ->
{ expr with pexp_desc = Pexp_poly (wrap_landmark_method ctx landmark loc e, typ)}
| _ -> wrap_landmark ctx landmark loc expr
let eta_expand f t n =
let vars =
List.mapi (fun k x -> (x, Printf.sprintf "__x%d" k)) n
in
let rec app acc = function
| [] -> acc
| (l,x) :: tl -> app (Exp.apply acc [l, Exp.ident (mknoloc (Lident x))]) tl
in
let rec lam = function
| [] -> f (app t vars)
| (l,x) :: tl -> Exp.fun_ l None (Pat.var (mknoloc x)) (lam tl)
in
lam vars
let rec not_a_constant expr = match expr.pexp_desc with
| Pexp_constant _ | Pexp_ident _ -> false
| Pexp_coerce (e, _, _) | Pexp_poly (e, _) | Pexp_constraint (e, _) -> not_a_constant e
| _ -> true
let rec name_of_pattern pat =
match pat.ppat_desc with
| Ppat_var {txt; _} -> Some txt
| Ppat_constraint (pat, _) -> name_of_pattern pat
| _ -> None
let translate_value_bindings ctx value_binding auto vbs =
let vbs_arity_name =
List.map
(fun vb -> match vb, has_landmark_attribute ~auto vb.pvb_attributes with
| { pvb_expr; pvb_loc; pvb_pat; _}, Some attr
when not_a_constant pvb_expr ->
let arity = arity pvb_expr in
let from_names arity fun_name landmark_name =
if auto && arity = [] then
(vb, None)
else
(vb, Some (arity, fun_name, landmark_name, pvb_loc, attr))
in
(match name_of_pattern pvb_pat,
filter_map (get_payload "landmark") vb.pvb_attributes
with
| Some fun_name, []
| Some fun_name, [ None ] ->
from_names arity fun_name (Constant fun_name)
| Some fun_name, [ Some landmark ] ->
from_names arity fun_name landmark
| _, [Some name] -> from_names [] "" name
| _, [None] -> error pvb_loc `Provide_a_name
| _, [] -> (vb, None)
| _, _ :: _ :: _ -> error pvb_loc `Too_many_attributes
)
| _, _ -> (vb, None))
vbs
in
let vbs = List.map (function
| (vb, None) -> value_binding vb
| {pvb_pat; pvb_loc; pvb_expr; _}, Some (arity, _, name, loc, attrs) ->
(* Remove landmark attribute: *)
let vb =
Vb.mk ~attrs ~loc:pvb_loc pvb_pat pvb_expr
|> value_binding
in
if arity = [] then
{ vb with pvb_expr = wrap_landmark ctx name loc vb.pvb_expr}
else
vb) vbs_arity_name
in
let new_vbs = filter_map (function
| (_, Some (_ :: _ as arity, fun_name, landmark_name, loc, _)) ->
let ident = Exp.ident (mknoloc (Lident fun_name)) in
let expr = eta_expand (wrap_landmark ctx landmark_name loc) ident arity in
Some (Vb.mk (Pat.var (mknoloc fun_name)) expr)
| _ -> None) vbs_arity_name
in
vbs, new_vbs
let mapper =
object(self)
inherit [bool * string list] Ast_traverse.fold_map as super
method! module_binding ({pmb_name; _} as binding) ((auto, ctx) as acc) =
let acc =
match pmb_name.txt with
| None -> acc
| Some txt -> auto, txt :: ctx
in
let result, (_, ctx) = super#module_binding binding acc in
result, (auto, ctx)
method! structure l (auto, ctx) =
let _, results =
List.fold_left (fun (auto, acc) expr ->
match expr with
| { pstr_desc = Pstr_attribute attr; pstr_loc; _} as pstr ->
(match get_string_payload "landmark" attr with
| Some (Some "auto") -> true, acc
| Some (Some "auto-off") -> false, acc
| None -> auto, pstr :: acc
| _ -> error pstr_loc (`Expecting_payload ["auto"; "auto-off"]))
| { pstr_desc = Pstr_value (rec_flag, vbs); pstr_loc} ->
let value_binding vb =
fst (self # value_binding vb (auto, ctx))
in
let vbs, new_vbs =
translate_value_bindings ctx value_binding auto vbs
in
let str = Str.value ~loc:pstr_loc rec_flag vbs in
if new_vbs = [] then auto, str :: acc
else
let warning_off =
Str.attribute {attr_name = mknoloc "ocaml.warning"; attr_payload = payload_of_string "-32";
attr_loc = Location.none}
in
let include_wrapper = new_vbs
|> Str.value Nonrecursive
|> fun x -> Mod.structure [warning_off; x]
|> Incl.mk
|> Str.include_
in
auto, include_wrapper :: str :: acc
| sti ->
let sti, _ = super # structure_item sti (auto, ctx) in
auto, sti :: acc) (auto, []) l
in
List.rev results, (auto, ctx)
method! class_field class_field ((auto, ctx) as acc) =
match class_field with
| { pcf_desc = Pcf_method (loc, privat, Cfk_concrete (flag, expr)); pcf_loc; pcf_attributes; _ } ->
begin
let landmark =
match filter_map (get_payload "landmark") pcf_attributes, auto with
| [Some landmark_name], _ -> Some landmark_name
| [None], _ | _, true -> Some (Constant loc.txt)
| [], false -> None
| _ :: _ :: _, _ -> error pcf_loc `Too_many_attributes
in
match landmark with
| None ->
super # class_field class_field acc
| Some landmark ->
let expr =
wrap_landmark_method ctx landmark pcf_loc (fst (self # expression expr acc))
in
{ class_field with
pcf_desc = Pcf_method (loc, privat, Cfk_concrete (flag, expr));
pcf_attributes = remove_attribute "landmark" pcf_attributes
}, acc
end
| _ -> super # class_field class_field acc
method! class_expr class_expr ((_, ctx) as acc) =
match class_expr with
| {pcl_desc = Pcl_let (rec_flag, vbs, body); _} ->
let vbs, new_vbs =
let value_binding vb =
fst (self # value_binding vb acc)
in
translate_value_bindings ctx value_binding false vbs
in
let body, _ = self # class_expr body acc in
let body =
if new_vbs = [] then
body
else
Cl.let_ Nonrecursive new_vbs body
in
{ class_expr with pcl_desc = Pcl_let (rec_flag, vbs, body) }, acc
| _ -> super # class_expr class_expr acc
method! expression expr ((_, ctx) as acc) =
let expr = match expr with
| ({pexp_desc = Pexp_let (rec_flag, vbs, body); _} as expr) ->
let vbs, new_vbs =
let value_binding vb =
fst (self # value_binding vb acc)
in
translate_value_bindings ctx value_binding false vbs
in
let body = fst (self # expression body acc) in
let body =
if new_vbs = [] then
body
else
Exp.let_ Nonrecursive new_vbs body
in
{ expr with pexp_desc = Pexp_let (rec_flag, vbs, body) }
| expr -> fst (super # expression expr acc)
in
let {pexp_attributes; pexp_loc; _} = expr in
match filter_map (get_payload "landmark") pexp_attributes with
| [Some landmark_name] ->
{ expr with pexp_attributes =
remove_attribute "landmark" pexp_attributes }
|> wrap_landmark ctx landmark_name pexp_loc, acc
| [ None ] -> error pexp_loc `Provide_a_name
| [] -> expr, acc
| _ -> error pexp_loc `Too_many_attributes
end
let remove_attributes =
object
inherit Ast_traverse.map as super
method! structure l =
let l =
List.filter (function {pstr_desc = Pstr_attribute attr; _ }
when has_landmark_attribute [attr] <> None -> false | _ -> true) l
in
super # structure l
method! attributes attributes =
super # attributes
(match has_landmark_attribute attributes with
| Some attrs ->
attrs
| None ->
attributes)
end
let has_disable l =
let disable = ref false in
let f = function
| { pstr_desc = Pstr_attribute attr; pstr_loc; _} as pstr ->
(match get_string_payload "landmark" attr with
| Some (Some "disable") -> disable := true; None
| Some (Some "auto-off") | Some (Some "auto") | None -> Some pstr
| _ -> error pstr_loc
(`Expecting_payload ["auto"; "auto-off"; "disable"]))
| i -> Some i
in
let res = filter_map f l in
!disable, res
let toplevel_mapper auto =
object
inherit Ast_traverse.map
method! signature si = si
method! structure l =
match l with [] -> [] | l ->
assert (!landmark_hash = "");
landmark_hash := digest l;
let disable, l = has_disable l in
if disable then l else begin
let first_loc = (List.hd l).pstr_loc in
let module_name = Filename.remove_extension (Filename.basename !Ocaml_common.Location.input_name) in
let ctx = [String.capitalize_ascii module_name] in
let l, _ = mapper # structure l (auto, ctx) in
let landmark_name = Printf.sprintf "load(%s)" module_name in
let lm =
if auto then
Some (new_landmark landmark_name first_loc)
else
None
in
if !landmarks_to_register = [] then l else
let landmarks =
Str.value Nonrecursive
(List.map (fun (landmark, landmark_name, landmark_location, id) ->
Vb.mk (Pat.var (mknoloc landmark))
(register_constant_landmark ~id landmark_name landmark_location))
(List.rev !landmarks_to_register))
in
match lm with
| Some lm ->
let begin_load =
Str.value Nonrecursive
[Vb.mk (Pat.construct (mknoloc (Longident.parse "()")) None)
(enter_landmark lm)]
in
let exit_load =
Str.value Nonrecursive
[Vb.mk (Pat.construct (mknoloc (Longident.parse "()")) None)
(exit_landmark lm)]
in
landmarks :: (begin_load :: l @ [exit_load])
| None ->
landmarks :: l
end
end
| null | https://raw.githubusercontent.com/LexiFi/landmarks/0c1162ecaf1360d0b183cd7229907f50a98567d3/ppx/mapper.ml | ocaml | See the attached LICENSE file.
Remove landmark attribute: | This file is released under the terms of an MIT - like license .
Copyright 2016 by LexiFi .
let default_auto, default_remove, default_threads =
match Sys.getenv "OCAML_LANDMARKS" with
| exception Not_found -> false, false, false
| env ->
let opts = String.split_on_char ',' env in
List.mem "auto" opts,
List.mem "remove" opts,
List.mem "threads" opts
let auto = ref default_auto
let remove = ref default_remove
let threads = ref default_threads
open Ppxlib
open Ast_helper
open Asttypes
open Parsetree
open Longident
open Location
let mkloc txt loc =
{ txt; loc }
let mknoloc txt =
mkloc txt !Ast_helper.default_loc
let digest x =
Digest.to_hex (Digest.string (Marshal.to_string x []))
let error loc code =
let open Printf in
let message = function
| `Too_many_attributes -> "too many attributes"
| `Expecting_payload l ->
sprintf "expecting payload in [%s]"
(String.concat "," (List.map (sprintf "\"%s\"") l))
| `Payload_not_a_string -> "payload is not a string"
| `Payload_not_an_expression -> "payload is not an expression"
| `Provide_a_name -> "this landmark annotation requires a name argument"
in
Location.Error.raise (Location.Error.make ~loc ~sub:[]
(Printf.sprintf "ppx_landmark: %s" (message code)))
let landmark_hash = ref ""
let landmark_id = ref 0
let landmarks_to_register = ref []
let has_name key {attr_name = {txt; _}; _} = txt = key
let remove_attribute key =
List.filter (fun x -> not (has_name key x))
let has_attribute ?(auto = false) key l =
if auto || List.exists (has_name key) l then
Some (remove_attribute key l)
else
None
type landmark =
| Constant of string
| Dynamic of Parsetree.expression
let get_payload key = function
{attr_name = {txt; _}; attr_payload = PStr [{pstr_desc = Pstr_eval ({
pexp_desc = Pexp_constant (Pconst_string (x, _, None)); _
}, _); _}]; _} when txt = key ->
Some (Some (Constant x))
| {attr_name = {txt; _}; attr_payload = PStr [{pstr_desc = Pstr_eval (expression, _); _}]; _} when txt = key ->
Some (Some (Dynamic expression))
| {attr_name = {txt; _}; attr_payload = PStr []; _} when txt = key -> Some None
| {attr_name = {txt; _}; attr_loc; _} when txt = key -> error attr_loc `Payload_not_an_expression
| _ -> None
let get_string_payload key ({attr_loc; _} as e) =
match get_payload key e with
| Some None -> Some None
| Some (Some (Constant x)) -> Some (Some x)
| Some (Some (Dynamic _)) -> error attr_loc `Payload_not_a_string
| None -> None
let has_landmark_attribute ?auto = has_attribute ?auto "landmark"
let payload_of_string x =
PStr [Str.eval (Exp.constant (Const.string x))]
let var x = Exp.ident (mknoloc (Longident.parse x))
let rec filter_map f = function
| [] -> []
| hd :: tl ->
match f hd with
| Some x -> x :: (filter_map f tl)
| None -> filter_map f tl
let string_of_loc (l : Location.t) =
let file = l.loc_start.pos_fname in
let line = l.loc_start.pos_lnum in
Printf.sprintf "%s:%d" file line
let enter_landmark lm =
let landmark_enter =
if !threads then "Landmark_threads.enter" else "Landmark.enter"
in
Exp.apply (var landmark_enter) [Nolabel, var lm]
let exit_landmark lm =
let landmark_exit =
if !threads then "Landmark_threads.exit" else "Landmark.exit"
in
Exp.apply (var landmark_exit) [Nolabel, var lm]
let register_landmark ?id name location =
let args = [ Labelled "location", Const.string location |> Exp.constant; Nolabel, name] in
Exp.apply (var "Landmark.register")
(match id with
| None -> args
| Some id -> (Labelled "id", Const.string id |> Exp.constant) :: args)
let register_constant_landmark ?id name location =
register_landmark ?id (Exp.constant (Const.string name)) location
let new_landmark landmark_name loc =
incr landmark_id;
let id = Printf.sprintf "%s_%d" !landmark_hash !landmark_id in
let landmark = "__generated_landmark_" ^ id in
let landmark_location = string_of_loc loc in
landmarks_to_register :=
(landmark, landmark_name, landmark_location, id) :: !landmarks_to_register;
landmark
let qualified ctx name = String.concat "." (List.rev (name :: ctx))
let raise_ident = "Landmark.raise"
let unit = Exp.construct (mknoloc (Longident.parse "()")) None
let wrap_landmark ctx landmark loc expr =
let generate landmark =
Exp.sequence (enter_landmark landmark)
(Exp.let_ Nonrecursive
[Vb.mk (Pat.var (mknoloc "r"))
(Exp.try_ expr
[Exp.case (Pat.var (mknoloc "e"))
(Exp.sequence
(exit_landmark landmark)
(Exp.apply (var raise_ident) [Nolabel, var "e"]))])]
(Exp.sequence
(exit_landmark landmark)
(var "r")))
in
match landmark with
| Constant landmark_name ->
let landmark_name = qualified ctx landmark_name in
let landmark = new_landmark landmark_name loc in
generate landmark
| Dynamic expression ->
let landmark = Printf.sprintf "__dynamic_landmark__%s" !landmark_hash in
(Exp.ifthenelse
(Exp.apply (var "Landmark.profiling") [Nolabel, unit])
(Exp.let_ Nonrecursive
[Vb.mk
(Pat.var
(mknoloc landmark)
)
(register_landmark expression (string_of_loc loc))
]
(generate landmark)
)
(Some expr)
)
let rec arity {pexp_desc; _} =
match pexp_desc with
| Pexp_fun (a, _, _, e) -> a :: arity e
| Pexp_function cases ->
let max_list l1 l2 =
if List.length l1 < List.length l2 then
l1
else
l2
in
Nolabel :: (List.fold_left
(fun acc {pc_rhs; _} -> max_list (arity pc_rhs) acc)
[] cases)
| Pexp_newtype (_, e) -> arity e
| Pexp_constraint (e, _) -> arity e
| Pexp_poly (e, _) -> arity e
| _ -> []
let rec wrap_landmark_method ctx landmark loc ({pexp_desc; _} as expr) =
match pexp_desc with
| Pexp_fun (label, def, pat, e) ->
{ expr with pexp_desc = Pexp_fun (label, def, pat, wrap_landmark_method ctx landmark loc e)}
| Pexp_poly (e, typ) ->
{ expr with pexp_desc = Pexp_poly (wrap_landmark_method ctx landmark loc e, typ)}
| _ -> wrap_landmark ctx landmark loc expr
let eta_expand f t n =
let vars =
List.mapi (fun k x -> (x, Printf.sprintf "__x%d" k)) n
in
let rec app acc = function
| [] -> acc
| (l,x) :: tl -> app (Exp.apply acc [l, Exp.ident (mknoloc (Lident x))]) tl
in
let rec lam = function
| [] -> f (app t vars)
| (l,x) :: tl -> Exp.fun_ l None (Pat.var (mknoloc x)) (lam tl)
in
lam vars
let rec not_a_constant expr = match expr.pexp_desc with
| Pexp_constant _ | Pexp_ident _ -> false
| Pexp_coerce (e, _, _) | Pexp_poly (e, _) | Pexp_constraint (e, _) -> not_a_constant e
| _ -> true
let rec name_of_pattern pat =
match pat.ppat_desc with
| Ppat_var {txt; _} -> Some txt
| Ppat_constraint (pat, _) -> name_of_pattern pat
| _ -> None
let translate_value_bindings ctx value_binding auto vbs =
let vbs_arity_name =
List.map
(fun vb -> match vb, has_landmark_attribute ~auto vb.pvb_attributes with
| { pvb_expr; pvb_loc; pvb_pat; _}, Some attr
when not_a_constant pvb_expr ->
let arity = arity pvb_expr in
let from_names arity fun_name landmark_name =
if auto && arity = [] then
(vb, None)
else
(vb, Some (arity, fun_name, landmark_name, pvb_loc, attr))
in
(match name_of_pattern pvb_pat,
filter_map (get_payload "landmark") vb.pvb_attributes
with
| Some fun_name, []
| Some fun_name, [ None ] ->
from_names arity fun_name (Constant fun_name)
| Some fun_name, [ Some landmark ] ->
from_names arity fun_name landmark
| _, [Some name] -> from_names [] "" name
| _, [None] -> error pvb_loc `Provide_a_name
| _, [] -> (vb, None)
| _, _ :: _ :: _ -> error pvb_loc `Too_many_attributes
)
| _, _ -> (vb, None))
vbs
in
let vbs = List.map (function
| (vb, None) -> value_binding vb
| {pvb_pat; pvb_loc; pvb_expr; _}, Some (arity, _, name, loc, attrs) ->
let vb =
Vb.mk ~attrs ~loc:pvb_loc pvb_pat pvb_expr
|> value_binding
in
if arity = [] then
{ vb with pvb_expr = wrap_landmark ctx name loc vb.pvb_expr}
else
vb) vbs_arity_name
in
let new_vbs = filter_map (function
| (_, Some (_ :: _ as arity, fun_name, landmark_name, loc, _)) ->
let ident = Exp.ident (mknoloc (Lident fun_name)) in
let expr = eta_expand (wrap_landmark ctx landmark_name loc) ident arity in
Some (Vb.mk (Pat.var (mknoloc fun_name)) expr)
| _ -> None) vbs_arity_name
in
vbs, new_vbs
let mapper =
object(self)
inherit [bool * string list] Ast_traverse.fold_map as super
method! module_binding ({pmb_name; _} as binding) ((auto, ctx) as acc) =
let acc =
match pmb_name.txt with
| None -> acc
| Some txt -> auto, txt :: ctx
in
let result, (_, ctx) = super#module_binding binding acc in
result, (auto, ctx)
method! structure l (auto, ctx) =
let _, results =
List.fold_left (fun (auto, acc) expr ->
match expr with
| { pstr_desc = Pstr_attribute attr; pstr_loc; _} as pstr ->
(match get_string_payload "landmark" attr with
| Some (Some "auto") -> true, acc
| Some (Some "auto-off") -> false, acc
| None -> auto, pstr :: acc
| _ -> error pstr_loc (`Expecting_payload ["auto"; "auto-off"]))
| { pstr_desc = Pstr_value (rec_flag, vbs); pstr_loc} ->
let value_binding vb =
fst (self # value_binding vb (auto, ctx))
in
let vbs, new_vbs =
translate_value_bindings ctx value_binding auto vbs
in
let str = Str.value ~loc:pstr_loc rec_flag vbs in
if new_vbs = [] then auto, str :: acc
else
let warning_off =
Str.attribute {attr_name = mknoloc "ocaml.warning"; attr_payload = payload_of_string "-32";
attr_loc = Location.none}
in
let include_wrapper = new_vbs
|> Str.value Nonrecursive
|> fun x -> Mod.structure [warning_off; x]
|> Incl.mk
|> Str.include_
in
auto, include_wrapper :: str :: acc
| sti ->
let sti, _ = super # structure_item sti (auto, ctx) in
auto, sti :: acc) (auto, []) l
in
List.rev results, (auto, ctx)
method! class_field class_field ((auto, ctx) as acc) =
match class_field with
| { pcf_desc = Pcf_method (loc, privat, Cfk_concrete (flag, expr)); pcf_loc; pcf_attributes; _ } ->
begin
let landmark =
match filter_map (get_payload "landmark") pcf_attributes, auto with
| [Some landmark_name], _ -> Some landmark_name
| [None], _ | _, true -> Some (Constant loc.txt)
| [], false -> None
| _ :: _ :: _, _ -> error pcf_loc `Too_many_attributes
in
match landmark with
| None ->
super # class_field class_field acc
| Some landmark ->
let expr =
wrap_landmark_method ctx landmark pcf_loc (fst (self # expression expr acc))
in
{ class_field with
pcf_desc = Pcf_method (loc, privat, Cfk_concrete (flag, expr));
pcf_attributes = remove_attribute "landmark" pcf_attributes
}, acc
end
| _ -> super # class_field class_field acc
method! class_expr class_expr ((_, ctx) as acc) =
match class_expr with
| {pcl_desc = Pcl_let (rec_flag, vbs, body); _} ->
let vbs, new_vbs =
let value_binding vb =
fst (self # value_binding vb acc)
in
translate_value_bindings ctx value_binding false vbs
in
let body, _ = self # class_expr body acc in
let body =
if new_vbs = [] then
body
else
Cl.let_ Nonrecursive new_vbs body
in
{ class_expr with pcl_desc = Pcl_let (rec_flag, vbs, body) }, acc
| _ -> super # class_expr class_expr acc
method! expression expr ((_, ctx) as acc) =
let expr = match expr with
| ({pexp_desc = Pexp_let (rec_flag, vbs, body); _} as expr) ->
let vbs, new_vbs =
let value_binding vb =
fst (self # value_binding vb acc)
in
translate_value_bindings ctx value_binding false vbs
in
let body = fst (self # expression body acc) in
let body =
if new_vbs = [] then
body
else
Exp.let_ Nonrecursive new_vbs body
in
{ expr with pexp_desc = Pexp_let (rec_flag, vbs, body) }
| expr -> fst (super # expression expr acc)
in
let {pexp_attributes; pexp_loc; _} = expr in
match filter_map (get_payload "landmark") pexp_attributes with
| [Some landmark_name] ->
{ expr with pexp_attributes =
remove_attribute "landmark" pexp_attributes }
|> wrap_landmark ctx landmark_name pexp_loc, acc
| [ None ] -> error pexp_loc `Provide_a_name
| [] -> expr, acc
| _ -> error pexp_loc `Too_many_attributes
end
let remove_attributes =
object
inherit Ast_traverse.map as super
method! structure l =
let l =
List.filter (function {pstr_desc = Pstr_attribute attr; _ }
when has_landmark_attribute [attr] <> None -> false | _ -> true) l
in
super # structure l
method! attributes attributes =
super # attributes
(match has_landmark_attribute attributes with
| Some attrs ->
attrs
| None ->
attributes)
end
let has_disable l =
let disable = ref false in
let f = function
| { pstr_desc = Pstr_attribute attr; pstr_loc; _} as pstr ->
(match get_string_payload "landmark" attr with
| Some (Some "disable") -> disable := true; None
| Some (Some "auto-off") | Some (Some "auto") | None -> Some pstr
| _ -> error pstr_loc
(`Expecting_payload ["auto"; "auto-off"; "disable"]))
| i -> Some i
in
let res = filter_map f l in
!disable, res
let toplevel_mapper auto =
object
inherit Ast_traverse.map
method! signature si = si
method! structure l =
match l with [] -> [] | l ->
assert (!landmark_hash = "");
landmark_hash := digest l;
let disable, l = has_disable l in
if disable then l else begin
let first_loc = (List.hd l).pstr_loc in
let module_name = Filename.remove_extension (Filename.basename !Ocaml_common.Location.input_name) in
let ctx = [String.capitalize_ascii module_name] in
let l, _ = mapper # structure l (auto, ctx) in
let landmark_name = Printf.sprintf "load(%s)" module_name in
let lm =
if auto then
Some (new_landmark landmark_name first_loc)
else
None
in
if !landmarks_to_register = [] then l else
let landmarks =
Str.value Nonrecursive
(List.map (fun (landmark, landmark_name, landmark_location, id) ->
Vb.mk (Pat.var (mknoloc landmark))
(register_constant_landmark ~id landmark_name landmark_location))
(List.rev !landmarks_to_register))
in
match lm with
| Some lm ->
let begin_load =
Str.value Nonrecursive
[Vb.mk (Pat.construct (mknoloc (Longident.parse "()")) None)
(enter_landmark lm)]
in
let exit_load =
Str.value Nonrecursive
[Vb.mk (Pat.construct (mknoloc (Longident.parse "()")) None)
(exit_landmark lm)]
in
landmarks :: (begin_load :: l @ [exit_load])
| None ->
landmarks :: l
end
end
|
e9d5ebd69bcccc58c002356c21891c4f7f7fb1b07ff956b25fc01901e023995a | d-cent/objective8 | launch.clj | (ns dev-helpers.launch
(:require [org.httpkit.server :as server]
[clojure.tools.logging :as log]
[dev-helpers.profiling :as profiling]
[objective8.core :as core]
[objective8.config :as config]
[objective8.back-end.storage.database :as db]))
;; Launching / relaunching / loading
(defonce the-system nil)
(defn- start-back-end-server [system]
(let [api-port (:api-port system)
server (server/run-server (core/back-end-handler) {:port api-port :thread 4})]
(prn "Starting api server on port: " api-port)
(assoc system :back-end-server server)))
(defn- stop-back-end-server [system]
(when-let [srv (:back-end-server system)]
(srv))
(dissoc system :back-end-server))
(defn- start-front-end-server [system]
(let [conf (:config system)
front-end-port (:front-end-port system)
server (server/run-server (core/front-end-handler conf) {:port front-end-port :thread 4})]
(prn "Starting front-end server on port: " front-end-port)
(assoc system :front-end-server server)))
(defn- stop-front-end-server [system]
(when-let [srv (:front-end-server system)]
(srv))
(dissoc system :front-end-server))
(defn- init
([system]
(init system {:app-config core/app-config}))
([system {:keys [app-config profile?] :as conf}]
(let [db-connection (db/connect!)]
(core/initialise-api)
(assoc system
:config app-config
:profiling profile?
:front-end-port (:front-end-port config/environment)
:api-port (:api-port config/environment)
:db-connection db-connection))))
(defn- instrument [system]
(when (:profiling system)
(profiling/instrument (:profiling system)))
system)
(defn- clear-profiling [system]
(when (:profiling system)
(profiling/clear (:profiling system)))
system)
(defn- make-launcher [config-name launcher-config]
(fn []
(alter-var-root #'the-system #(-> %
(init launcher-config)
instrument
start-front-end-server
start-back-end-server))
(log/info (str "Objective8 started\nfront-end on port: " (:front-end-port the-system)
"\napi on port:" (:api-port the-system)
" in configuration " config-name))))
(defn stop []
(alter-var-root #'the-system #(-> %
stop-back-end-server
stop-front-end-server
clear-profiling))
(log/info "Objective8 server stopped."))
(defn make-launcher-map [configs]
(doall
(apply merge
(for [[config-kwd config] configs]
(let [config-name (name config-kwd)
launcher-name (str "start-" config-name)]
(intern *ns*
(symbol launcher-name)
(make-launcher config-name config))
{config-kwd (symbol (str "user/" launcher-name))})))))
| null | https://raw.githubusercontent.com/d-cent/objective8/db8344ba4425ca0b38a31c99a3b282d7c8ddaef0/dev/dev_helpers/launch.clj | clojure | Launching / relaunching / loading | (ns dev-helpers.launch
(:require [org.httpkit.server :as server]
[clojure.tools.logging :as log]
[dev-helpers.profiling :as profiling]
[objective8.core :as core]
[objective8.config :as config]
[objective8.back-end.storage.database :as db]))
(defonce the-system nil)
(defn- start-back-end-server [system]
(let [api-port (:api-port system)
server (server/run-server (core/back-end-handler) {:port api-port :thread 4})]
(prn "Starting api server on port: " api-port)
(assoc system :back-end-server server)))
(defn- stop-back-end-server [system]
(when-let [srv (:back-end-server system)]
(srv))
(dissoc system :back-end-server))
(defn- start-front-end-server [system]
(let [conf (:config system)
front-end-port (:front-end-port system)
server (server/run-server (core/front-end-handler conf) {:port front-end-port :thread 4})]
(prn "Starting front-end server on port: " front-end-port)
(assoc system :front-end-server server)))
(defn- stop-front-end-server [system]
(when-let [srv (:front-end-server system)]
(srv))
(dissoc system :front-end-server))
(defn- init
([system]
(init system {:app-config core/app-config}))
([system {:keys [app-config profile?] :as conf}]
(let [db-connection (db/connect!)]
(core/initialise-api)
(assoc system
:config app-config
:profiling profile?
:front-end-port (:front-end-port config/environment)
:api-port (:api-port config/environment)
:db-connection db-connection))))
(defn- instrument [system]
(when (:profiling system)
(profiling/instrument (:profiling system)))
system)
(defn- clear-profiling [system]
(when (:profiling system)
(profiling/clear (:profiling system)))
system)
(defn- make-launcher [config-name launcher-config]
(fn []
(alter-var-root #'the-system #(-> %
(init launcher-config)
instrument
start-front-end-server
start-back-end-server))
(log/info (str "Objective8 started\nfront-end on port: " (:front-end-port the-system)
"\napi on port:" (:api-port the-system)
" in configuration " config-name))))
(defn stop []
(alter-var-root #'the-system #(-> %
stop-back-end-server
stop-front-end-server
clear-profiling))
(log/info "Objective8 server stopped."))
(defn make-launcher-map [configs]
(doall
(apply merge
(for [[config-kwd config] configs]
(let [config-name (name config-kwd)
launcher-name (str "start-" config-name)]
(intern *ns*
(symbol launcher-name)
(make-launcher config-name config))
{config-kwd (symbol (str "user/" launcher-name))})))))
|
f3ec027871bfae3164b6b239382aae10383ec345ac31bbb4238a0a76539ca3db | typedclojure/typedclojure | deprecated_wrapper_macros.clj | (in-ns 'clojure.core.typed)
(defmacro ^:deprecated doseq
"DEPRECATED: Use clojure.core/doseq.
Like clojure.core/doseq with optional annotations.
:let option uses clojure.core.typed/let
eg.
(doseq [a :- (U nil AnyInteger) [1 nil 2 3]
:when a]
(inc a))"
[seq-exprs & body]
(@#'core/assert-args
(vector? seq-exprs) "a vector for its binding"
(even? (count seq-exprs)) "an even number of forms in binding vector")
(core/let
[normalise-args
; change [a :- b c] to [[a :- b] c]
(core/fn [seq-exprs]
(core/loop [flat-result []
seq-exprs seq-exprs]
(cond
(empty? seq-exprs) flat-result
;for options (:let, :while etc)
(keyword? (first seq-exprs)) (core/let
[_ (assert (#{2} (count (take 2 seq-exprs)))
(str "for option missing " (first seq-exprs)))
[k v & rst] seq-exprs]
(recur (conj flat-result k v)
rst))
:else (if (#{:-} (second seq-exprs))
(core/let
[_ (assert (#{4} (count (take 4 seq-exprs)))
(str "for parameter missing after ':-'"))
[b colon t init & rst] seq-exprs]
(recur (conj flat-result [b colon t] init)
rst))
(core/let
[_ (assert (#{2} (count (take 2 seq-exprs)))
(str "for binding needs initial values"))
[b init & rst] seq-exprs]
(recur (conj flat-result [b :- `Any] init)
rst))))))
; normalise seq-exprs to be flat pairs
seq-exprs (normalise-args seq-exprs)
step (core/fn step [recform exprs]
(if-not exprs
[true `(do ~@body)]
(core/let
[k (first exprs)
v (second exprs)]
(if (keyword? k)
(core/let
[steppair (step recform (nnext exprs))
needrec (steppair 0)
subform (steppair 1)]
(cond
;typed let
(= k :let) [needrec `(let ~v ~subform)]
(= k :while) [false `(when ~v
~subform
~@(when needrec [recform]))]
(= k :when) [false `(if ~v
(do
~subform
~@(when needrec [recform]))
~recform)]))
;; k is [k :- k-ann]
(core/let
[_ (assert (and (vector? k)
(#{3} (count k))
(#{:-} (second k)))
"Binder must be of the form [lhs :- type]")
k-ann (nth k 2)
k (nth k 0)
k is the lhs binding
seq- (gensym "seq_")
chunk- (with-meta (gensym "chunk_")
{:tag 'clojure.lang.IChunk})
count- (gensym "count_")
i- (gensym "i_")
recform `(recur (next ~seq-) nil 0 0)
steppair (step recform (nnext exprs))
needrec (steppair 0)
subform (steppair 1)
recform-chunk
`(recur ~seq- ~chunk- ~count- (unchecked-inc ~i-))
steppair-chunk (step recform-chunk (nnext exprs))
subform-chunk (steppair-chunk 1)]
[true
`(loop [~seq- :- (U nil (Seq ~k-ann)) (seq ~v),
~chunk- :- (U nil (clojure.lang.IChunk ~k-ann)) nil
~count- :- Int 0,
~i- :- Int 0]
(if (and (< ~i- ~count-)
FIXME review this
;; core.typed thinks chunk- could be nil here
~chunk-)
(core/let
[;~k (.nth ~chunk- ~i-)
~k (nth ~chunk- ~i-)]
~subform-chunk
~@(when needrec [recform-chunk]))
(when-let [~seq- (seq ~seq-)]
(if (chunked-seq? ~seq-)
(core/let [c# (chunk-first ~seq-)]
(recur (chunk-rest ~seq-) c#
(int (count c#)) (int 0)))
(core/let [~k (first ~seq-)]
~subform
~@(when needrec [recform]))))))])))))]
(nth (step nil (seq seq-exprs)) 1)))
(defmacro ^:deprecated for
"DEPRECATED: Use clojure.core/for.
Like clojure.core/for with optional type annotations.
All types default to Any.
The :let option uses clojure.core.typed/let.
eg. (for [a :- (U nil Int) [1 nil 2 3]
:when a]
:- Number
(inc a))
Metadata using the :clojure.core.typed/ann keyword
can also be used for annotation.
eg. (for ^{::ann Number}
[^{::ann (U nil Int)} a [1 nil 2 3]
:when a]
(inc a))
"
[seq-exprs & maybe-ann-body-expr]
(@#'core/assert-args
(vector? seq-exprs) "a vector for its binding"
(even? (count seq-exprs)) "an even number of forms in binding vector")
(core/let
[orig-seq-exprs seq-exprs
has-explicit-return-type? (#{:-} (first maybe-ann-body-expr))
[ret-ann body-expr] (if has-explicit-return-type?
(core/let
[_ (assert (#{3} (count maybe-ann-body-expr))
(str "Wrong arguments to for: " maybe-ann-body-expr))
[colon t body] maybe-ann-body-expr]
[t body])
(core/let
[_ (assert (#{1} (count maybe-ann-body-expr))
(str "Wrong arguments to for: " maybe-ann-body-expr))
[body] maybe-ann-body-expr]
[`Any body]))
ret-ann (if-let [[_ meta-ann] (find (meta seq-exprs) ::ann)]
(do (assert (not has-explicit-return-type?)
"Cannot mix explicit and metadata return type in for.")
meta-ann)
ret-ann)
_ ( prn " ret - ann " ret - ann )
normalise-args
; change [a :- b c] to [[a :- b] c]
(core/fn [seq-exprs]
(core/loop [flat-result []
seq-exprs seq-exprs]
(cond
(empty? seq-exprs) flat-result
;for options (:let, :while etc)
(keyword? (first seq-exprs)) (core/let
[_ (assert (#{2} (count (take 2 seq-exprs)))
(str "for option missing " (first seq-exprs)))
[k v & rst] seq-exprs]
(recur (conj flat-result k v)
rst))
:else (core/let
[[meta-ann has-meta-ann?]
(when-let [[_ meta-ann] (find (meta (first seq-exprs)) ::ann)]
[meta-ann true])]
(if (#{:-} (second seq-exprs))
(core/let
[_ (assert (#{4} (count (take 4 seq-exprs)))
(str "for parameter missing after ':-'"))
[b colon t init & rst] seq-exprs]
(assert (not meta-ann)
"Cannot mix metadata annotation and explicit annotation in for.")
(recur (conj flat-result [b colon t] init)
rst))
(core/let
[_ (assert (#{2} (count (take 2 seq-exprs)))
(str "for binding needs initial values"))
[b init & rst] seq-exprs
ann (if has-meta-ann? meta-ann `Any)]
(recur (conj flat-result [b :- ann] init)
rst)))))))
; normalise seq-exprs to be flat pairs
seq-exprs (normalise-args seq-exprs)
to-groups (core/fn [seq-exprs]
(reduce (core/fn [groups [k v]]
(if (keyword? k)
(conj (pop groups) (conj (peek groups) [k v]))
(conj groups [k v])))
[] (partition 2 seq-exprs)))
err (core/fn [& msg] (throw (IllegalArgumentException. ^String (apply str msg))))
emit-bind (core/fn emit-bind [[[bind expr & mod-pairs]
& [[_ next-expr] :as next-groups]]]
(core/let
[_ (assert (and (vector? bind)
(#{3} (count bind))
(#{:-} (second bind)))
"Binder must be of the form [lhs :- type]")
bind-ann (nth bind 2)
bind (nth bind 0)
giter (gensym "iter__")
gxs (gensym "s__")
do-mod (core/fn do-mod [[[k v :as pair] & etc]]
(cond
;typed let
(= k :let) `(let ~v ~(do-mod etc))
(= k :while) `(when ~v ~(do-mod etc))
(= k :when) `(if ~v
~(do-mod etc)
(recur (rest ~gxs)))
(keyword? k) (err "Invalid 'for' keyword " k)
next-groups
`(core/let
[iterys# ~(emit-bind next-groups)
fs# (seq (iterys# ~next-expr))]
(if fs#
(concat fs# (~giter (rest ~gxs)))
(recur (rest ~gxs))))
:else `(cons (ann-form ~body-expr ~ret-ann) ;; ann-form for better error messages
(~giter (rest ~gxs)))))]
(if next-groups
#_"not the inner-most loop"
`(fn ~giter
[~gxs :- (Option (Seqable ~bind-ann))]
:- (Seq ~ret-ann)
(lazy-seq
(map (fn [t# :- ~ret-ann] :- ~ret-ann
(core/let
[^{::auto-ann ~(meta orig-seq-exprs)
::track-kind ::for-return}
t# t#]
;(prn "tracked t#" t#)
t#))
(loop [~gxs :- (Option (Seqable ~bind-ann)) ~gxs]
(when-let [xs# (seq ~gxs)]
(core/let
[^{::auto-ann ~(meta bind)
::track-kind ::for-param}
x# (first xs#)
;_# (prn "for param x#" x#)
~bind x#]
~(do-mod mod-pairs)))))))
#_"inner-most loop"
(core/let
[gi (gensym "i__")
gb (gensym "b__")
do-cmod (core/fn do-cmod [[[k v :as pair] & etc]]
(cond
; typed let
(= k :let) `(let ~v ~(do-cmod etc))
(= k :while) `(when ~v ~(do-cmod etc))
(= k :when) `(if ~v
~(do-cmod etc)
(recur
(unchecked-inc ~gi)))
(keyword? k)
(err "Invalid 'for' keyword " k)
:else
`(do (chunk-append ~gb
; put an ann-form here so at least one error message
; points to code the user can recognise.
(ann-form ~body-expr
~ret-ann))
(recur (unchecked-inc ~gi)))))]
`(fn ~giter [~gxs :- (Option (Seqable ~bind-ann))]
:- (Seq ~ret-ann)
(lazy-seq
(map (fn [t# :- ~ret-ann] :- ~ret-ann
(core/let
[^{::auto-ann ~(meta orig-seq-exprs)
::track-kind ::for-return}
t# t#]
t#))
(loop [~gxs :- (Option (Seqable ~bind-ann)) ~gxs]
(when-let [~gxs (seq ~gxs)]
(if (chunked-seq? ~gxs)
(core/let
[c# (chunk-first ~gxs)
size# (int (count c#))
~gb (ann-form (chunk-buffer size#)
(~'clojure.lang.ChunkBuffer ~ret-ann))]
(if (loop [~gi :- Int, (int 0)]
(if (< ~gi size#)
(core/let
[;~bind (.nth c# ~gi)]
^{::auto-ann ~(meta bind)
::track-kind ::for-param}
x# (nth c# ~gi)
~bind x#]
~(do-cmod mod-pairs))
true))
(chunk-cons
(chunk ~gb)
(~giter (chunk-rest ~gxs)))
(chunk-cons (chunk ~gb) nil)))
(core/let
[^{::auto-ann ~(meta bind)
::track-kind ::for-param}
x# (first ~gxs)
;_# (prn "for param x#" x#)
~bind x#]
~(do-mod mod-pairs))))))))))))]
`(core/let [iter# ~(emit-bind (to-groups seq-exprs))]
(iter# ~(second seq-exprs)))))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/c7be1ddb61eb27c524078da6c3673a3dd246c26d/typed/clj.runtime/src/clojure/core/typed/deprecated_wrapper_macros.clj | clojure | change [a :- b c] to [[a :- b] c]
for options (:let, :while etc)
normalise seq-exprs to be flat pairs
typed let
k is [k :- k-ann]
core.typed thinks chunk- could be nil here
~k (.nth ~chunk- ~i-)
change [a :- b c] to [[a :- b] c]
for options (:let, :while etc)
normalise seq-exprs to be flat pairs
typed let
ann-form for better error messages
(prn "tracked t#" t#)
_# (prn "for param x#" x#)
typed let
put an ann-form here so at least one error message
points to code the user can recognise.
~bind (.nth c# ~gi)]
_# (prn "for param x#" x#) | (in-ns 'clojure.core.typed)
(defmacro ^:deprecated doseq
"DEPRECATED: Use clojure.core/doseq.
Like clojure.core/doseq with optional annotations.
:let option uses clojure.core.typed/let
eg.
(doseq [a :- (U nil AnyInteger) [1 nil 2 3]
:when a]
(inc a))"
[seq-exprs & body]
(@#'core/assert-args
(vector? seq-exprs) "a vector for its binding"
(even? (count seq-exprs)) "an even number of forms in binding vector")
(core/let
[normalise-args
(core/fn [seq-exprs]
(core/loop [flat-result []
seq-exprs seq-exprs]
(cond
(empty? seq-exprs) flat-result
(keyword? (first seq-exprs)) (core/let
[_ (assert (#{2} (count (take 2 seq-exprs)))
(str "for option missing " (first seq-exprs)))
[k v & rst] seq-exprs]
(recur (conj flat-result k v)
rst))
:else (if (#{:-} (second seq-exprs))
(core/let
[_ (assert (#{4} (count (take 4 seq-exprs)))
(str "for parameter missing after ':-'"))
[b colon t init & rst] seq-exprs]
(recur (conj flat-result [b colon t] init)
rst))
(core/let
[_ (assert (#{2} (count (take 2 seq-exprs)))
(str "for binding needs initial values"))
[b init & rst] seq-exprs]
(recur (conj flat-result [b :- `Any] init)
rst))))))
seq-exprs (normalise-args seq-exprs)
step (core/fn step [recform exprs]
(if-not exprs
[true `(do ~@body)]
(core/let
[k (first exprs)
v (second exprs)]
(if (keyword? k)
(core/let
[steppair (step recform (nnext exprs))
needrec (steppair 0)
subform (steppair 1)]
(cond
(= k :let) [needrec `(let ~v ~subform)]
(= k :while) [false `(when ~v
~subform
~@(when needrec [recform]))]
(= k :when) [false `(if ~v
(do
~subform
~@(when needrec [recform]))
~recform)]))
(core/let
[_ (assert (and (vector? k)
(#{3} (count k))
(#{:-} (second k)))
"Binder must be of the form [lhs :- type]")
k-ann (nth k 2)
k (nth k 0)
k is the lhs binding
seq- (gensym "seq_")
chunk- (with-meta (gensym "chunk_")
{:tag 'clojure.lang.IChunk})
count- (gensym "count_")
i- (gensym "i_")
recform `(recur (next ~seq-) nil 0 0)
steppair (step recform (nnext exprs))
needrec (steppair 0)
subform (steppair 1)
recform-chunk
`(recur ~seq- ~chunk- ~count- (unchecked-inc ~i-))
steppair-chunk (step recform-chunk (nnext exprs))
subform-chunk (steppair-chunk 1)]
[true
`(loop [~seq- :- (U nil (Seq ~k-ann)) (seq ~v),
~chunk- :- (U nil (clojure.lang.IChunk ~k-ann)) nil
~count- :- Int 0,
~i- :- Int 0]
(if (and (< ~i- ~count-)
FIXME review this
~chunk-)
(core/let
~k (nth ~chunk- ~i-)]
~subform-chunk
~@(when needrec [recform-chunk]))
(when-let [~seq- (seq ~seq-)]
(if (chunked-seq? ~seq-)
(core/let [c# (chunk-first ~seq-)]
(recur (chunk-rest ~seq-) c#
(int (count c#)) (int 0)))
(core/let [~k (first ~seq-)]
~subform
~@(when needrec [recform]))))))])))))]
(nth (step nil (seq seq-exprs)) 1)))
(defmacro ^:deprecated for
"DEPRECATED: Use clojure.core/for.
Like clojure.core/for with optional type annotations.
All types default to Any.
The :let option uses clojure.core.typed/let.
eg. (for [a :- (U nil Int) [1 nil 2 3]
:when a]
:- Number
(inc a))
Metadata using the :clojure.core.typed/ann keyword
can also be used for annotation.
eg. (for ^{::ann Number}
[^{::ann (U nil Int)} a [1 nil 2 3]
:when a]
(inc a))
"
[seq-exprs & maybe-ann-body-expr]
(@#'core/assert-args
(vector? seq-exprs) "a vector for its binding"
(even? (count seq-exprs)) "an even number of forms in binding vector")
(core/let
[orig-seq-exprs seq-exprs
has-explicit-return-type? (#{:-} (first maybe-ann-body-expr))
[ret-ann body-expr] (if has-explicit-return-type?
(core/let
[_ (assert (#{3} (count maybe-ann-body-expr))
(str "Wrong arguments to for: " maybe-ann-body-expr))
[colon t body] maybe-ann-body-expr]
[t body])
(core/let
[_ (assert (#{1} (count maybe-ann-body-expr))
(str "Wrong arguments to for: " maybe-ann-body-expr))
[body] maybe-ann-body-expr]
[`Any body]))
ret-ann (if-let [[_ meta-ann] (find (meta seq-exprs) ::ann)]
(do (assert (not has-explicit-return-type?)
"Cannot mix explicit and metadata return type in for.")
meta-ann)
ret-ann)
_ ( prn " ret - ann " ret - ann )
normalise-args
(core/fn [seq-exprs]
(core/loop [flat-result []
seq-exprs seq-exprs]
(cond
(empty? seq-exprs) flat-result
(keyword? (first seq-exprs)) (core/let
[_ (assert (#{2} (count (take 2 seq-exprs)))
(str "for option missing " (first seq-exprs)))
[k v & rst] seq-exprs]
(recur (conj flat-result k v)
rst))
:else (core/let
[[meta-ann has-meta-ann?]
(when-let [[_ meta-ann] (find (meta (first seq-exprs)) ::ann)]
[meta-ann true])]
(if (#{:-} (second seq-exprs))
(core/let
[_ (assert (#{4} (count (take 4 seq-exprs)))
(str "for parameter missing after ':-'"))
[b colon t init & rst] seq-exprs]
(assert (not meta-ann)
"Cannot mix metadata annotation and explicit annotation in for.")
(recur (conj flat-result [b colon t] init)
rst))
(core/let
[_ (assert (#{2} (count (take 2 seq-exprs)))
(str "for binding needs initial values"))
[b init & rst] seq-exprs
ann (if has-meta-ann? meta-ann `Any)]
(recur (conj flat-result [b :- ann] init)
rst)))))))
seq-exprs (normalise-args seq-exprs)
to-groups (core/fn [seq-exprs]
(reduce (core/fn [groups [k v]]
(if (keyword? k)
(conj (pop groups) (conj (peek groups) [k v]))
(conj groups [k v])))
[] (partition 2 seq-exprs)))
err (core/fn [& msg] (throw (IllegalArgumentException. ^String (apply str msg))))
emit-bind (core/fn emit-bind [[[bind expr & mod-pairs]
& [[_ next-expr] :as next-groups]]]
(core/let
[_ (assert (and (vector? bind)
(#{3} (count bind))
(#{:-} (second bind)))
"Binder must be of the form [lhs :- type]")
bind-ann (nth bind 2)
bind (nth bind 0)
giter (gensym "iter__")
gxs (gensym "s__")
do-mod (core/fn do-mod [[[k v :as pair] & etc]]
(cond
(= k :let) `(let ~v ~(do-mod etc))
(= k :while) `(when ~v ~(do-mod etc))
(= k :when) `(if ~v
~(do-mod etc)
(recur (rest ~gxs)))
(keyword? k) (err "Invalid 'for' keyword " k)
next-groups
`(core/let
[iterys# ~(emit-bind next-groups)
fs# (seq (iterys# ~next-expr))]
(if fs#
(concat fs# (~giter (rest ~gxs)))
(recur (rest ~gxs))))
(~giter (rest ~gxs)))))]
(if next-groups
#_"not the inner-most loop"
`(fn ~giter
[~gxs :- (Option (Seqable ~bind-ann))]
:- (Seq ~ret-ann)
(lazy-seq
(map (fn [t# :- ~ret-ann] :- ~ret-ann
(core/let
[^{::auto-ann ~(meta orig-seq-exprs)
::track-kind ::for-return}
t# t#]
t#))
(loop [~gxs :- (Option (Seqable ~bind-ann)) ~gxs]
(when-let [xs# (seq ~gxs)]
(core/let
[^{::auto-ann ~(meta bind)
::track-kind ::for-param}
x# (first xs#)
~bind x#]
~(do-mod mod-pairs)))))))
#_"inner-most loop"
(core/let
[gi (gensym "i__")
gb (gensym "b__")
do-cmod (core/fn do-cmod [[[k v :as pair] & etc]]
(cond
(= k :let) `(let ~v ~(do-cmod etc))
(= k :while) `(when ~v ~(do-cmod etc))
(= k :when) `(if ~v
~(do-cmod etc)
(recur
(unchecked-inc ~gi)))
(keyword? k)
(err "Invalid 'for' keyword " k)
:else
`(do (chunk-append ~gb
(ann-form ~body-expr
~ret-ann))
(recur (unchecked-inc ~gi)))))]
`(fn ~giter [~gxs :- (Option (Seqable ~bind-ann))]
:- (Seq ~ret-ann)
(lazy-seq
(map (fn [t# :- ~ret-ann] :- ~ret-ann
(core/let
[^{::auto-ann ~(meta orig-seq-exprs)
::track-kind ::for-return}
t# t#]
t#))
(loop [~gxs :- (Option (Seqable ~bind-ann)) ~gxs]
(when-let [~gxs (seq ~gxs)]
(if (chunked-seq? ~gxs)
(core/let
[c# (chunk-first ~gxs)
size# (int (count c#))
~gb (ann-form (chunk-buffer size#)
(~'clojure.lang.ChunkBuffer ~ret-ann))]
(if (loop [~gi :- Int, (int 0)]
(if (< ~gi size#)
(core/let
^{::auto-ann ~(meta bind)
::track-kind ::for-param}
x# (nth c# ~gi)
~bind x#]
~(do-cmod mod-pairs))
true))
(chunk-cons
(chunk ~gb)
(~giter (chunk-rest ~gxs)))
(chunk-cons (chunk ~gb) nil)))
(core/let
[^{::auto-ann ~(meta bind)
::track-kind ::for-param}
x# (first ~gxs)
~bind x#]
~(do-mod mod-pairs))))))))))))]
`(core/let [iter# ~(emit-bind (to-groups seq-exprs))]
(iter# ~(second seq-exprs)))))
|
8f3c1ada77b23e143cf334f00ac923a34e8a763904547c22bcb060dec52ba69d | liangjingyang/everrank | everrank_handler.erl |
-module(everrank_handler).
-export([
init/3,
handle/2,
terminate/3
]).
-include("everrank.hrl").
init(_Transport, Req, []) ->
{ok, Req, undefined}.
handle(Req, State) ->
case catch do_handle(Req, State) of
{ok, Req2} ->
ok;
{error, Req2, Reason} ->
do_handle_error(Req2, Reason);
{error, Req2, Reason, Type} ->
do_handle_error(Req2, Reason, Type);
_Else ->
Req2 = Req,
io:format("error else: ~w~n", [_Else]),
ignore
end,
{ok, Req2, State}.
terminate(_Reason, _Req, _State) ->
ok.
reply(Content, Req) ->
cowboy_req:reply(200, [{<<"content-encoding">>, <<"utf-8">>}], Content, Req).
abort(Req, Reason) ->
erlang:throw({error, Req, Reason}).
do_handle_error(Req, Reason) ->
do_handle_error(Req, Reason, 400).
do_handle_error(Req, Reason, Type) ->
io:format("handle error, reason:~w~n", [Reason]),
cowboy_req:reply(Type, [], Reason, Req).
do_handle(Req, State) ->
case cowboy_req:method(Req) of
{<<"POST">>, Req2} ->
do_handle_post(Req2, State);
_ ->
abort(Req, ?RES_ERROR_METHOD)
end.
do_handle_post(Req, State) ->
case cowboy_req:has_body(Req) of
true ->
do_handle_body(Req, State);
false ->
abort(Req, ?RES_ERROR_BODY)
end.
do_handle_body(Req, State) ->
{ok, PostVals, Req2} = cowboy_req:body_qs(Req),
case proplists:get_value(<<"content">>, PostVals) of
undefined ->
abort(Req2, ?RES_ERROR_CONTENT);
Data ->
Data2 = do_handle_decrypt(Data, Req2),
Data3 = do_handle_decode(Data2, Req2),
do_handle_protocol(Data3, Req2, State)
end.
do_handle_decrypt(Data, _Req) ->
Data.
do_handle_decode(Data, Req) ->
case jsx:is_json(Data) of
true ->
jsx:decode(Data);
false ->
abort(Req, ?RES_ERROR_JSON)
end.
do_handle_protocol([{?PROTOCOL_INIT, Data}], Req, State) ->
do_handle_init(Data, Req, State);
do_handle_protocol([{?PROTOCOL_UPDATE_FRIEND, Data}], Req, State) ->
do_handle_update_friend(Data, Req, State);
do_handle_protocol([{?PROTOCOL_SET_USERDATA, Data}], Req, State) ->
do_handle_set_userdata(Data, Req, State);
do_handle_protocol([{?PROTOCOL_GET_USERDATA, Data}], Req, State) ->
do_handle_get_userdata(Data, Req, State);
do_handle_protocol([{?PROTOCOL_GET_FRIEND_USERDATA, Data}], Req, State) ->
do_handle_get_friend_userdata(Data, Req, State);
do_handle_protocol([{?PROTOCOL_SET_PRIVATE_USERDATA, Data}], Req, State) ->
do_handle_set_private_userdata(Data, Req, State);
do_handle_protocol([{?PROTOCOL_GET_PRIVATE_USERDATA, Data}], Req, State) ->
do_handle_get_private_userdata(Data, Req, State);
do_handle_protocol(_Protocol, Req, _State) ->
io:format("handle protocol error, data:~p~n", [_Protocol]),
abort(Req, ?RES_ERROR_PROTOCOL).
do_handle_init(Data, Req, _State) ->
[SnsType, SnsId, FSnsIdList] = check_init_field(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
FDTab = everrank_lib:sns_to_friend_tab(SnsType),
FWTab = everrank_lib:sns_to_follow_tab(SnsType),
RNTab = everrank_lib:sns_to_relation_tab(SnsType),
ever_db:dirty_write(Tab, #t{snsId = SnsId}),
ever_db:dirty_write(FDTab, #t_fd{snsId = SnsId}),
ever_db:dirty_write(FWTab, #t_fw{snsId = SnsId}),
[Inited, NotInited] = split_friends(FSnsIdList, Tab, [], []),
add_inited(Inited, SnsId, Tab, FDTab, FWTab),
add_notinited(NotInited, SnsId, RNTab),
add_relation(SnsId, FDTab, FWTab, RNTab),
reply(?RES_SUCC, Req);
_ ->
abort(Req, ?RES_ERROR_ALREADY_INITED)
end.
check_init_field(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
FSnsIdList = check_field_friendlist(Data, Req),
[SnsType, SnsId, FSnsIdList].
do_handle_update_friend(Data, Req, _State) ->
[SnsType, SnsId, Cmd, FSnsIdList] = check_update_friend_field(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
_ ->
FDTab = everrank_lib:sns_to_friend_tab(SnsType),
FWTab = everrank_lib:sns_to_follow_tab(SnsType),
RNTab = everrank_lib:sns_to_relation_tab(SnsType),
case Cmd of
?CMD_ADD ->
[Inited, NotInited] = split_friends(FSnsIdList, Tab, [], []),
add_inited(Inited, SnsId, Tab, FDTab, FWTab),
add_notinited(NotInited, SnsId, RNTab),
add_relation(SnsId, FDTab, FWTab, RNTab),
reply(?RES_SUCC, Req);
?CMD_DEL ->
del_fd(FSnsIdList, SnsId, FDTab),
del_rn_and_fw(FSnsIdList, SnsId, RNTab, FWTab),
reply(?RES_SUCC, Req)
end
end.
check_update_friend_field(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
Cmd = check_field_cmd(Data, Req, ?CMD_LIST_UPDATE_FRIEND),
FSnsIdList = check_field_friendlist(Data, Req),
[SnsType, SnsId, Cmd, FSnsIdList].
do_handle_set_userdata(Data, Req, _State) ->
[SnsType, SnsId, UserData] = check_set_userdata(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t{data = OldUserData} = Rec] ->
case is_replace_userdata(OldUserData, UserData) of
false ->
reply(?RES_SUCC, Req);
true ->
Time = ever_time:now(),
Rec2 = Rec#t{data = UserData, time = Time},
ever_db:dirty_write(Tab, Rec2),
spawn(fun() -> update_follow(Rec2, SnsId, SnsType) end),
reply(?RES_SUCC, Req)
end
end.
check_set_userdata(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
UserData = check_field_userdata(Data, Req),
[SnsType, SnsId, UserData].
do_handle_get_userdata(Data, Req, _State) ->
[SnsType, SnsId] = check_get_userdata(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t{data = UserData, time = Time}] ->
Res = jsx:encode([{?FIELD_SNSID, SnsId}, {?FIELD_USERDATA, UserData}, {?FIELD_TIME, Time}]),
reply(Res, Req)
end.
check_get_userdata(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
[SnsType, SnsId].
do_handle_get_friend_userdata(Data, Req, _State) ->
[SnsType, SnsId, Time] = check_get_friend_userdata(Data, Req),
FDTab = everrank_lib:sns_to_friend_tab(SnsType),
case ever_db:dirty_read(FDTab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t_fd{friendList = FDList}] ->
case Time of
0 ->
Res = fdl_to_json(FDList);
_ ->
Res = fdl_to_json([FDL||FDL<-FDList, FDL#t_fdl.time >= Time])
end,
reply(Res, Req)
end.
check_get_friend_userdata(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
Time = check_field_time(Data, Req),
[SnsType, SnsId, Time].
do_handle_set_private_userdata(Data, Req, _State) ->
[SnsType, SnsId, UserData] = check_set_private_userdata(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t{} = Rec] ->
Time = ever_time:now(),
ever_db:dirty_write(Tab, Rec#t{privateData = UserData, privateTime = Time}),
reply(?RES_SUCC, Req)
end.
check_set_private_userdata(Data, Req) ->
check_set_userdata(Data, Req).
do_handle_get_private_userdata(Data, Req, _State) ->
[SnsType, SnsId] = check_get_private_userdata(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t{privateData = UserData, privateTime = Time}] ->
Res = jsx:encode([{?FIELD_SNSID, SnsId}, {?FIELD_USERDATA, UserData}, {?FIELD_TIME, Time}]),
reply(Res, Req)
end.
check_get_private_userdata(Data, Req) ->
check_get_userdata(Data, Req).
%%===================================================================
is_replace_userdata(_Old, _New) ->
true.
fdl_to_json(FDList) ->
TermList = fdl_to_json2(FDList, []),
jsx:encode(TermList).
fdl_to_json2([FDL|List], TermList) ->
#t_fdl{snsId = SnsId, data = UserData, time = Time} = FDL,
fdl_to_json2(List, [[{?FIELD_SNSID, SnsId}, {?FIELD_USERDATA, UserData}, {?FIELD_TIME, Time}]|TermList]);
fdl_to_json2([], TermList) ->
TermList.
update_follow(Rec, SnsId, SnsType) ->
FWTab = everrank_lib:sns_to_follow_tab(SnsType),
case ever_db:dirty_read(FWTab, SnsId) of
[] ->
ignore;
[#t_fw{followList = FSnsIdList}] ->
FDTab = everrank_lib:sns_to_friend_tab(SnsType),
FDLRec = #t_fdl{snsId = SnsId, data = Rec#t.data, time = Rec#t.time},
update_follow2(FSnsIdList, FDLRec, SnsId, FDTab)
end.
update_follow2([FSnsId|List], FDLRec, MSnsId, FDTab) ->
%%TODO:transaction
case ever_db:dirty_read(FDTab, FSnsId) of
[] ->
ignore;
[#t_fd{friendList = FDList} = FDRec] ->
FDList2 = lists:keystore(MSnsId, #t_fd.snsId, FDList, FDLRec),
ever_db:dirty_write(FDTab, FDRec#t_fd{friendList = FDList2})
end,
update_follow2(List, FDLRec, MSnsId, FDTab);
update_follow2([], _FDLRec, _MSnsId, _FDTab) ->
ok.
del_rn_and_fw([FSnsId|List], SnsId, RNTab, FWTab) ->
%%TODO:transaction
case ever_db:dirty_read(RNTab, FSnsId) of
[] ->
ignore;
[#t_rn{relationList = RNList} = RNRec] ->
RNList2 = lists:delete(SnsId, RNList),
ever_db:dirty_write(RNTab, RNRec#t_rn{relationList = RNList2})
end,
case ever_db:dirty_read(FWTab, FSnsId) of
[] ->
ignore;
[#t_fw{followList = FWList} = FWRec] ->
FWList2 = lists:delete(SnsId, FWList),
ever_db:dirty_write(FWTab, FWRec#t_fw{followList = FWList2})
end,
del_rn_and_fw(List, SnsId, RNTab, FWTab);
del_rn_and_fw([], _SnsId, _RNTab, _FWTab) ->
ok.
del_fd(FSnsIdList, SnsId, FDTab) ->
case ever_db:dirty_read(FDTab, SnsId) of
[] ->
ignore;
[#t_fd{friendList = FDList} = FDRec] ->
FDList2 = del_fd2(FSnsIdList, FDList),
ever_db:dirty_write(FDTab, FDRec#t_fd{friendList = FDList2})
end.
del_fd2([FSnsId|List], FDList) ->
FDList2 = lists:keydelete(FSnsId, #t_fdl.snsId, FDList),
del_fd2(List, FDList2);
del_fd2([], FDList) ->
FDList.
remove_dup_fd(FDList , SnsId , FDTab)- >
case ever_db : dirty_read(FDTab , SnsId ) of
%[] ->
FDList ;
%[#t_fd{friendList = FDList2}] ->
%remove_dup_fd2(FDList, FDList2, [])
%end.
%remove_dup_fd2([FSnsId|FDList], FDList2, FDList3) ->
%case lists:keymember(FSnsId, #t_fdl.snsId, FDList2) of
%true ->
%remove_dup_fd2(FDList, FDList2, FDList3);
%false ->
%remove_dup_fd2(FDList, FDList2, [FSnsId|FDList3])
%end;
%remove_dup_fd2([], _FDList2, FDList3) ->
%FDList3.
merge_snsid([SnsId|T], List) ->
case lists:member(SnsId, List) of
true ->
merge_snsid(T, List);
false ->
merge_snsid(T, [SnsId|List])
end;
merge_snsid([], List) ->
List.
add_relation(SnsId, FDTab, FWTab, RNTab) ->
case ever_db:dirty_read(RNTab, SnsId) of
[] ->
ignore;
[#t_rn{relationList = RelationList}] ->
Time = ever_time:now(),
add_relation2(RelationList, SnsId, FDTab, Time),
case ever_db:dirty_read(FWTab, SnsId) of
[] ->
ever_db:dirty_write(FWTab, #t_fw{snsId = SnsId, followList = RelationList});
[#t_fw{followList = FollowList} = FWRec] ->
FollowList2 = merge_snsid(RelationList, FollowList),
ever_db:dirty_write(FWTab, FWRec#t_fw{followList = FollowList2})
end,
ever_db:dirty_delete(RNTab, SnsId)
end,
ok.
add_relation2([RSnsId|RelationList], MSnsId, FDTab, Time) ->
%%TODO:transaction
case ever_db:dirty_read(FDTab, RSnsId) of
[] ->
ignore;
[#t_fd{friendList = FriendList} = FDRec] ->
case lists:keymember(MSnsId, #t_fdl.snsId, FriendList) of
true ->
ignore;
false ->
Friend = #t_fdl{snsId = MSnsId, time = Time},
FDRec2 = FDRec#t_fd{friendList = [Friend|FriendList]},
ever_db:dirty_write(FDTab, FDRec2)
end
end,
add_relation2(RelationList, MSnsId, FDTab, Time);
add_relation2([], _MSnsId, _FDTab, _Time) ->
ok.
add_notinited([FSnsId|NotInited], MSnsId, RNTab) ->
%%TODO:transaction
case ever_db:dirty_read(RNTab, FSnsId) of
[] ->
ever_db:dirty_write(RNTab, #t_rn{snsId = FSnsId, relationList = [MSnsId]});
[#t_rn{relationList = RelationList} = RNRec] ->
RNRec2 = RNRec#t_rn{relationList = [MSnsId|lists:delete(MSnsId, RelationList)]},
ever_db:dirty_write(RNTab, RNRec2)
end,
add_notinited(NotInited, MSnsId, RNTab);
add_notinited([], _MSnsId, _RNTab) ->
ok.
merge_fdl([H|T], List) ->
case lists:keymember(H#t_fdl.snsId, #t_fdl.snsId, List) of
true ->
merge_fdl(T, List);
false ->
merge_fdl(T, [H|List])
end;
merge_fdl([], List) ->
List.
add_inited(Inited, SnsId, Tab, FDTab, FWTab) ->
Time = ever_time:now(),
FriendList = add_inited2(Inited, SnsId, Tab, FWTab, Time, []),
[#t_fd{friendList = FriendList2} = FDRec] = ever_db:dirty_read(FDTab, SnsId),
FriendList3 = merge_fdl(FriendList2, FriendList),
ever_db:dirty_write(FDTab, FDRec#t_fd{friendList = FriendList3}),
ok.
add_inited2([FSnsId|Inited], MSnsId, Tab, FWTab, Time, FriendList) ->
case ever_db:dirty_read(Tab, FSnsId) of
[#t{data = Data}] ->
Friend = #t_fdl{snsId = FSnsId, data = Data, time = Time},
FriendList2 = [Friend|FriendList];
_ ->
FriendList2 = FriendList
end,
%%TODO:transaction
case ever_db:dirty_read(FWTab, FSnsId) of
[#t_fw{followList = FollowList} = FWRec] ->
FollowList2 = [MSnsId|lists:delete(MSnsId, FollowList)],
ever_db:dirty_write(FWTab, FWRec#t_fw{followList = FollowList2});
_ ->
ignore
end,
add_inited2(Inited, MSnsId, Tab, FWTab, Time, FriendList2);
add_inited2([], _MSnsId, _Tab, _FWTab, _Time, FriendList) ->
FriendList.
split_friends([SnsId|Friends], Tab, Inited, NotInited) ->
case ever_db:dirty_read(Tab, SnsId) of
[] ->
split_friends(Friends, Tab, Inited, [SnsId|NotInited]);
_ ->
split_friends(Friends, Tab, [SnsId|Inited], NotInited)
end;
split_friends([], _Tab, Inited, NotInited) ->
[Inited, NotInited].
check_field_userdata(Data, Req) ->
case proplists:get_value(?FIELD_USERDATA, Data) of
undefined ->
UserData = undefined,
abort(Req, ?RES_ERROR_FIELD);
UserData ->
ok
end,
UserData.
check_field_cmd(Data, Req, CmdList) ->
case proplists:get_value(?FIELD_CMD, Data) of
undefined ->
Cmd = undeifned,
abort(Req, ?RES_ERROR_FIELD);
Cmd ->
case lists:member(Cmd, CmdList) of
true ->
ok;
false ->
abort(Req, ?RES_ERROR_SNSTYPE)
end
end,
Cmd.
check_field_friendlist(Data, Req) ->
case proplists:get_value(?FIELD_FRIENDLIST, Data) of
FriendList when is_list(FriendList) ->
ok;
undefined ->
FriendList = [],
abort(Req, ?RES_ERROR_FIELD)
end,
FriendList.
check_field_snsid(Data, Req) ->
case proplists:get_value(?FIELD_SNSID, Data) of
undefined ->
SnsId = undeifned,
abort(Req, ?RES_ERROR_FIELD);
SnsId ->
ok
end,
SnsId.
check_field_snstype(Data, Req) ->
case proplists:get_value(?FIELD_SNSTYPE, Data) of
undefined ->
SnsType = undeifned,
abort(Req, ?RES_ERROR_FIELD);
SnsType ->
case lists:member(SnsType, ?SNSTYPE_LIST) of
true ->
ok;
false ->
abort(Req, ?RES_ERROR_SNSTYPE)
end
end,
SnsType.
check_field_time(Data, _Req) ->
case proplists:get_value(?FIELD_TIME, Data) of
Time when is_integer(Time) ->
ok;
_ ->
Time = 0
end,
Time.
| null | https://raw.githubusercontent.com/liangjingyang/everrank/d4d2b86680117304f0ce98c1da5dd71728cc7d02/src/everrank_handler.erl | erlang | ===================================================================
TODO:transaction
TODO:transaction
[] ->
[#t_fd{friendList = FDList2}] ->
remove_dup_fd2(FDList, FDList2, [])
end.
remove_dup_fd2([FSnsId|FDList], FDList2, FDList3) ->
case lists:keymember(FSnsId, #t_fdl.snsId, FDList2) of
true ->
remove_dup_fd2(FDList, FDList2, FDList3);
false ->
remove_dup_fd2(FDList, FDList2, [FSnsId|FDList3])
end;
remove_dup_fd2([], _FDList2, FDList3) ->
FDList3.
TODO:transaction
TODO:transaction
TODO:transaction |
-module(everrank_handler).
-export([
init/3,
handle/2,
terminate/3
]).
-include("everrank.hrl").
init(_Transport, Req, []) ->
{ok, Req, undefined}.
handle(Req, State) ->
case catch do_handle(Req, State) of
{ok, Req2} ->
ok;
{error, Req2, Reason} ->
do_handle_error(Req2, Reason);
{error, Req2, Reason, Type} ->
do_handle_error(Req2, Reason, Type);
_Else ->
Req2 = Req,
io:format("error else: ~w~n", [_Else]),
ignore
end,
{ok, Req2, State}.
terminate(_Reason, _Req, _State) ->
ok.
reply(Content, Req) ->
cowboy_req:reply(200, [{<<"content-encoding">>, <<"utf-8">>}], Content, Req).
abort(Req, Reason) ->
erlang:throw({error, Req, Reason}).
do_handle_error(Req, Reason) ->
do_handle_error(Req, Reason, 400).
do_handle_error(Req, Reason, Type) ->
io:format("handle error, reason:~w~n", [Reason]),
cowboy_req:reply(Type, [], Reason, Req).
do_handle(Req, State) ->
case cowboy_req:method(Req) of
{<<"POST">>, Req2} ->
do_handle_post(Req2, State);
_ ->
abort(Req, ?RES_ERROR_METHOD)
end.
do_handle_post(Req, State) ->
case cowboy_req:has_body(Req) of
true ->
do_handle_body(Req, State);
false ->
abort(Req, ?RES_ERROR_BODY)
end.
do_handle_body(Req, State) ->
{ok, PostVals, Req2} = cowboy_req:body_qs(Req),
case proplists:get_value(<<"content">>, PostVals) of
undefined ->
abort(Req2, ?RES_ERROR_CONTENT);
Data ->
Data2 = do_handle_decrypt(Data, Req2),
Data3 = do_handle_decode(Data2, Req2),
do_handle_protocol(Data3, Req2, State)
end.
do_handle_decrypt(Data, _Req) ->
Data.
do_handle_decode(Data, Req) ->
case jsx:is_json(Data) of
true ->
jsx:decode(Data);
false ->
abort(Req, ?RES_ERROR_JSON)
end.
do_handle_protocol([{?PROTOCOL_INIT, Data}], Req, State) ->
do_handle_init(Data, Req, State);
do_handle_protocol([{?PROTOCOL_UPDATE_FRIEND, Data}], Req, State) ->
do_handle_update_friend(Data, Req, State);
do_handle_protocol([{?PROTOCOL_SET_USERDATA, Data}], Req, State) ->
do_handle_set_userdata(Data, Req, State);
do_handle_protocol([{?PROTOCOL_GET_USERDATA, Data}], Req, State) ->
do_handle_get_userdata(Data, Req, State);
do_handle_protocol([{?PROTOCOL_GET_FRIEND_USERDATA, Data}], Req, State) ->
do_handle_get_friend_userdata(Data, Req, State);
do_handle_protocol([{?PROTOCOL_SET_PRIVATE_USERDATA, Data}], Req, State) ->
do_handle_set_private_userdata(Data, Req, State);
do_handle_protocol([{?PROTOCOL_GET_PRIVATE_USERDATA, Data}], Req, State) ->
do_handle_get_private_userdata(Data, Req, State);
do_handle_protocol(_Protocol, Req, _State) ->
io:format("handle protocol error, data:~p~n", [_Protocol]),
abort(Req, ?RES_ERROR_PROTOCOL).
do_handle_init(Data, Req, _State) ->
[SnsType, SnsId, FSnsIdList] = check_init_field(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
FDTab = everrank_lib:sns_to_friend_tab(SnsType),
FWTab = everrank_lib:sns_to_follow_tab(SnsType),
RNTab = everrank_lib:sns_to_relation_tab(SnsType),
ever_db:dirty_write(Tab, #t{snsId = SnsId}),
ever_db:dirty_write(FDTab, #t_fd{snsId = SnsId}),
ever_db:dirty_write(FWTab, #t_fw{snsId = SnsId}),
[Inited, NotInited] = split_friends(FSnsIdList, Tab, [], []),
add_inited(Inited, SnsId, Tab, FDTab, FWTab),
add_notinited(NotInited, SnsId, RNTab),
add_relation(SnsId, FDTab, FWTab, RNTab),
reply(?RES_SUCC, Req);
_ ->
abort(Req, ?RES_ERROR_ALREADY_INITED)
end.
check_init_field(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
FSnsIdList = check_field_friendlist(Data, Req),
[SnsType, SnsId, FSnsIdList].
do_handle_update_friend(Data, Req, _State) ->
[SnsType, SnsId, Cmd, FSnsIdList] = check_update_friend_field(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
_ ->
FDTab = everrank_lib:sns_to_friend_tab(SnsType),
FWTab = everrank_lib:sns_to_follow_tab(SnsType),
RNTab = everrank_lib:sns_to_relation_tab(SnsType),
case Cmd of
?CMD_ADD ->
[Inited, NotInited] = split_friends(FSnsIdList, Tab, [], []),
add_inited(Inited, SnsId, Tab, FDTab, FWTab),
add_notinited(NotInited, SnsId, RNTab),
add_relation(SnsId, FDTab, FWTab, RNTab),
reply(?RES_SUCC, Req);
?CMD_DEL ->
del_fd(FSnsIdList, SnsId, FDTab),
del_rn_and_fw(FSnsIdList, SnsId, RNTab, FWTab),
reply(?RES_SUCC, Req)
end
end.
check_update_friend_field(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
Cmd = check_field_cmd(Data, Req, ?CMD_LIST_UPDATE_FRIEND),
FSnsIdList = check_field_friendlist(Data, Req),
[SnsType, SnsId, Cmd, FSnsIdList].
do_handle_set_userdata(Data, Req, _State) ->
[SnsType, SnsId, UserData] = check_set_userdata(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t{data = OldUserData} = Rec] ->
case is_replace_userdata(OldUserData, UserData) of
false ->
reply(?RES_SUCC, Req);
true ->
Time = ever_time:now(),
Rec2 = Rec#t{data = UserData, time = Time},
ever_db:dirty_write(Tab, Rec2),
spawn(fun() -> update_follow(Rec2, SnsId, SnsType) end),
reply(?RES_SUCC, Req)
end
end.
check_set_userdata(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
UserData = check_field_userdata(Data, Req),
[SnsType, SnsId, UserData].
do_handle_get_userdata(Data, Req, _State) ->
[SnsType, SnsId] = check_get_userdata(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t{data = UserData, time = Time}] ->
Res = jsx:encode([{?FIELD_SNSID, SnsId}, {?FIELD_USERDATA, UserData}, {?FIELD_TIME, Time}]),
reply(Res, Req)
end.
check_get_userdata(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
[SnsType, SnsId].
do_handle_get_friend_userdata(Data, Req, _State) ->
[SnsType, SnsId, Time] = check_get_friend_userdata(Data, Req),
FDTab = everrank_lib:sns_to_friend_tab(SnsType),
case ever_db:dirty_read(FDTab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t_fd{friendList = FDList}] ->
case Time of
0 ->
Res = fdl_to_json(FDList);
_ ->
Res = fdl_to_json([FDL||FDL<-FDList, FDL#t_fdl.time >= Time])
end,
reply(Res, Req)
end.
check_get_friend_userdata(Data, Req) ->
SnsType = check_field_snstype(Data, Req),
SnsId = check_field_snsid(Data, Req),
Time = check_field_time(Data, Req),
[SnsType, SnsId, Time].
do_handle_set_private_userdata(Data, Req, _State) ->
[SnsType, SnsId, UserData] = check_set_private_userdata(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t{} = Rec] ->
Time = ever_time:now(),
ever_db:dirty_write(Tab, Rec#t{privateData = UserData, privateTime = Time}),
reply(?RES_SUCC, Req)
end.
check_set_private_userdata(Data, Req) ->
check_set_userdata(Data, Req).
do_handle_get_private_userdata(Data, Req, _State) ->
[SnsType, SnsId] = check_get_private_userdata(Data, Req),
Tab = everrank_lib:sns_to_tab(SnsType),
case ever_db:dirty_read(Tab, SnsId) of
[] ->
abort(Req, ?RES_ERROR_NOT_INITED);
[#t{privateData = UserData, privateTime = Time}] ->
Res = jsx:encode([{?FIELD_SNSID, SnsId}, {?FIELD_USERDATA, UserData}, {?FIELD_TIME, Time}]),
reply(Res, Req)
end.
check_get_private_userdata(Data, Req) ->
check_get_userdata(Data, Req).
is_replace_userdata(_Old, _New) ->
true.
fdl_to_json(FDList) ->
TermList = fdl_to_json2(FDList, []),
jsx:encode(TermList).
fdl_to_json2([FDL|List], TermList) ->
#t_fdl{snsId = SnsId, data = UserData, time = Time} = FDL,
fdl_to_json2(List, [[{?FIELD_SNSID, SnsId}, {?FIELD_USERDATA, UserData}, {?FIELD_TIME, Time}]|TermList]);
fdl_to_json2([], TermList) ->
TermList.
update_follow(Rec, SnsId, SnsType) ->
FWTab = everrank_lib:sns_to_follow_tab(SnsType),
case ever_db:dirty_read(FWTab, SnsId) of
[] ->
ignore;
[#t_fw{followList = FSnsIdList}] ->
FDTab = everrank_lib:sns_to_friend_tab(SnsType),
FDLRec = #t_fdl{snsId = SnsId, data = Rec#t.data, time = Rec#t.time},
update_follow2(FSnsIdList, FDLRec, SnsId, FDTab)
end.
update_follow2([FSnsId|List], FDLRec, MSnsId, FDTab) ->
case ever_db:dirty_read(FDTab, FSnsId) of
[] ->
ignore;
[#t_fd{friendList = FDList} = FDRec] ->
FDList2 = lists:keystore(MSnsId, #t_fd.snsId, FDList, FDLRec),
ever_db:dirty_write(FDTab, FDRec#t_fd{friendList = FDList2})
end,
update_follow2(List, FDLRec, MSnsId, FDTab);
update_follow2([], _FDLRec, _MSnsId, _FDTab) ->
ok.
del_rn_and_fw([FSnsId|List], SnsId, RNTab, FWTab) ->
case ever_db:dirty_read(RNTab, FSnsId) of
[] ->
ignore;
[#t_rn{relationList = RNList} = RNRec] ->
RNList2 = lists:delete(SnsId, RNList),
ever_db:dirty_write(RNTab, RNRec#t_rn{relationList = RNList2})
end,
case ever_db:dirty_read(FWTab, FSnsId) of
[] ->
ignore;
[#t_fw{followList = FWList} = FWRec] ->
FWList2 = lists:delete(SnsId, FWList),
ever_db:dirty_write(FWTab, FWRec#t_fw{followList = FWList2})
end,
del_rn_and_fw(List, SnsId, RNTab, FWTab);
del_rn_and_fw([], _SnsId, _RNTab, _FWTab) ->
ok.
del_fd(FSnsIdList, SnsId, FDTab) ->
case ever_db:dirty_read(FDTab, SnsId) of
[] ->
ignore;
[#t_fd{friendList = FDList} = FDRec] ->
FDList2 = del_fd2(FSnsIdList, FDList),
ever_db:dirty_write(FDTab, FDRec#t_fd{friendList = FDList2})
end.
del_fd2([FSnsId|List], FDList) ->
FDList2 = lists:keydelete(FSnsId, #t_fdl.snsId, FDList),
del_fd2(List, FDList2);
del_fd2([], FDList) ->
FDList.
remove_dup_fd(FDList , SnsId , FDTab)- >
case ever_db : dirty_read(FDTab , SnsId ) of
FDList ;
merge_snsid([SnsId|T], List) ->
case lists:member(SnsId, List) of
true ->
merge_snsid(T, List);
false ->
merge_snsid(T, [SnsId|List])
end;
merge_snsid([], List) ->
List.
add_relation(SnsId, FDTab, FWTab, RNTab) ->
case ever_db:dirty_read(RNTab, SnsId) of
[] ->
ignore;
[#t_rn{relationList = RelationList}] ->
Time = ever_time:now(),
add_relation2(RelationList, SnsId, FDTab, Time),
case ever_db:dirty_read(FWTab, SnsId) of
[] ->
ever_db:dirty_write(FWTab, #t_fw{snsId = SnsId, followList = RelationList});
[#t_fw{followList = FollowList} = FWRec] ->
FollowList2 = merge_snsid(RelationList, FollowList),
ever_db:dirty_write(FWTab, FWRec#t_fw{followList = FollowList2})
end,
ever_db:dirty_delete(RNTab, SnsId)
end,
ok.
add_relation2([RSnsId|RelationList], MSnsId, FDTab, Time) ->
case ever_db:dirty_read(FDTab, RSnsId) of
[] ->
ignore;
[#t_fd{friendList = FriendList} = FDRec] ->
case lists:keymember(MSnsId, #t_fdl.snsId, FriendList) of
true ->
ignore;
false ->
Friend = #t_fdl{snsId = MSnsId, time = Time},
FDRec2 = FDRec#t_fd{friendList = [Friend|FriendList]},
ever_db:dirty_write(FDTab, FDRec2)
end
end,
add_relation2(RelationList, MSnsId, FDTab, Time);
add_relation2([], _MSnsId, _FDTab, _Time) ->
ok.
add_notinited([FSnsId|NotInited], MSnsId, RNTab) ->
case ever_db:dirty_read(RNTab, FSnsId) of
[] ->
ever_db:dirty_write(RNTab, #t_rn{snsId = FSnsId, relationList = [MSnsId]});
[#t_rn{relationList = RelationList} = RNRec] ->
RNRec2 = RNRec#t_rn{relationList = [MSnsId|lists:delete(MSnsId, RelationList)]},
ever_db:dirty_write(RNTab, RNRec2)
end,
add_notinited(NotInited, MSnsId, RNTab);
add_notinited([], _MSnsId, _RNTab) ->
ok.
merge_fdl([H|T], List) ->
case lists:keymember(H#t_fdl.snsId, #t_fdl.snsId, List) of
true ->
merge_fdl(T, List);
false ->
merge_fdl(T, [H|List])
end;
merge_fdl([], List) ->
List.
add_inited(Inited, SnsId, Tab, FDTab, FWTab) ->
Time = ever_time:now(),
FriendList = add_inited2(Inited, SnsId, Tab, FWTab, Time, []),
[#t_fd{friendList = FriendList2} = FDRec] = ever_db:dirty_read(FDTab, SnsId),
FriendList3 = merge_fdl(FriendList2, FriendList),
ever_db:dirty_write(FDTab, FDRec#t_fd{friendList = FriendList3}),
ok.
add_inited2([FSnsId|Inited], MSnsId, Tab, FWTab, Time, FriendList) ->
case ever_db:dirty_read(Tab, FSnsId) of
[#t{data = Data}] ->
Friend = #t_fdl{snsId = FSnsId, data = Data, time = Time},
FriendList2 = [Friend|FriendList];
_ ->
FriendList2 = FriendList
end,
case ever_db:dirty_read(FWTab, FSnsId) of
[#t_fw{followList = FollowList} = FWRec] ->
FollowList2 = [MSnsId|lists:delete(MSnsId, FollowList)],
ever_db:dirty_write(FWTab, FWRec#t_fw{followList = FollowList2});
_ ->
ignore
end,
add_inited2(Inited, MSnsId, Tab, FWTab, Time, FriendList2);
add_inited2([], _MSnsId, _Tab, _FWTab, _Time, FriendList) ->
FriendList.
split_friends([SnsId|Friends], Tab, Inited, NotInited) ->
case ever_db:dirty_read(Tab, SnsId) of
[] ->
split_friends(Friends, Tab, Inited, [SnsId|NotInited]);
_ ->
split_friends(Friends, Tab, [SnsId|Inited], NotInited)
end;
split_friends([], _Tab, Inited, NotInited) ->
[Inited, NotInited].
check_field_userdata(Data, Req) ->
case proplists:get_value(?FIELD_USERDATA, Data) of
undefined ->
UserData = undefined,
abort(Req, ?RES_ERROR_FIELD);
UserData ->
ok
end,
UserData.
check_field_cmd(Data, Req, CmdList) ->
case proplists:get_value(?FIELD_CMD, Data) of
undefined ->
Cmd = undeifned,
abort(Req, ?RES_ERROR_FIELD);
Cmd ->
case lists:member(Cmd, CmdList) of
true ->
ok;
false ->
abort(Req, ?RES_ERROR_SNSTYPE)
end
end,
Cmd.
check_field_friendlist(Data, Req) ->
case proplists:get_value(?FIELD_FRIENDLIST, Data) of
FriendList when is_list(FriendList) ->
ok;
undefined ->
FriendList = [],
abort(Req, ?RES_ERROR_FIELD)
end,
FriendList.
check_field_snsid(Data, Req) ->
case proplists:get_value(?FIELD_SNSID, Data) of
undefined ->
SnsId = undeifned,
abort(Req, ?RES_ERROR_FIELD);
SnsId ->
ok
end,
SnsId.
check_field_snstype(Data, Req) ->
case proplists:get_value(?FIELD_SNSTYPE, Data) of
undefined ->
SnsType = undeifned,
abort(Req, ?RES_ERROR_FIELD);
SnsType ->
case lists:member(SnsType, ?SNSTYPE_LIST) of
true ->
ok;
false ->
abort(Req, ?RES_ERROR_SNSTYPE)
end
end,
SnsType.
check_field_time(Data, _Req) ->
case proplists:get_value(?FIELD_TIME, Data) of
Time when is_integer(Time) ->
ok;
_ ->
Time = 0
end,
Time.
|
bff2129a5dc9155de0fe3919ee78abfbc5f53ef5dd8b2b5bc5d42fda9b5d74d3 | art-w/unicorn | algebra.ml | open Optic
open Type
type 'a t = 'a Type.t
let empty : type a. a t = W ((), (), Eq.unit, fun x -> x, Dag.empty ())
let ( & ) (W (c0, s0, _, w0)) (W (c1, s1, _, w1)) =
W
( (None, c0, c1)
, (s0, s1)
, Eq.create ()
, fun ((x, (s0, s1), (cache, c0, c1)) as input) ->
match cache with
| Some (x', s0', s1', img) when x == x' && s0 == s0' && s1 == s1' -> input, img
| _ ->
let (x, s0, c0), img0 = w0 (x, s0, c0) in
let (x, s1, c1), img1 = w1 (x, s1, c1) in
let img = Dag.seq img0 img1 in
let cache = Some (x, s0, s1, img) in
(x, (s0, s1), (cache, c0, c1)), img )
let iso iso (W (c, s, _, w)) =
let iso' =
{ Iso.ltor = (fun (x, y) -> Iso.ltor iso x, y)
; rtol = (fun (x, y) -> Iso.rtol iso x, y)
}
in
let eq_iso = Eq.create () in
W
( (None, c)
, s
, Eq.create ()
, fun ((x, s, (cache, c)) as input) ->
match cache with
| Some (x', s', img) when x == x' && s == s' -> input, img
| _ ->
let y = Optic.Iso.ltor iso x in
let (y, s, c), img = w (y, s, c) in
let x = Optic.Iso.rtol iso y in
let img = Dag.iso eq_iso iso' img in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img )
let on lens (W (c, s, _, w)) =
let eq_lens = Eq.create () in
W
( (None, c)
, s
, Eq.create ()
, fun ((x, s, (cache, c)) as input) ->
match cache with
| Some (x', s', img) when x == x' && s == s' -> input, img
| _ ->
let y = Optic.Lens.get lens x in
let (y, s, c), img = w (y, s, c) in
let x = Optic.Lens.put lens y x in
let img = Dag.on eq_lens lens img in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img )
let case prism (W (c, s0, _, w)) =
let eq_prism = Eq.create () in
W
( (None, c)
, s0
, Eq.create ()
, fun ((x, s, (cache, c)) as input) ->
match cache with
| Some (x', s', img) when x == x' && s == s' -> input, img
| _ ->
(match Optic.Prism.extract prism x with
| None ->
let img = Dag.empty () in
let cache = Some (x, s, img) in
(x, s0, (cache, c)), img
| Some y ->
let (y, s, c), img = w (y, s, c) in
let x = Optic.Prism.make prism y in
let img = Dag.into eq_prism prism img in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img) )
let into prism (W (c, s, _, w)) =
let eq_prism = Eq.create () in
W
( (None, c)
, s
, Eq.create ()
, fun ((x, s, (cache, c)) as input) ->
match cache with
| Some (x', s', img) when x == x' && s == s' -> input, img
| _ ->
(match Optic.Prism.extract prism x with
| None ->
let img = Dag.empty () in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img
| Some y ->
let (y, s, c), img = w (y, s, c) in
let x = Optic.Prism.make prism y in
let img = Dag.into eq_prism prism img in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img) )
let cond predicate w = into (Prism.satisfy predicate) w
let cond_forget predicate w = case (Prism.satisfy predicate) w
let ifte predicate if_true if_false =
cond predicate if_true & cond (fun x -> not (predicate x)) if_false
let reorder : type a b c. (a * (b * c), (b * a) * c) Optic.iso =
{ Optic.Iso.ltor = (fun (x, (s0, s1)) -> (s0, x), s1)
; rtol = (fun ((s0, x), s1) -> x, (s0, s1))
}
let stateful s0 (W (c, s1, _, w)) =
let eq_iso = Eq.create () in
W
( (None, c)
, (s0, s1)
, Eq.create ()
, fun ((x, (s0, s1), (cache, c)) as input) ->
match cache with
| Some (x', s0', s1', img) when x == x' && s0 == s0' && s1 == s1' -> input, img
| _ ->
let ((s0, x), s1, c), img = w ((s0, x), s1, c) in
let img = Dag.iso eq_iso reorder img in
let cache = Some (x, s0, s1, img) in
(x, (s0, s1), (cache, c)), img )
let dynamic : type a. (a t * a) t =
W
( ()
, ()
, Eq.unit
, fun (wx, (), ()) ->
let W (c, s, seq, w), x = wx in
let (x, s, c), img = w (x, s, c) in
let wx = W (c, s, seq, w), x in
let img = Dag.dynamic seq img in
(wx, (), ()), img )
(********************************************************************************)
let ( <*> ) a b = on Lens.fst a & on Lens.snd b
let initialize : type a b. (a -> b) -> (b option * a, b * a) Optic.iso =
fun fn ->
{ Optic.Iso.ltor =
(fun (s0, x) ->
let s0 =
match s0 with
| None -> fn x
| Some s0 -> s0
in
s0, x)
; rtol = (fun (s0, x) -> Some s0, x)
}
let stateful_by fn w = stateful None (iso (initialize fn) w)
let of_lazy w = stateful_by (fun _ -> Lazy.force w) dynamic
let apply f x = of_lazy (lazy (f x))
let fix fn =
let rec self = lazy (fn (of_lazy self)) in
Lazy.force self
let of_list ws = List.fold_left ( & ) empty ws
let list w = fix (fun lst -> into Prism.cons (w <*> lst))
| null | https://raw.githubusercontent.com/art-w/unicorn/efdc6b0848af8a6bb718aeb95f6d87e8b05e38a6/jsoo/algebra.ml | ocaml | ****************************************************************************** | open Optic
open Type
type 'a t = 'a Type.t
let empty : type a. a t = W ((), (), Eq.unit, fun x -> x, Dag.empty ())
let ( & ) (W (c0, s0, _, w0)) (W (c1, s1, _, w1)) =
W
( (None, c0, c1)
, (s0, s1)
, Eq.create ()
, fun ((x, (s0, s1), (cache, c0, c1)) as input) ->
match cache with
| Some (x', s0', s1', img) when x == x' && s0 == s0' && s1 == s1' -> input, img
| _ ->
let (x, s0, c0), img0 = w0 (x, s0, c0) in
let (x, s1, c1), img1 = w1 (x, s1, c1) in
let img = Dag.seq img0 img1 in
let cache = Some (x, s0, s1, img) in
(x, (s0, s1), (cache, c0, c1)), img )
let iso iso (W (c, s, _, w)) =
let iso' =
{ Iso.ltor = (fun (x, y) -> Iso.ltor iso x, y)
; rtol = (fun (x, y) -> Iso.rtol iso x, y)
}
in
let eq_iso = Eq.create () in
W
( (None, c)
, s
, Eq.create ()
, fun ((x, s, (cache, c)) as input) ->
match cache with
| Some (x', s', img) when x == x' && s == s' -> input, img
| _ ->
let y = Optic.Iso.ltor iso x in
let (y, s, c), img = w (y, s, c) in
let x = Optic.Iso.rtol iso y in
let img = Dag.iso eq_iso iso' img in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img )
let on lens (W (c, s, _, w)) =
let eq_lens = Eq.create () in
W
( (None, c)
, s
, Eq.create ()
, fun ((x, s, (cache, c)) as input) ->
match cache with
| Some (x', s', img) when x == x' && s == s' -> input, img
| _ ->
let y = Optic.Lens.get lens x in
let (y, s, c), img = w (y, s, c) in
let x = Optic.Lens.put lens y x in
let img = Dag.on eq_lens lens img in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img )
let case prism (W (c, s0, _, w)) =
let eq_prism = Eq.create () in
W
( (None, c)
, s0
, Eq.create ()
, fun ((x, s, (cache, c)) as input) ->
match cache with
| Some (x', s', img) when x == x' && s == s' -> input, img
| _ ->
(match Optic.Prism.extract prism x with
| None ->
let img = Dag.empty () in
let cache = Some (x, s, img) in
(x, s0, (cache, c)), img
| Some y ->
let (y, s, c), img = w (y, s, c) in
let x = Optic.Prism.make prism y in
let img = Dag.into eq_prism prism img in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img) )
let into prism (W (c, s, _, w)) =
let eq_prism = Eq.create () in
W
( (None, c)
, s
, Eq.create ()
, fun ((x, s, (cache, c)) as input) ->
match cache with
| Some (x', s', img) when x == x' && s == s' -> input, img
| _ ->
(match Optic.Prism.extract prism x with
| None ->
let img = Dag.empty () in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img
| Some y ->
let (y, s, c), img = w (y, s, c) in
let x = Optic.Prism.make prism y in
let img = Dag.into eq_prism prism img in
let cache = Some (x, s, img) in
(x, s, (cache, c)), img) )
let cond predicate w = into (Prism.satisfy predicate) w
let cond_forget predicate w = case (Prism.satisfy predicate) w
let ifte predicate if_true if_false =
cond predicate if_true & cond (fun x -> not (predicate x)) if_false
let reorder : type a b c. (a * (b * c), (b * a) * c) Optic.iso =
{ Optic.Iso.ltor = (fun (x, (s0, s1)) -> (s0, x), s1)
; rtol = (fun ((s0, x), s1) -> x, (s0, s1))
}
let stateful s0 (W (c, s1, _, w)) =
let eq_iso = Eq.create () in
W
( (None, c)
, (s0, s1)
, Eq.create ()
, fun ((x, (s0, s1), (cache, c)) as input) ->
match cache with
| Some (x', s0', s1', img) when x == x' && s0 == s0' && s1 == s1' -> input, img
| _ ->
let ((s0, x), s1, c), img = w ((s0, x), s1, c) in
let img = Dag.iso eq_iso reorder img in
let cache = Some (x, s0, s1, img) in
(x, (s0, s1), (cache, c)), img )
let dynamic : type a. (a t * a) t =
W
( ()
, ()
, Eq.unit
, fun (wx, (), ()) ->
let W (c, s, seq, w), x = wx in
let (x, s, c), img = w (x, s, c) in
let wx = W (c, s, seq, w), x in
let img = Dag.dynamic seq img in
(wx, (), ()), img )
let ( <*> ) a b = on Lens.fst a & on Lens.snd b
let initialize : type a b. (a -> b) -> (b option * a, b * a) Optic.iso =
fun fn ->
{ Optic.Iso.ltor =
(fun (s0, x) ->
let s0 =
match s0 with
| None -> fn x
| Some s0 -> s0
in
s0, x)
; rtol = (fun (s0, x) -> Some s0, x)
}
let stateful_by fn w = stateful None (iso (initialize fn) w)
let of_lazy w = stateful_by (fun _ -> Lazy.force w) dynamic
let apply f x = of_lazy (lazy (f x))
let fix fn =
let rec self = lazy (fn (of_lazy self)) in
Lazy.force self
let of_list ws = List.fold_left ( & ) empty ws
let list w = fix (fun lst -> into Prism.cons (w <*> lst))
|
5144680f20be8d4efc33cb9ae52b618119debc831038ae8c6fe730121942bc8e | hiroshi-unno/coar | envs.ml | open Core
let cgen_config = ref {
Ast.Rtype.depend_on_func_args = false;
Ast.Rtype.depend_on_unit_args = false;
Ast.Rtype.instantiate_svars_to_int = false;
Ast.Rtype.gen_ref_pred_for_fun_types = false;
Ast.Rtype.gen_type_temp_for_constrs = false;
Ast.Rtype.never_fail = false;
Ast.Rtype.can_fail_only_at_total_apps = false;
Ast.Rtype.can_cause_temp_eff_only_at_total_apps = false;
Ast.Rtype.enable_temp_eff = false
}
let denv = ref (Ast.LogicOld.DTEnv.mk_empty ())
let renv = ref (Ast.Rtype.Env.mk_empty ())
| null | https://raw.githubusercontent.com/hiroshi-unno/coar/90a23a09332c68f380efd4115b3f6fdc825f413d/lib/RCaml/envs.ml | ocaml | open Core
let cgen_config = ref {
Ast.Rtype.depend_on_func_args = false;
Ast.Rtype.depend_on_unit_args = false;
Ast.Rtype.instantiate_svars_to_int = false;
Ast.Rtype.gen_ref_pred_for_fun_types = false;
Ast.Rtype.gen_type_temp_for_constrs = false;
Ast.Rtype.never_fail = false;
Ast.Rtype.can_fail_only_at_total_apps = false;
Ast.Rtype.can_cause_temp_eff_only_at_total_apps = false;
Ast.Rtype.enable_temp_eff = false
}
let denv = ref (Ast.LogicOld.DTEnv.mk_empty ())
let renv = ref (Ast.Rtype.Env.mk_empty ())
|
|
661255ccd3d51f49d9e170ebe19c947c2d5f25a412f7e23228aff620863793d1 | vivid-inc/ash-ra-template | project.clj | (defproject art-sample--simple "0"
Add the - art Leiningen plugin :
:plugins [[net.vivid-inc/lein-art "0.6.1"]]
; Render .art templates
:art {:templates "templates"
:output-dir "target"})
| null | https://raw.githubusercontent.com/vivid-inc/ash-ra-template/f64be7efd6f52ccd451cddb851f02511d1665b11/examples/simple/project.clj | clojure | Render .art templates | (defproject art-sample--simple "0"
Add the - art Leiningen plugin :
:plugins [[net.vivid-inc/lein-art "0.6.1"]]
:art {:templates "templates"
:output-dir "target"})
|
727850a3136670d9495d48a11866acdcca79fb1c6f62bf598e1cf1c864fcae4c | rowangithub/DOrder | typecore.mli | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : typecore.mli 10417 2010 - 05 - 18 16:46:46Z frisch $
(* Type inference for the core language *)
open Asttypes
open Types
open Format
val is_nonexpansive: Typedtree.expression -> bool
val type_binding:
Env.t -> rec_flag ->
(Parsetree.pattern * Parsetree.expression) list ->
Env.annoident option ->
(Typedtree.pattern * Typedtree.expression) list * Env.t
val type_let:
Env.t -> rec_flag ->
(Parsetree.pattern * Parsetree.expression) list ->
Env.annoident option ->
(Typedtree.pattern * Typedtree.expression) list * Env.t
val type_expression:
Env.t -> Parsetree.expression -> Typedtree.expression
val type_class_arg_pattern:
string -> Env.t -> Env.t -> label -> Parsetree.pattern ->
Typedtree.pattern * (Ident.t * Ident.t * type_expr) list *
Env.t * Env.t
val type_self_pattern:
string -> type_expr -> Env.t -> Env.t -> Env.t -> Parsetree.pattern ->
Typedtree.pattern *
(Ident.t * type_expr) Meths.t ref *
(Ident.t * Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr)
Vars.t ref *
Env.t * Env.t * Env.t
val type_expect:
?in_function:(Location.t * type_expr) ->
Env.t -> Parsetree.expression -> type_expr -> Typedtree.expression
val type_exp:
Env.t -> Parsetree.expression -> Typedtree.expression
val type_approx:
Env.t -> Parsetree.expression -> type_expr
val type_argument:
Env.t -> Parsetree.expression -> type_expr -> Typedtree.expression
val option_some: Typedtree.expression -> Typedtree.expression
val option_none: type_expr -> Location.t -> Typedtree.expression
val extract_option_type: Env.t -> type_expr -> type_expr
val iter_pattern: (Typedtree.pattern -> unit) -> Typedtree.pattern -> unit
val reset_delayed_checks: unit -> unit
val force_delayed_checks: unit -> unit
val self_coercion : (Path.t * Location.t list ref) list ref
type error =
Polymorphic_label of Longident.t
| Constructor_arity_mismatch of Longident.t * int * int
| Label_mismatch of Longident.t * (type_expr * type_expr) list
| Pattern_type_clash of (type_expr * type_expr) list
| Multiply_bound_variable of string
| Orpat_vars of Ident.t
| Expr_type_clash of (type_expr * type_expr) list
| Apply_non_function of type_expr
| Apply_wrong_label of label * type_expr
| Label_multiply_defined of Longident.t
| Label_missing of string list
| Label_not_mutable of Longident.t
| Incomplete_format of string
| Bad_conversion of string * int * char
| Undefined_method of type_expr * string
| Undefined_inherited_method of string
| Virtual_class of Longident.t
| Private_type of type_expr
| Private_label of Longident.t * type_expr
| Unbound_instance_variable of string
| Instance_variable_not_mutable of bool * string
| Not_subtype of (type_expr * type_expr) list * (type_expr * type_expr) list
| Outside_class
| Value_multiply_overridden of string
| Coercion_failure of
type_expr * type_expr * (type_expr * type_expr) list * bool
| Too_many_arguments of bool * type_expr
| Abstract_wrong_label of label * type_expr
| Scoping_let_module of string * type_expr
| Masked_instance_variable of Longident.t
| Not_a_variant_type of Longident.t
| Incoherent_label_order
| Less_general of string * (type_expr * type_expr) list
exception Error of Location.t * error
val report_error: formatter -> error -> unit
Forward declaration , to be filled in by
val type_module: (Env.t -> Parsetree.module_expr -> Typedtree.module_expr) ref
(* Forward declaration, to be filled in by Typemod.type_open *)
val type_open: (Env.t -> Location.t -> Longident.t -> Env.t) ref
Forward declaration , to be filled in by Typeclass.class_structure
val type_object:
(Env.t -> Location.t -> Parsetree.class_structure ->
Typedtree.class_structure * class_signature * string list) ref
val create_package_type: Location.t -> Env.t -> Parsetree.package_type -> type_expr
| null | https://raw.githubusercontent.com/rowangithub/DOrder/e0d5efeb8853d2a51cc4796d7db0f8be3185d7df/typing/typecore.mli | ocaml | *********************************************************************
Objective Caml
*********************************************************************
Type inference for the core language
Forward declaration, to be filled in by Typemod.type_open | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : typecore.mli 10417 2010 - 05 - 18 16:46:46Z frisch $
open Asttypes
open Types
open Format
val is_nonexpansive: Typedtree.expression -> bool
val type_binding:
Env.t -> rec_flag ->
(Parsetree.pattern * Parsetree.expression) list ->
Env.annoident option ->
(Typedtree.pattern * Typedtree.expression) list * Env.t
val type_let:
Env.t -> rec_flag ->
(Parsetree.pattern * Parsetree.expression) list ->
Env.annoident option ->
(Typedtree.pattern * Typedtree.expression) list * Env.t
val type_expression:
Env.t -> Parsetree.expression -> Typedtree.expression
val type_class_arg_pattern:
string -> Env.t -> Env.t -> label -> Parsetree.pattern ->
Typedtree.pattern * (Ident.t * Ident.t * type_expr) list *
Env.t * Env.t
val type_self_pattern:
string -> type_expr -> Env.t -> Env.t -> Env.t -> Parsetree.pattern ->
Typedtree.pattern *
(Ident.t * type_expr) Meths.t ref *
(Ident.t * Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr)
Vars.t ref *
Env.t * Env.t * Env.t
val type_expect:
?in_function:(Location.t * type_expr) ->
Env.t -> Parsetree.expression -> type_expr -> Typedtree.expression
val type_exp:
Env.t -> Parsetree.expression -> Typedtree.expression
val type_approx:
Env.t -> Parsetree.expression -> type_expr
val type_argument:
Env.t -> Parsetree.expression -> type_expr -> Typedtree.expression
val option_some: Typedtree.expression -> Typedtree.expression
val option_none: type_expr -> Location.t -> Typedtree.expression
val extract_option_type: Env.t -> type_expr -> type_expr
val iter_pattern: (Typedtree.pattern -> unit) -> Typedtree.pattern -> unit
val reset_delayed_checks: unit -> unit
val force_delayed_checks: unit -> unit
val self_coercion : (Path.t * Location.t list ref) list ref
(* The errors that type inference can report.  They are raised via the
   [Error] exception below and rendered by [report_error]. *)
type error =
    Polymorphic_label of Longident.t
  | Constructor_arity_mismatch of Longident.t * int * int
  | Label_mismatch of Longident.t * (type_expr * type_expr) list
  | Pattern_type_clash of (type_expr * type_expr) list
  | Multiply_bound_variable of string
  | Orpat_vars of Ident.t
  | Expr_type_clash of (type_expr * type_expr) list
  | Apply_non_function of type_expr
  | Apply_wrong_label of label * type_expr
  | Label_multiply_defined of Longident.t
  | Label_missing of string list
  | Label_not_mutable of Longident.t
  | Incomplete_format of string
  | Bad_conversion of string * int * char
  | Undefined_method of type_expr * string
  | Undefined_inherited_method of string
  | Virtual_class of Longident.t
  | Private_type of type_expr
  | Private_label of Longident.t * type_expr
  | Unbound_instance_variable of string
  | Instance_variable_not_mutable of bool * string
  | Not_subtype of (type_expr * type_expr) list * (type_expr * type_expr) list
  | Outside_class
  | Value_multiply_overridden of string
  | Coercion_failure of
      type_expr * type_expr * (type_expr * type_expr) list * bool
  | Too_many_arguments of bool * type_expr
  | Abstract_wrong_label of label * type_expr
  | Scoping_let_module of string * type_expr
  | Masked_instance_variable of Longident.t
  | Not_a_variant_type of Longident.t
  | Incoherent_label_order
  | Less_general of string * (type_expr * type_expr) list

(* Typechecking failures carry the source location of the offending
   expression or pattern. *)
exception Error of Location.t * error

(* Pretty-print an error message on the given formatter. *)
val report_error: formatter -> error -> unit
(* Forward declaration, to be filled in by Typemod *)
val type_module: (Env.t -> Parsetree.module_expr -> Typedtree.module_expr) ref
val type_open: (Env.t -> Location.t -> Longident.t -> Env.t) ref
(* Forward declaration, to be filled in by Typeclass.class_structure *)
val type_object:
(Env.t -> Location.t -> Parsetree.class_structure ->
Typedtree.class_structure * class_signature * string list) ref
val create_package_type: Location.t -> Env.t -> Parsetree.package_type -> type_expr
|
24a1e9b70b3f940e58c47f0048c575c1614a1c8ed083af22926b22d0fd2505eb | dimitri/pgloader | command-fixed.lisp | ;;;
;;; LOAD FIXED COLUMNS FILE
;;;
;;; That has lots in common with CSV, so we share a fair amount of parsing
;;; rules with the CSV case.
;;;
(in-package #:pgloader.parser)
(defrule option-fixed-header (and kw-fixed kw-header)
(:constant (cons :header t)))
;; Parse a hexadecimal literal such as 0x1f into its integer value.
(defrule hex-number (and "0x" (+ (hexdigit-char-p character)))
  (:lambda (hex)
    (bind (((_ digits) hex))
      (parse-integer (text digits) :radix 16))))

;; Parse a run of decimal digits into its integer value.
(defrule dec-number (+ (digit-char-p character))
  (:lambda (digits)
    (parse-integer (text digits))))

;; A number literal is either hexadecimal (0x prefix) or decimal.
(defrule number (or hex-number dec-number))
(defrule field-start-position (and (? kw-from) ignore-whitespace number)
(:function third))
(defrule fixed-field-length (and (? kw-for) ignore-whitespace number)
(:function third))
(defrule fixed-source-field (and csv-field-name
field-start-position fixed-field-length
csv-field-options)
(:destructure (name start len opts)
`(,name :start ,start :length ,len ,@opts)))
(defrule another-fixed-source-field (and comma fixed-source-field)
(:lambda (source)
(bind (((_ field) source)) field)))
(defrule fixed-source-fields (and fixed-source-field (* another-fixed-source-field))
(:lambda (source)
(destructuring-bind (field1 fields) source
(list* field1 fields))))
(defrule fixed-source-field-list (and open-paren fixed-source-fields close-paren)
(:lambda (source)
(bind (((_ field-defs _) source)) field-defs)))
(defrule fixed-option (or option-on-error-stop
option-on-error-resume-next
option-workers
option-concurrency
option-batch-rows
option-batch-size
option-prefetch-rows
option-max-parallel-create-index
option-truncate
option-drop-indexes
option-disable-triggers
option-identifiers-case
option-skip-header
option-fixed-header))
(defrule fixed-options (and kw-with
(and fixed-option (* (and comma fixed-option))))
(:function flatten-option-list))
(defrule fixed-uri (and "fixed://" filename)
(:lambda (source)
(bind (((_ filename) source))
(make-instance 'fixed-connection :spec filename))))
(defrule fixed-file-source (or stdin
inline
http-uri
fixed-uri
filename-matching
maybe-quoted-filename)
(:lambda (src)
(if (typep src 'fixed-connection) src
(destructuring-bind (type &rest specs) src
(case type
(:stdin (make-instance 'fixed-connection :spec src))
(:inline (make-instance 'fixed-connection :spec src))
(:filename (make-instance 'fixed-connection :spec src))
(:regex (make-instance 'fixed-connection :spec src))
(:http (make-instance 'fixed-connection :uri (first specs))))))))
(defrule fixed-source (and kw-load kw-fixed kw-from fixed-file-source)
(:lambda (src)
(bind (((_ _ _ source) src)) source)))
(defrule load-fixed-cols-file-optional-clauses (* (or fixed-options
gucs
before-load
after-load))
(:lambda (clauses-list)
(alexandria:alist-plist clauses-list)))
(defrule load-fixed-cols-file-command (and fixed-source (? file-encoding)
(? fixed-source-field-list)
target
(? csv-target-table)
(? csv-target-column-list)
load-fixed-cols-file-optional-clauses)
(:lambda (command)
(destructuring-bind (source encoding fields pguri table-name columns clauses)
command
(list* source
encoding
fields
pguri
(or table-name (pgconn-table-name pguri))
columns
clauses))))
;; Compile a parsed LOAD FIXED command into the Lisp form that performs the
;; load.  The returned form is a lambda, so the caller decides when (and in
;; which dynamic environment) to actually run the load.  OPTIONS is the plist
;; produced by the fixed-options grammar rules above.
(defun lisp-code-for-loading-from-fixed (fixed-conn pg-db-conn
                                         &key
                                           (encoding :utf-8)
                                           fields
                                           target-table-name
                                           columns
                                           gucs before after options
                                         &allow-other-keys
                                         &aux
                                           (worker-count (getf options :worker-count))
                                           (concurrency (getf options :concurrency)))
  `(lambda ()
     (let* (,@(pgsql-connection-bindings pg-db-conn gucs)
            ,@(batch-control-bindings options)
            ,@(identifier-case-binding options)
            ;; fetch-file may download a remote source before the load starts
            (source-db (with-stats-collection ("fetch" :section :pre)
                         (expand (fetch-file ,fixed-conn)))))
       (progn
         ;; run any BEFORE LOAD SQL first
         ,(sql-code-block pg-db-conn :pre before "before load")
         (let ((on-error-stop ,(getf options :on-error-stop))
               (truncate ,(getf options :truncate))
               (disable-triggers ,(getf options :disable-triggers))
               (drop-indexes ,(getf options :drop-indexes))
               (max-parallel-create-index ,(getf options :max-parallel-create-index))
               (source
                (make-instance 'copy-fixed
                               :target-db ,pg-db-conn
                               :source source-db
                               :target (create-table ',target-table-name)
                               :encoding ,encoding
                               :fields ',fields
                               :columns ',columns
                               :skip-lines ,(or (getf options :skip-lines) 0)
                               :header ,(getf options :header))))
           ;; worker-count/concurrency are only passed when the user set them,
           ;; so copy-database's own defaults apply otherwise
           (copy-database source
                          ,@ (when worker-count
                               (list :worker-count worker-count))
                          ,@ (when concurrency
                               (list :concurrency concurrency))
                          :on-error-stop on-error-stop
                          :truncate truncate
                          :drop-indexes drop-indexes
                          :disable-triggers disable-triggers
                          :max-parallel-create-index max-parallel-create-index))
         ;; and finally any AFTER LOAD SQL
         ,(sql-code-block pg-db-conn :post after "after load")))))
(defrule load-fixed-cols-file load-fixed-cols-file-command
(:lambda (command)
(bind (((source encoding fields pg-db-uri table-name columns
&key options gucs before after) command))
(cond (*dry-run*
(lisp-code-for-csv-dry-run pg-db-uri))
(t
(lisp-code-for-loading-from-fixed source pg-db-uri
:encoding encoding
:fields fields
:target-table-name table-name
:columns columns
:gucs gucs
:before before
:after after
:options options))))))
| null | https://raw.githubusercontent.com/dimitri/pgloader/3047c9afe141763e9e7ec05b7f2a6aa97cf06801/src/parsers/command-fixed.lisp | lisp |
LOAD FIXED COLUMNS FILE
That has lots in common with CSV, so we share a fair amount of parsing
rules with the CSV case.
|
(in-package #:pgloader.parser)
(defrule option-fixed-header (and kw-fixed kw-header)
(:constant (cons :header t)))
(defrule hex-number (and "0x" (+ (hexdigit-char-p character)))
(:lambda (hex)
(bind (((_ digits) hex))
(parse-integer (text digits) :radix 16))))
(defrule dec-number (+ (digit-char-p character))
(:lambda (digits)
(parse-integer (text digits))))
(defrule number (or hex-number dec-number))
(defrule field-start-position (and (? kw-from) ignore-whitespace number)
(:function third))
(defrule fixed-field-length (and (? kw-for) ignore-whitespace number)
(:function third))
(defrule fixed-source-field (and csv-field-name
field-start-position fixed-field-length
csv-field-options)
(:destructure (name start len opts)
`(,name :start ,start :length ,len ,@opts)))
(defrule another-fixed-source-field (and comma fixed-source-field)
(:lambda (source)
(bind (((_ field) source)) field)))
(defrule fixed-source-fields (and fixed-source-field (* another-fixed-source-field))
(:lambda (source)
(destructuring-bind (field1 fields) source
(list* field1 fields))))
(defrule fixed-source-field-list (and open-paren fixed-source-fields close-paren)
(:lambda (source)
(bind (((_ field-defs _) source)) field-defs)))
(defrule fixed-option (or option-on-error-stop
option-on-error-resume-next
option-workers
option-concurrency
option-batch-rows
option-batch-size
option-prefetch-rows
option-max-parallel-create-index
option-truncate
option-drop-indexes
option-disable-triggers
option-identifiers-case
option-skip-header
option-fixed-header))
(defrule fixed-options (and kw-with
(and fixed-option (* (and comma fixed-option))))
(:function flatten-option-list))
(defrule fixed-uri (and "fixed://" filename)
(:lambda (source)
(bind (((_ filename) source))
(make-instance 'fixed-connection :spec filename))))
(defrule fixed-file-source (or stdin
inline
http-uri
fixed-uri
filename-matching
maybe-quoted-filename)
(:lambda (src)
(if (typep src 'fixed-connection) src
(destructuring-bind (type &rest specs) src
(case type
(:stdin (make-instance 'fixed-connection :spec src))
(:inline (make-instance 'fixed-connection :spec src))
(:filename (make-instance 'fixed-connection :spec src))
(:regex (make-instance 'fixed-connection :spec src))
(:http (make-instance 'fixed-connection :uri (first specs))))))))
(defrule fixed-source (and kw-load kw-fixed kw-from fixed-file-source)
(:lambda (src)
(bind (((_ _ _ source) src)) source)))
(defrule load-fixed-cols-file-optional-clauses (* (or fixed-options
gucs
before-load
after-load))
(:lambda (clauses-list)
(alexandria:alist-plist clauses-list)))
(defrule load-fixed-cols-file-command (and fixed-source (? file-encoding)
(? fixed-source-field-list)
target
(? csv-target-table)
(? csv-target-column-list)
load-fixed-cols-file-optional-clauses)
(:lambda (command)
(destructuring-bind (source encoding fields pguri table-name columns clauses)
command
(list* source
encoding
fields
pguri
(or table-name (pgconn-table-name pguri))
columns
clauses))))
(defun lisp-code-for-loading-from-fixed (fixed-conn pg-db-conn
&key
(encoding :utf-8)
fields
target-table-name
columns
gucs before after options
&allow-other-keys
&aux
(worker-count (getf options :worker-count))
(concurrency (getf options :concurrency)))
`(lambda ()
(let* (,@(pgsql-connection-bindings pg-db-conn gucs)
,@(batch-control-bindings options)
,@(identifier-case-binding options)
(source-db (with-stats-collection ("fetch" :section :pre)
(expand (fetch-file ,fixed-conn)))))
(progn
,(sql-code-block pg-db-conn :pre before "before load")
(let ((on-error-stop ,(getf options :on-error-stop))
(truncate ,(getf options :truncate))
(disable-triggers ,(getf options :disable-triggers))
(drop-indexes ,(getf options :drop-indexes))
(max-parallel-create-index ,(getf options :max-parallel-create-index))
(source
(make-instance 'copy-fixed
:target-db ,pg-db-conn
:source source-db
:target (create-table ',target-table-name)
:encoding ,encoding
:fields ',fields
:columns ',columns
:skip-lines ,(or (getf options :skip-lines) 0)
:header ,(getf options :header))))
(copy-database source
,@ (when worker-count
(list :worker-count worker-count))
,@ (when concurrency
(list :concurrency concurrency))
:on-error-stop on-error-stop
:truncate truncate
:drop-indexes drop-indexes
:disable-triggers disable-triggers
:max-parallel-create-index max-parallel-create-index))
,(sql-code-block pg-db-conn :post after "after load")))))
(defrule load-fixed-cols-file load-fixed-cols-file-command
(:lambda (command)
(bind (((source encoding fields pg-db-uri table-name columns
&key options gucs before after) command))
(cond (*dry-run*
(lisp-code-for-csv-dry-run pg-db-uri))
(t
(lisp-code-for-loading-from-fixed source pg-db-uri
:encoding encoding
:fields fields
:target-table-name table-name
:columns columns
:gucs gucs
:before before
:after after
:options options))))))
|
0632e90d838fe593c2a1b73f156d3e0376b0e08c4d3d00cf83ef2abf7c59746b | serokell/haskell-with-utf8 | Utf8.hs | SPDX - FileCopyrightText : 2020 >
-
- SPDX - License - Identifier : MPL-2.0
-
- SPDX-License-Identifier: MPL-2.0
-}
-- | "Data.Text.Lazy.IO" for the modern world.
--
-- Wrappers around simple file reading/writing functions from the
-- @text@ package that reset the handle encoding to UTF-8.
module Data.Text.Lazy.IO.Utf8
( readFile
, writeFile
) where
import Prelude hiding (readFile, writeFile)
import Control.Exception.Safe (MonadMask)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy.IO as T
import qualified System.IO as IO
import qualified System.IO.Utf8 as Utf8
-- | Like @readFile@, but assumes the file is encoded in UTF-8, regardless
-- of the current locale.
--
-- NOTE(review): 'T.hGetContents' reads lazily, so the handle stays open
-- until the returned 'Text' is fully forced — confirm callers force it.
readFile :: MonadIO m => IO.FilePath -> m Text
readFile path = Utf8.openFile path IO.ReadMode >>= liftIO . T.hGetContents
-- | Like @writeFile@, but encodes the data in UTF-8, regardless
-- of the current locale.
--
-- NOTE(review): presumably 'Utf8.withFile' has bracket semantics (hence the
-- 'MonadMask' constraint), so the handle is closed on exceptions — confirm.
writeFile :: (MonadIO m, MonadMask m) => IO.FilePath -> Text -> m ()
writeFile path = Utf8.withFile path IO.WriteMode . (liftIO .) . flip T.hPutStr
| null | https://raw.githubusercontent.com/serokell/haskell-with-utf8/63b26842bec2da71f4b822d2dff14640e63a63e5/lib/Data/Text/Lazy/IO/Utf8.hs | haskell | | "Data.Text.Lazy.IO" for the modern world.
Wrappers around simple file reading/writing functions from the
of the current locale.
of the current locale. | SPDX - FileCopyrightText : 2020 >
-
- SPDX - License - Identifier : MPL-2.0
-
- SPDX-License-Identifier: MPL-2.0
-}
@text@ package that reset the handle encoding to UTF-8 .
module Data.Text.Lazy.IO.Utf8
( readFile
, writeFile
) where
import Prelude hiding (readFile, writeFile)
import Control.Exception.Safe (MonadMask)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy.IO as T
import qualified System.IO as IO
import qualified System.IO.Utf8 as Utf8
| Like @readFile@ , but assumes the file is encoded in UTF-8 , regardless
readFile :: MonadIO m => IO.FilePath -> m Text
readFile path = Utf8.openFile path IO.ReadMode >>= liftIO . T.hGetContents
| Like @writeFile@ , but encodes the data in UTF-8 , regardless
writeFile :: (MonadIO m, MonadMask m) => IO.FilePath -> Text -> m ()
writeFile path = Utf8.withFile path IO.WriteMode . (liftIO .) . flip T.hPutStr
|
3ad905557c5d3fb043423c875509a4beb33057512447b5387eba3ff2f1073df7 | meamy/feynman | Swaps.hs | module Feynman.Optimization.Swaps (pushSwaps) where
import Data.Map (Map, (!))
import qualified Data.Map as Map
import Feynman.Core
-- Permutations on /a/
--
-- | A permutation stored as a pair of maps: the first is the forward image,
-- the second its inverse.  Elements absent from the maps are fixed points,
-- so 'identity' is simply a pair of empty maps.
data Permutation a = Permutation !(Map a a) !(Map a a) deriving (Eq, Ord, Show)

-- | The identity permutation: every element is a fixed point.
identity :: Permutation a
identity = Permutation Map.empty Map.empty

-- | Image of @x@ under the permutation.
fLookup :: Ord a => a -> Permutation a -> a
fLookup x (Permutation fperm _) = Map.findWithDefault x x fperm

-- | Preimage of @x@, i.e. a lookup in the inverse map.
bLookup :: Ord a => a -> Permutation a -> a
bLookup x (Permutation _ bperm) = Map.findWithDefault x x bperm

-- | Compose the transposition of @x@ and @y@ /after/ the permutation:
-- whatever previously mapped to @x@ now maps to @y@, and vice versa.
swap :: Ord a => a -> a -> Permutation a -> Permutation a
swap x y (Permutation fperm bperm) = Permutation fperm' bperm' where
  x0 = Map.findWithDefault x x bperm     -- old preimage of x
  y0 = Map.findWithDefault y y bperm     -- old preimage of y
  fperm' = Map.insert x0 y (Map.insert y0 x fperm)
  bperm' = Map.insert x y0 (Map.insert y x0 bperm)

-- | Compose the transposition of @x@ and @y@ /before/ the permutation:
-- @x@ now maps to the old image of @y@, and vice versa.
unSwap :: Ord a => a -> a -> Permutation a -> Permutation a
unSwap x y (Permutation fperm bperm) = Permutation fperm' bperm' where
  x' = Map.findWithDefault x x fperm     -- old image of x
  y' = Map.findWithDefault y y fperm     -- old image of y
  fperm' = Map.insert x y' (Map.insert y x' fperm)
  bperm' = Map.insert x' y (Map.insert y' x bperm)

-- | Keys that carry an explicit forward mapping (a superset of the elements
-- actually moved, since swapped-back elements may remain as explicit keys).
support :: Ord a => Permutation a -> [a]
support (Permutation fperm _) = Map.keys fperm
-- | Realise a permutation as a sequence of Swap gates.  Each emitted swap is
-- peeled off the permutation with 'unSwap' until every element visited is a
-- fixed point.
toSwaps :: Permutation ID -> [Primitive]
toSwaps perm = go perm (support perm) where
  go perm [] = []
  go perm (x:xs)
    | bLookup x perm == x = go perm xs                       -- already in place
    | otherwise = (Swap x y):(go (unSwap x y perm) xs) where -- move preimage of x
        y = bLookup x perm

-- Hoist swaps out of code. Useful mainly so that T-par doesn't
-- have to worry about clever orderings itself
pushSwaps :: [Primitive] -> [Primitive]
pushSwaps = go identity where
  go perm [] = toSwaps perm              -- flush accumulated swaps at the end
  go perm (gate:circ) = case gate of
    Swap x y -> go (swap x y perm) circ  -- absorb the swap into the permutation
    -- every other gate is rewritten through the inverse of the swaps seen so
    -- far, so the swaps themselves can be deferred to the very end
    _ -> gate':(go perm circ) where
      gate' = substGate (flip bLookup $ perm) gate
| null | https://raw.githubusercontent.com/meamy/feynman/6487c3e90b3c3a56e3b309436663d8bf4cbf4422/src/Feynman/Optimization/Swaps.hs | haskell | Permutations on /a/
Hoist swaps out of code. Useful mainly so that T-par doesn't
have to worry about clever orderings itself | module Feynman.Optimization.Swaps (pushSwaps) where
import Data.Map (Map, (!))
import qualified Data.Map as Map
import Feynman.Core
data Permutation a = Permutation !(Map a a) !(Map a a) deriving (Eq, Ord, Show)
identity :: Permutation a
identity = Permutation Map.empty Map.empty
fLookup :: Ord a => a -> Permutation a -> a
fLookup x (Permutation fperm _) = Map.findWithDefault x x fperm
bLookup :: Ord a => a -> Permutation a -> a
bLookup x (Permutation _ bperm) = Map.findWithDefault x x bperm
swap :: Ord a => a -> a -> Permutation a -> Permutation a
swap x y (Permutation fperm bperm) = Permutation fperm' bperm' where
x0 = Map.findWithDefault x x bperm
y0 = Map.findWithDefault y y bperm
fperm' = Map.insert x0 y (Map.insert y0 x fperm)
bperm' = Map.insert x y0 (Map.insert y x0 bperm)
unSwap :: Ord a => a -> a -> Permutation a -> Permutation a
unSwap x y (Permutation fperm bperm) = Permutation fperm' bperm' where
x' = Map.findWithDefault x x fperm
y' = Map.findWithDefault y y fperm
fperm' = Map.insert x y' (Map.insert y x' fperm)
bperm' = Map.insert x' y (Map.insert y' x bperm)
support :: Ord a => Permutation a -> [a]
support (Permutation fperm _) = Map.keys fperm
toSwaps :: Permutation ID -> [Primitive]
toSwaps perm = go perm (support perm) where
go perm [] = []
go perm (x:xs)
| bLookup x perm == x = go perm xs
| otherwise = (Swap x y):(go (unSwap x y perm) xs) where
y = bLookup x perm
pushSwaps :: [Primitive] -> [Primitive]
pushSwaps = go identity where
go perm [] = toSwaps perm
go perm (gate:circ) = case gate of
Swap x y -> go (swap x y perm) circ
_ -> gate':(go perm circ) where
gate' = substGate (flip bLookup $ perm) gate
|
66bee0d2ca74bf0fd3e78b3104dfb7b9adf88712388086fcb58b1774b00271b5 | rabbitmq/ra-kv-store | util.clj | (ns jepsen.util
"Kitchen sink"
(:require [clojure.tools.logging :refer [info]]
[clojure.core.reducers :as r]
[clojure.string :as str]
[clojure.pprint :refer [pprint]]
[clojure.walk :as walk]
[clojure.java.io :as io]
[clj-time.core :as time]
[clj-time.local :as time.local]
[clojure.tools.logging :refer [debug info warn]]
[dom-top.core :refer [bounded-future]]
[knossos.history :as history])
(:import (java.util.concurrent.locks LockSupport)
(java.util.concurrent ExecutionException)
(java.io File
RandomAccessFile)))
(defn exception?
"Is x an Exception?"
[x]
(instance? Exception x))
(defn fcatch
"Takes a function and returns a version of it which returns, rather than
throws, exceptions."
[f]
(fn wrapper [& args]
(try (apply f args)
(catch Exception e e))))
(defn random-nonempty-subset
"A randomly selected, randomly ordered, non-empty subset of the given
collection."
[nodes]
(take (inc (rand-int (count nodes))) (shuffle nodes)))
(defn name+
  "Tries name, falls back to pr-str.

  Bug fix: the original computed (name x) inside a one-armed `if` and then
  unconditionally returned (pr-str x), discarding the result; the `if` now
  selects between the two."
  [x]
  (if (instance? clojure.lang.Named x)
    (name x)
    (pr-str x)))
(defn real-pmap
"Like pmap, but launches futures instead of using a bounded threadpool."
[f coll]
(->> coll
(map (fn launcher [x] (future (f x))))
doall
(map deref)))
(defn processors
"How many processors on this platform?"
[]
(.. Runtime getRuntime availableProcessors))
(defn majority
  "Given a number, returns the smallest integer strictly greater than half."
  [n]
  ;; floor(n/2) + 1 is the least integer strictly above n/2
  (let [half (Math/floor (/ n 2))]
    (inc (int half))))
(defn min-by
  "Finds the minimum element of a collection based on some (f element), which
  returns Comparables. If `coll` is empty, returns nil.

  On ties the earliest element is kept."
  [f coll]
  (when (seq coll)
    (let [keep-smaller (fn [best candidate]
                         (if (pos? (compare (f best) (f candidate)))
                           candidate
                           best))]
      (reduce keep-smaller coll))))
(defn max-by
"Finds the maximum element of a collection based on some (f element), which
returns Comparables. If `coll` is empty, returns nil."
[f coll]
(when (seq coll)
(reduce (fn [m e]
(if (neg? (compare (f m) (f e)))
e
m))
coll)))
(defn fast-last
"Like last, but O(1) on counted collections."
[coll]
(nth coll (dec (count coll))))
(defn rand-nth-empty
"Like rand-nth, but returns nil if the collection is empty."
[coll]
(try (rand-nth coll)
(catch IndexOutOfBoundsException e nil)))
(defn fraction
  "a/b, but if b is zero, returns unity."
  [a b]
  (cond
    (zero? b) 1              ; avoid divide-by-zero; degenerate ratio is 1
    :else     (/ a b)))
(defn inc*
"Like inc, but (inc nil) => 1."
[x]
(if (nil? x)
1
(inc x)))
(defn local-time
"Drops millisecond resolution"
[]
(let [t (time.local/local-now)]
(time/minus t (time/millis (time/milli t)))))
(defn chunk-vec
"Partitions a vector into reducibles of size n (somewhat like partition-all)
but uses subvec for speed.
= > ( [ 1 ] )
= > ( [ 1 2 ] [ 3 ] ) "
([^long n v]
(let [c (count v)]
(->> (range 0 c n)
(map #(subvec v % (min c (+ % n))))))))
(def buf-size 1048576)
(defn concat-files!
"Appends contents of all fs, writing to out. Returns fs."
[out fs]
(with-open [oc (.getChannel (RandomAccessFile. (io/file out) "rw"))]
(doseq [f fs]
(with-open [fc (.getChannel (RandomAccessFile. (io/file f) "r"))]
(let [size (.size fc)]
(loop [position 0]
(when (< position size)
(recur (+ position (.transferTo fc
position
(min (- size position)
buf-size)
oc)))))))))
fs)
(defn op->str
"Format an operation as a string."
[op]
(str (:process op) \tab
(:type op) \tab
(pr-str (:f op)) \tab
(pr-str (:value op))
(when-let [err (:error op)]
(str \tab err))))
(defn prn-op
"Prints an operation to the console."
[op]
(pr (:process op)) (print \tab)
(pr (:type op)) (print \tab)
(pr (:f op)) (print \tab)
(pr (:value op))
(when-let [err (:error op)]
(print \tab) (print err))
(print \newline))
(defn print-history
"Prints a history to the console."
([history]
(print-history prn-op history))
([printer history]
(doseq [op history]
(printer op))))
(defn write-history!
"Writes a history to a file."
([f history]
(write-history! f prn-op history))
([f printer history]
(with-open [w (io/writer f)]
(binding [*out* w]
(print-history printer history)))))
(defn pwrite-history!
"Writes history, taking advantage of more cores."
([f history]
(pwrite-history! f prn-op history))
([f printer history]
(if (or (< (count history) 16384) (not (vector? history)))
Plain old write
(write-history! f printer history)
Parallel variant
(let [chunks (chunk-vec (Math/ceil (/ (count history) (processors)))
history)
files (repeatedly (count chunks)
#(File/createTempFile "jepsen-history" ".part"))]
(try
(->> chunks
(map (fn [file chunk]
(bounded-future (write-history! file printer chunk) file))
files)
doall
(map deref)
(concat-files! f))
(finally
(doseq [f files] (.delete ^File f))))))))
(defn log-op
"Logs an operation and returns it."
[op]
(info (op->str op))
op)
(def logger (agent nil))
(defn log-print
[_ & things]
(apply println things))
(defn log
[& things]
(apply send-off logger log-print things))
( defn all - loggers [ ]
; (->> (org.apache.log4j.LogManager/getCurrentLoggers)
; (java.util.Collections/list)
; (cons (org.apache.log4j.LogManager/getRootLogger))))
(defn all-jdk-loggers []
(let [manager (java.util.logging.LogManager/getLogManager)]
(->> manager
.getLoggerNames
java.util.Collections/list
(map #(.getLogger manager %)))))
(defmacro mute-jdk [& body]
`(let [loggers# (all-jdk-loggers)
levels# (map #(.getLevel %) loggers#)]
(try
(doseq [l# loggers#]
(.setLevel l# java.util.logging.Level/OFF))
~@body
(finally
(dorun (map (fn [logger# level#] (.setLevel logger# level#))
loggers#
levels#))))))
( [ & body ]
; `(let [loggers# (all-loggers)
levels # ( map # ( .getLevel % ) loggers # ) ]
; (try
; (doseq [l# loggers#]
( .setLevel l # org.apache.log4j . Level / OFF ) )
; ~@body
; (finally
( dorun ( map ( fn [ logger # level # ] ( .setLevel logger # level # ) )
; loggers#
; levels#))))))
(defmacro mute [& body]
`(mute-jdk
; (mute-log4j
~@body));)
(defn ms->nanos [ms] (* ms 1000000))
(defn nanos->ms [nanos] (/ nanos 1000000))
(defn secs->nanos [s] (* s 1e9))
(defn nanos->secs [nanos] (/ nanos 1e9))
(defn ^Long linear-time-nanos
"A linear time source in nanoseconds."
[]
(System/nanoTime))
(def ^:dynamic ^Long *relative-time-origin*
"A reference point for measuring time in a test run.")
(defmacro with-relative-time
"Binds *relative-time-origin* at the start of body."
[& body]
`(binding [*relative-time-origin* (linear-time-nanos)]
~@body))
(defn relative-time-nanos
"Time in nanoseconds since *relative-time-origin*"
[]
(- (linear-time-nanos) *relative-time-origin*))
(defn sleep
"High-resolution sleep; takes a (possibly fractional) time in ms."
[dt]
(let [t (+ (long (ms->nanos dt))
(System/nanoTime))]
(while (< (+ (System/nanoTime) 10000) t)
(LockSupport/parkNanos (- t (System/nanoTime))))))
(defmacro time-
[& body]
`(let [t0# (System/nanoTime)]
~@body
(nanos->ms (- (System/nanoTime) t0#))))
(defn pprint-str [x]
(with-out-str (pprint x)))
(defn spy [x]
(info (pprint-str x))
x)
(defmacro timeout
"Times out body after n millis, returning timeout-val."
[millis timeout-val & body]
`(let [worker# (future ~@body)
retval# (try
(deref worker# ~millis ::timeout)
(catch ExecutionException ee#
(throw (.getCause ee#))))]
(if (= retval# ::timeout)
(do (future-cancel worker#)
~timeout-val)
retval#)))
(defmacro retry
"Evals body repeatedly until it doesn't throw, sleeping dt seconds."
[dt & body]
`(loop []
(let [res# (try ~@body
(catch Throwable e#
( warn e # " retrying in " ~dt " seconds " )
::failed))]
(if (= res# ::failed)
(do (Thread/sleep (* ~dt 1000))
(recur))
res#))))
(defrecord Retry [bindings])
(defmacro with-retry
"It's really fucking inconvenient not being able to recur from within (catch)
expressions. This macro wraps its body in a (loop [bindings] (try ...)).
Provides a (retry & new bindings) form which is usable within (catch) blocks:
when this form is returned by the body, the body will be retried with the new
bindings."
[initial-bindings & body]
(assert (vector? initial-bindings))
(assert (even? (count initial-bindings)))
(let [bindings-count (/ (count initial-bindings) 2)
body (walk/prewalk (fn [form]
(if (and (seq? form)
(= 'retry (first form)))
(do (assert (= bindings-count
(count (rest form))))
`(Retry. [~@(rest form)]))
form))
body)
retval (gensym 'retval)]
`(loop [~@initial-bindings]
(let [~retval (try ~@body)]
(if (instance? Retry ~retval)
(recur ~@(->> (range bindings-count)
(map (fn [i] `(nth (.bindings ~retval) ~i)))))
~retval)))))
(deftype Return [value])
(defn letr-rewrite-return
"Rewrites (return x) to (Return. x) in expr. Returns a pair of [changed?
expr], where changed is whether the expression contained a return."
[expr]
(let [return? (atom false)
expr (walk/prewalk
(fn [form]
(if (and (seq? form)
(= 'return (first form)))
(do (assert
(= 2 (count form))
(str (pr-str form) " should have one argument"))
(reset! return? true)
`(Return. ~(second form)))
form))
expr)]
[@return? expr]))
(defn letr-partition-bindings
"Takes a vector of bindings [sym expr, sym' expr, ...]. Returns
binding-groups: a sequence of vectors of bindgs, where the final binding in
each group has an early return. The final group (possibly empty!) contains no
early return."
[bindings]
(->> bindings
(partition 2)
(reduce (fn [groups [sym expr]]
(let [[return? expr] (letr-rewrite-return expr)
groups (assoc groups
(dec (count groups))
(-> (peek groups) (conj sym) (conj expr)))]
(if return?
(do (assert (symbol? sym)
(str (pr-str sym " must be a symbol")))
(conj groups []))
groups)))
[[]])))
(defn letr-let-if
"Takes a sequence of binding groups and a body expression, and emits a let
for the first group, an if statement checking for a return, and recurses;
ending with body."
[groups body]
(assert (pos? (count groups)))
(if (= 1 (count groups))
; Final group with no returns
`(let ~(first groups) ~@body)
Group ending in a return
(let [bindings (first groups)
final-sym (nth bindings (- (count bindings) 2))]
`(let ~bindings
(if (instance? Return ~final-sym)
(.value ~final-sym)
~(letr-let-if (rest groups) body))))))
(defmacro letr
  "Let bindings, plus early return.

  You want to do some complicated, multi-stage operation assigning lots of
  variables--but at different points in the let binding, you need to perform
  some conditional check to make sure you can proceed to the next step.
  Ordinarily, you'd intersperse let and if statements, like so:

      (let [res (network-call)]
        (if-not (:ok? res)
          :failed-network-call
          (let [people (:people (:body res))]
            (if (zero? (count people))
              :no-people
              (let [res2 (network-call-2 people)]
                ...

  This is a linear chain of operations, but we're forced to nest deeply
  because we have no early-return construct. In ruby, we might write

      res = network_call
      return :failed_network_call if not x.ok?
      people = res[:body][:people]
      return :no-people if people.empty?
      res2 = network_call_2 people
      ...

  which reads the same, but requires no nesting thanks to Ruby's early
  return. Clojure's single-return is *usually* a boon to understandability,
  but deep linear branching usually means something like

  - Deep nesting         (readability issues)
  - Function chaining    (lots of arguments for bound variables)
  - Throw/catch          (awkward exception wrappers)
  - Monadic interpreter  (slow, indirect)

  This macro lets you write:

      (letr [res    (network-call)
             _      (when-not (:ok? res) (return :failed-network-call))
             people (:people (:body res))
             _      (when (zero? (count people)) (return :no-people))
             res2   (network-call-2 people)]
        ...)

  letr works like let, but if (return x) is ever returned from a binding,
  letr returns x, and does not evaluate subsequent expressions.

  If something other than (return x) is returned from evaluating a binding,
  letr binds the corresponding variable as normal. Here, we use _ to indicate
  that we're not using the results of (when ...), but this is not mandatory.
  You cannot use a destructuring bind for a return expression.

  letr is not a *true* early return--(return x) must be a *terminal*
  expression for it to work--like (recur). For example,

      (letr [x (do (return 2) 1)]
        x)

  returns 1, not 2, because (return 2) was not the terminal expression.

  return only works within letr's bindings, not its body."
  [bindings & body]
  (assert (vector? bindings))
  (assert (even? (count bindings)))
  ;; Fix: the original bound `groups` and then recomputed
  ;; (letr-partition-bindings bindings) instead of using the binding.
  (let [groups (letr-partition-bindings bindings)]
    (letr-let-if groups body)))
(defn map-kv
"Takes a function (f [k v]) which returns [k v], and builds a new map by
applying f to every pair."
[f m]
(into {} (r/map f m)))
(defn map-vals
"Maps values in a map."
[f m]
(map-kv (fn [[k v]] [k (f v)]) m))
(defn compare<
"Like <, but works on any comparable objects, not just numbers."
[a b]
(neg? (compare a b)))
(defn poly-compare
"Comparator function for sorting heterogenous collections."
[a b]
(try (compare a b)
(catch java.lang.ClassCastException e
(compare (str (class a)) (str (class b))))))
(defn polysort
"Sort, but on heterogenous collections."
[coll]
(sort poly-compare coll))
(defn integer-interval-set-str
"Takes a set of integers and yields a sorted, compact string representation."
[set]
(if (some nil? set)
(str set)
(let [[runs start end]
(reduce (fn r [[runs start end] cur]
(cond ; Start new run
(nil? start) [runs cur cur]
; Continue run
(= cur (inc end)) [runs start cur]
; Break!
:else [(conj runs [start end]) cur cur]))
[[] nil nil]
(sort set))
runs (if (nil? start) runs (conj runs [start end]))]
(str "#{"
(->> runs
(map (fn m [[start end]]
(if (= start end)
start
(str start ".." end))))
(str/join " "))
"}"))))
(defmacro meh
"Returns, rather than throws, exceptions."
[& body]
`(try ~@body (catch Exception e# e#)))
(defmacro with-thread-name
"Sets the thread name for duration of block."
[thread-name & body]
`(let [old-name# (.. Thread currentThread getName)]
(try
(.. Thread currentThread (setName (name ~thread-name)))
~@body
(finally (.. Thread currentThread (setName old-name#))))))
(defn maybe-number
"Tries reading a string as a long, then double, then string. Passes through
nil. Useful for getting nice values out of stats APIs that just dump a bunch
of heterogenously-typed strings at you."
[s]
(when s
(try (Long/parseLong s)
(catch java.lang.NumberFormatException e
(try (Double/parseDouble s)
(catch java.lang.NumberFormatException e
s))))))
(defn coll
"Wraps non-coll things into singleton lists, and leaves colls as themselves.
Useful when you can take either a single thing or a sequence of things."
[thing-or-things]
(cond (nil? thing-or-things) nil
(coll? thing-or-things) thing-or-things
true (list thing-or-things)))
(defn sequential
"Wraps non-sequential things into singleton lists, and leaves sequential
things or nil as themselves. Useful when you can take either a single thing
or a sequence of things."
[thing-or-things]
(cond (nil? thing-or-things) nil
(sequential? thing-or-things) thing-or-things
true (list thing-or-things)))
(defn history->latencies
"Takes a history--a sequence of operations--and emits the same history but
with every invocation containing two new keys:
:latency the time in nanoseconds it took for the operation to complete.
:completion the next event for that process"
[history]
(let [idx (->> history
(map-indexed (fn [i op] [op i]))
(into {}))]
(->> history
(reduce (fn [[history invokes] op]
(if (= :invoke (:type op))
; New invocation!
[(conj! history op)
(assoc! invokes (:process op)
(dec (count history)))]
(if-let [invoke-idx (get invokes (:process op))]
; We have an invocation for this process
(let [invoke (get history invoke-idx)
; Compute latency
l (- (:time op) (:time invoke))
op (assoc op :latency l)]
[(-> history
(assoc! invoke-idx
(assoc invoke :latency l, :completion op))
(conj! op))
(dissoc! invokes (:process op))])
; We have no invocation for this process
[(conj! history op) invokes])))
[(transient []) (transient {})])
first
persistent!)))
(defn nemesis-intervals
"Given a history where a nemesis goes through :f :start and :f :stop
transitions, constructs a sequence of pairs of :start and :stop ops. Since a
nemesis usually goes :start :start :stop :stop, we construct pairs of the
first and third, then second and fourth events. Where no :stop op is present,
we emit a pair like [start nil]."
[history]
(let [[pairs starts] (->> history
(filter #(= :nemesis (:process %)))
(reduce (fn [[pairs starts] op]
(case (:f op)
:start [pairs (conj starts op)]
:stop [(conj pairs [(peek starts)
op])
(pop starts)]
[pairs starts]))
[[] (clojure.lang.PersistentQueue/EMPTY)]))]
(concat pairs (map vector starts (repeat nil)))))
(defn longest-common-prefix
"Given a collection of sequences, finds the longest sequence which is a
prefix of every sequence given."
[cs]
(when (seq cs)
(reduce (fn prefix [s1 s2]
(let [len (->> (map = s1 s2)
(take-while true?)
count)]
; Avoid unnecessary seq wrapping
(if (= len (count s1))
s1
(take len s2))))
cs)))
(defn drop-common-proper-prefix
  "Given a collection of sequences, removes the longest common proper prefix
  from each one."
  [cs]
  (let [;; Cap the drop count at (dec count) of every sequence so at least
        ;; one element survives in each -- hence *proper* prefix.
        n (reduce min
                  (count (longest-common-prefix cs))
                  (map (comp dec count) cs))]
    (map (fn [s] (drop n s)) cs)))
;; Marker interface for atoms whose initial value is computed lazily on
;; first use (see lazy-atom below).
(definterface ILazyAtom
  ;; Forces initialization and returns the current value.
  (init []))
(defn lazy-atom
  "An atom with lazy state initialization. Calls (f) on first use to provide
  the initial value of the atom. Only supports swap/reset/deref. Reset bypasses
  lazy initialization. If f throws, behavior is undefined (read: proper
  fucked)."
  [f]
  ;; ::fresh is a sentinel meaning \"not yet initialized\". Every read path
  ;; goes through init, which swaps it for (f) exactly once, under a lock.
  (let [state ^clojure.lang.Atom (atom ::fresh)]
    (reify
      ILazyAtom
      (init [_]
        (let [s @state]
          (if-not (identical? s ::fresh)
            ; Regular old value
            s
            ; Someone must initialize. Everyone form an orderly queue.
            (do (locking state
                  (if (identical? @state ::fresh)
                    ; We're the first; compute and install the initial value.
                    (reset! state (f))))
                ; OK, definitely initialized now.
                @state))))
      clojure.lang.IAtom
      ;; Note: each swap arity's parameter f shadows the outer f, but init's
      ;; closure captured the outer f at reify time, so this is benign.
      (swap [this f]
        (.init this)
        (.swap state f))
      (swap [this f a]
        (.init this)
        (.swap state f a))
      (swap [this f a b]
        (.init this)
        (.swap state f a b))
      (swap [this f a b more]
        (.init this)
        (.swap state f a b more))
      (compareAndSet [this v v']
        (.init this)
        (.compareAndSet state v v'))
      ;; Reset deliberately skips init: the fresh sentinel is simply
      ;; overwritten, and (f) will never run.
      (reset [this v]
        (.reset state v))
      clojure.lang.IDeref
      (deref [this]
        ;; init returns the (possibly just-computed) value.
        (.init this)))))
| null | https://raw.githubusercontent.com/rabbitmq/ra-kv-store/faf36863bb3822ef4dcd99de5635007273d35997/jepsen/jepsen/src/jepsen/util.clj | clojure | (->> (org.apache.log4j.LogManager/getCurrentLoggers)
(java.util.Collections/list)
(cons (org.apache.log4j.LogManager/getRootLogger))))
`(let [loggers# (all-loggers)
(try
(doseq [l# loggers#]
~@body
(finally
loggers#
levels#))))))
(mute-log4j
)
Final group with no returns
Start new run
Continue run
Break!
New invocation!
We have an invocation for this process
Compute latency
We have no invocation for this process
Avoid unnecessary seq wrapping
Regular old value
Someone must initialize. Everyone form an orderly queue.
OK, definitely initialized now. | (ns jepsen.util
"Kitchen sink"
(:require [clojure.tools.logging :refer [info]]
[clojure.core.reducers :as r]
[clojure.string :as str]
[clojure.pprint :refer [pprint]]
[clojure.walk :as walk]
[clojure.java.io :as io]
[clj-time.core :as time]
[clj-time.local :as time.local]
[clojure.tools.logging :refer [debug info warn]]
[dom-top.core :refer [bounded-future]]
[knossos.history :as history])
(:import (java.util.concurrent.locks LockSupport)
(java.util.concurrent ExecutionException)
(java.io File
RandomAccessFile)))
(defn exception?
"Is x an Exception?"
[x]
(instance? Exception x))
(defn fcatch
"Takes a function and returns a version of it which returns, rather than
throws, exceptions."
[f]
(fn wrapper [& args]
(try (apply f args)
(catch Exception e e))))
(defn random-nonempty-subset
"A randomly selected, randomly ordered, non-empty subset of the given
collection."
[nodes]
(take (inc (rand-int (count nodes))) (shuffle nodes)))
(defn name+
"Tries name, falls back to pr-str."
[x]
(if (instance? clojure.lang.Named x)
(name x))
(pr-str x))
(defn real-pmap
"Like pmap, but launches futures instead of using a bounded threadpool."
[f coll]
(->> coll
(map (fn launcher [x] (future (f x))))
doall
(map deref)))
(defn processors
"How many processors on this platform?"
[]
(.. Runtime getRuntime availableProcessors))
(defn majority
"Given a number, returns the smallest integer strictly greater than half."
[n]
(inc (int (Math/floor (/ n 2)))))
(defn min-by
"Finds the minimum element of a collection based on some (f element), which
returns Comparables. If `coll` is empty, returns nil."
[f coll]
(when (seq coll)
(reduce (fn [m e]
(if (pos? (compare (f m) (f e)))
e
m))
coll)))
(defn max-by
"Finds the maximum element of a collection based on some (f element), which
returns Comparables. If `coll` is empty, returns nil."
[f coll]
(when (seq coll)
(reduce (fn [m e]
(if (neg? (compare (f m) (f e)))
e
m))
coll)))
(defn fast-last
"Like last, but O(1) on counted collections."
[coll]
(nth coll (dec (count coll))))
(defn rand-nth-empty
"Like rand-nth, but returns nil if the collection is empty."
[coll]
(try (rand-nth coll)
(catch IndexOutOfBoundsException e nil)))
(defn fraction
"a/b, but if b is zero, returns unity."
[a b]
(if (zero? b)
1
(/ a b)))
(defn inc*
"Like inc, but (inc nil) => 1."
[x]
(if (nil? x)
1
(inc x)))
(defn local-time
"Drops millisecond resolution"
[]
(let [t (time.local/local-now)]
(time/minus t (time/millis (time/milli t)))))
(defn chunk-vec
"Partitions a vector into reducibles of size n (somewhat like partition-all)
but uses subvec for speed.
= > ( [ 1 ] )
= > ( [ 1 2 ] [ 3 ] ) "
([^long n v]
(let [c (count v)]
(->> (range 0 c n)
(map #(subvec v % (min c (+ % n))))))))
(def buf-size 1048576)
(defn concat-files!
"Appends contents of all fs, writing to out. Returns fs."
[out fs]
(with-open [oc (.getChannel (RandomAccessFile. (io/file out) "rw"))]
(doseq [f fs]
(with-open [fc (.getChannel (RandomAccessFile. (io/file f) "r"))]
(let [size (.size fc)]
(loop [position 0]
(when (< position size)
(recur (+ position (.transferTo fc
position
(min (- size position)
buf-size)
oc)))))))))
fs)
(defn op->str
"Format an operation as a string."
[op]
(str (:process op) \tab
(:type op) \tab
(pr-str (:f op)) \tab
(pr-str (:value op))
(when-let [err (:error op)]
(str \tab err))))
(defn prn-op
"Prints an operation to the console."
[op]
(pr (:process op)) (print \tab)
(pr (:type op)) (print \tab)
(pr (:f op)) (print \tab)
(pr (:value op))
(when-let [err (:error op)]
(print \tab) (print err))
(print \newline))
(defn print-history
"Prints a history to the console."
([history]
(print-history prn-op history))
([printer history]
(doseq [op history]
(printer op))))
(defn write-history!
"Writes a history to a file."
([f history]
(write-history! f prn-op history))
([f printer history]
(with-open [w (io/writer f)]
(binding [*out* w]
(print-history printer history)))))
(defn pwrite-history!
"Writes history, taking advantage of more cores."
([f history]
(pwrite-history! f prn-op history))
([f printer history]
(if (or (< (count history) 16384) (not (vector? history)))
Plain old write
(write-history! f printer history)
Parallel variant
(let [chunks (chunk-vec (Math/ceil (/ (count history) (processors)))
history)
files (repeatedly (count chunks)
#(File/createTempFile "jepsen-history" ".part"))]
(try
(->> chunks
(map (fn [file chunk]
(bounded-future (write-history! file printer chunk) file))
files)
doall
(map deref)
(concat-files! f))
(finally
(doseq [f files] (.delete ^File f))))))))
(defn log-op
"Logs an operation and returns it."
[op]
(info (op->str op))
op)
(def logger (agent nil))
(defn log-print
[_ & things]
(apply println things))
(defn log
[& things]
(apply send-off logger log-print things))
( defn all - loggers [ ]
(defn all-jdk-loggers []
(let [manager (java.util.logging.LogManager/getLogManager)]
(->> manager
.getLoggerNames
java.util.Collections/list
(map #(.getLogger manager %)))))
(defmacro mute-jdk [& body]
`(let [loggers# (all-jdk-loggers)
levels# (map #(.getLevel %) loggers#)]
(try
(doseq [l# loggers#]
(.setLevel l# java.util.logging.Level/OFF))
~@body
(finally
(dorun (map (fn [logger# level#] (.setLevel logger# level#))
loggers#
levels#))))))
( [ & body ]
levels # ( map # ( .getLevel % ) loggers # ) ]
( .setLevel l # org.apache.log4j . Level / OFF ) )
( dorun ( map ( fn [ logger # level # ] ( .setLevel logger # level # ) )
(defmacro mute [& body]
`(mute-jdk
(defn ms->nanos [ms] (* ms 1000000))
(defn nanos->ms [nanos] (/ nanos 1000000))
(defn secs->nanos [s] (* s 1e9))
(defn nanos->secs [nanos] (/ nanos 1e9))
(defn ^Long linear-time-nanos
"A linear time source in nanoseconds."
[]
(System/nanoTime))
(def ^:dynamic ^Long *relative-time-origin*
"A reference point for measuring time in a test run.")
(defmacro with-relative-time
"Binds *relative-time-origin* at the start of body."
[& body]
`(binding [*relative-time-origin* (linear-time-nanos)]
~@body))
(defn relative-time-nanos
"Time in nanoseconds since *relative-time-origin*"
[]
(- (linear-time-nanos) *relative-time-origin*))
(defn sleep
"High-resolution sleep; takes a (possibly fractional) time in ms."
[dt]
(let [t (+ (long (ms->nanos dt))
(System/nanoTime))]
(while (< (+ (System/nanoTime) 10000) t)
(LockSupport/parkNanos (- t (System/nanoTime))))))
(defmacro time-
[& body]
`(let [t0# (System/nanoTime)]
~@body
(nanos->ms (- (System/nanoTime) t0#))))
(defn pprint-str [x]
(with-out-str (pprint x)))
(defn spy [x]
(info (pprint-str x))
x)
(defmacro timeout
"Times out body after n millis, returning timeout-val."
[millis timeout-val & body]
`(let [worker# (future ~@body)
retval# (try
(deref worker# ~millis ::timeout)
(catch ExecutionException ee#
(throw (.getCause ee#))))]
(if (= retval# ::timeout)
(do (future-cancel worker#)
~timeout-val)
retval#)))
(defmacro retry
"Evals body repeatedly until it doesn't throw, sleeping dt seconds."
[dt & body]
`(loop []
(let [res# (try ~@body
(catch Throwable e#
( warn e # " retrying in " ~dt " seconds " )
::failed))]
(if (= res# ::failed)
(do (Thread/sleep (* ~dt 1000))
(recur))
res#))))
(defrecord Retry [bindings])
(defmacro with-retry
"It's really fucking inconvenient not being able to recur from within (catch)
expressions. This macro wraps its body in a (loop [bindings] (try ...)).
Provides a (retry & new bindings) form which is usable within (catch) blocks:
when this form is returned by the body, the body will be retried with the new
bindings."
[initial-bindings & body]
(assert (vector? initial-bindings))
(assert (even? (count initial-bindings)))
(let [bindings-count (/ (count initial-bindings) 2)
body (walk/prewalk (fn [form]
(if (and (seq? form)
(= 'retry (first form)))
(do (assert (= bindings-count
(count (rest form))))
`(Retry. [~@(rest form)]))
form))
body)
retval (gensym 'retval)]
`(loop [~@initial-bindings]
(let [~retval (try ~@body)]
(if (instance? Retry ~retval)
(recur ~@(->> (range bindings-count)
(map (fn [i] `(nth (.bindings ~retval) ~i)))))
~retval)))))
(deftype Return [value])
(defn letr-rewrite-return
"Rewrites (return x) to (Return. x) in expr. Returns a pair of [changed?
expr], where changed is whether the expression contained a return."
[expr]
(let [return? (atom false)
expr (walk/prewalk
(fn [form]
(if (and (seq? form)
(= 'return (first form)))
(do (assert
(= 2 (count form))
(str (pr-str form) " should have one argument"))
(reset! return? true)
`(Return. ~(second form)))
form))
expr)]
[@return? expr]))
(defn letr-partition-bindings
"Takes a vector of bindings [sym expr, sym' expr, ...]. Returns
binding-groups: a sequence of vectors of bindgs, where the final binding in
each group has an early return. The final group (possibly empty!) contains no
early return."
[bindings]
(->> bindings
(partition 2)
(reduce (fn [groups [sym expr]]
(let [[return? expr] (letr-rewrite-return expr)
groups (assoc groups
(dec (count groups))
(-> (peek groups) (conj sym) (conj expr)))]
(if return?
(do (assert (symbol? sym)
(str (pr-str sym " must be a symbol")))
(conj groups []))
groups)))
[[]])))
(defn letr-let-if
"Takes a sequence of binding groups and a body expression, and emits a let
ending with body."
[groups body]
(assert (pos? (count groups)))
(if (= 1 (count groups))
`(let ~(first groups) ~@body)
Group ending in a return
(let [bindings (first groups)
final-sym (nth bindings (- (count bindings) 2))]
`(let ~bindings
(if (instance? Return ~final-sym)
(.value ~final-sym)
~(letr-let-if (rest groups) body))))))
(defmacro letr
"Let bindings, plus early return.
You want to do some complicated, multi-stage operation assigning lots of
variables--but at different points in the let binding, you need to perform
some conditional check to make sure you can proceed to the next step.
Ordinarily, you'd intersperse let and if statements, like so:
(let [res (network-call)]
(if-not (:ok? res)
:failed-network-call
(let [people (:people (:body res))]
(if (zero? (count people))
:no-people
(let [res2 (network-call-2 people)]
...
This is a linear chain of operations, but we're forced to nest deeply because
we have no early-return construct. In ruby, we might write
res = network_call
return :failed_network_call if not x.ok?
people = res[:body][:people]
return :no-people if people.empty?
res2 = network_call_2 people
...
which reads the same, but requires no nesting thanks to Ruby's early return.
Clojure's single-return is *usually* a boon to understandability, but deep
linear branching usually means something like
- Deep nesting (readability issues)
- Function chaining (lots of arguments for bound variables)
- Throw/catch (awkward exception wrappers)
- Monadic interpreter (slow, indirect)
This macro lets you write:
(letr [res (network-call)
_ (when-not (:ok? res) (return :failed-network-call))
people (:people (:body res))
_ (when (zero? (count people)) (return :no-people))
res2 (network-call-2 people)]
...)
letr works like let, but if (return x) is ever returned from a binding, letr
returns x, and does not evaluate subsequent expressions.
If something other than (return x) is returned from evaluating a binding,
letr binds the corresponding variable as normal. Here, we use _ to indicate
that we're not using the results of (when ...), but this is not mandatory.
You cannot use a destructuring bind for a return expression.
letr is not a *true* early return--(return x) must be a *terminal* expression
for it to work--like (recur). For example,
(letr [x (do (return 2) 1)]
x)
returns 1, not 2, because (return 2) was not the terminal expression.
return only works within letr's bindings, not its body."
[bindings & body]
(assert (vector? bindings))
(assert (even? (count bindings)))
(let [groups (letr-partition-bindings bindings)]
(letr-let-if (letr-partition-bindings bindings) body)))
(defn map-kv
"Takes a function (f [k v]) which returns [k v], and builds a new map by
applying f to every pair."
[f m]
(into {} (r/map f m)))
(defn map-vals
"Maps values in a map."
[f m]
(map-kv (fn [[k v]] [k (f v)]) m))
(defn compare<
"Like <, but works on any comparable objects, not just numbers."
[a b]
(neg? (compare a b)))
(defn poly-compare
"Comparator function for sorting heterogenous collections."
[a b]
(try (compare a b)
(catch java.lang.ClassCastException e
(compare (str (class a)) (str (class b))))))
(defn polysort
"Sort, but on heterogenous collections."
[coll]
(sort poly-compare coll))
(defn integer-interval-set-str
"Takes a set of integers and yields a sorted, compact string representation."
[set]
(if (some nil? set)
(str set)
(let [[runs start end]
(reduce (fn r [[runs start end] cur]
(nil? start) [runs cur cur]
(= cur (inc end)) [runs start cur]
:else [(conj runs [start end]) cur cur]))
[[] nil nil]
(sort set))
runs (if (nil? start) runs (conj runs [start end]))]
(str "#{"
(->> runs
(map (fn m [[start end]]
(if (= start end)
start
(str start ".." end))))
(str/join " "))
"}"))))
(defmacro meh
"Returns, rather than throws, exceptions."
[& body]
`(try ~@body (catch Exception e# e#)))
(defmacro with-thread-name
"Sets the thread name for duration of block."
[thread-name & body]
`(let [old-name# (.. Thread currentThread getName)]
(try
(.. Thread currentThread (setName (name ~thread-name)))
~@body
(finally (.. Thread currentThread (setName old-name#))))))
(defn maybe-number
"Tries reading a string as a long, then double, then string. Passes through
nil. Useful for getting nice values out of stats APIs that just dump a bunch
of heterogenously-typed strings at you."
[s]
(when s
(try (Long/parseLong s)
(catch java.lang.NumberFormatException e
(try (Double/parseDouble s)
(catch java.lang.NumberFormatException e
s))))))
(defn coll
"Wraps non-coll things into singleton lists, and leaves colls as themselves.
Useful when you can take either a single thing or a sequence of things."
[thing-or-things]
(cond (nil? thing-or-things) nil
(coll? thing-or-things) thing-or-things
true (list thing-or-things)))
(defn sequential
"Wraps non-sequential things into singleton lists, and leaves sequential
things or nil as themselves. Useful when you can take either a single thing
or a sequence of things."
[thing-or-things]
(cond (nil? thing-or-things) nil
(sequential? thing-or-things) thing-or-things
true (list thing-or-things)))
(defn history->latencies
"Takes a history--a sequence of operations--and emits the same history but
with every invocation containing two new keys:
:latency the time in nanoseconds it took for the operation to complete.
:completion the next event for that process"
[history]
(let [idx (->> history
(map-indexed (fn [i op] [op i]))
(into {}))]
(->> history
(reduce (fn [[history invokes] op]
(if (= :invoke (:type op))
[(conj! history op)
(assoc! invokes (:process op)
(dec (count history)))]
(if-let [invoke-idx (get invokes (:process op))]
(let [invoke (get history invoke-idx)
l (- (:time op) (:time invoke))
op (assoc op :latency l)]
[(-> history
(assoc! invoke-idx
(assoc invoke :latency l, :completion op))
(conj! op))
(dissoc! invokes (:process op))])
[(conj! history op) invokes])))
[(transient []) (transient {})])
first
persistent!)))
(defn nemesis-intervals
"Given a history where a nemesis goes through :f :start and :f :stop
transitions, constructs a sequence of pairs of :start and :stop ops. Since a
nemesis usually goes :start :start :stop :stop, we construct pairs of the
first and third, then second and fourth events. Where no :stop op is present,
we emit a pair like [start nil]."
[history]
(let [[pairs starts] (->> history
(filter #(= :nemesis (:process %)))
(reduce (fn [[pairs starts] op]
(case (:f op)
:start [pairs (conj starts op)]
:stop [(conj pairs [(peek starts)
op])
(pop starts)]
[pairs starts]))
[[] (clojure.lang.PersistentQueue/EMPTY)]))]
(concat pairs (map vector starts (repeat nil)))))
(defn longest-common-prefix
"Given a collection of sequences, finds the longest sequence which is a
prefix of every sequence given."
[cs]
(when (seq cs)
(reduce (fn prefix [s1 s2]
(let [len (->> (map = s1 s2)
(take-while true?)
count)]
(if (= len (count s1))
s1
(take len s2))))
cs)))
(defn drop-common-proper-prefix
"Given a collection of sequences, removes the longest common proper prefix
from each one."
[cs]
(map (partial drop (reduce min
(count (longest-common-prefix cs))
(map (comp dec count) cs)))
cs))
(definterface ILazyAtom
(init []))
(defn lazy-atom
"An atom with lazy state initialization. Calls (f) on first use to provide
the initial value of the atom. Only supports swap/reset/deref. Reset bypasses
lazy initialization. If f throws, behavior is undefined (read: proper
fucked)."
[f]
(let [state ^clojure.lang.Atom (atom ::fresh)]
(reify
ILazyAtom
(init [_]
(let [s @state]
(if-not (identical? s ::fresh)
s
(do (locking state
(if (identical? @state ::fresh)
We 're the first .
(reset! state (f))))
@state))))
clojure.lang.IAtom
(swap [this f]
(.init this)
(.swap state f))
(swap [this f a]
(.init this)
(.swap state f a))
(swap [this f a b]
(.init this)
(.swap state f a b))
(swap [this f a b more]
(.init this)
(.swap state f a b more))
(compareAndSet [this v v']
(.init this)
(.compareAndSet state v v'))
(reset [this v]
(.reset state v))
clojure.lang.IDeref
(deref [this]
(.init this)))))
|
2eb3ae3d06bee70ae58c83ea237ccd4e8d326dda2c70c18d5fab03f67e15b4e2 | ldgrp/uptop | Main.hs | {-# LANGUAGE OverloadedStrings #-}
module Main where
import App
import Auth
import Brick.BChan
import Brick.Main
import qualified Brick.Widgets.List as L
import Control.Concurrent
import Control.Monad
import Data.HashMap.Strict
import qualified Data.Text as T
import qualified Data.Vector as Vec
import Graphics.Vty
import Servant.Client
import System.Environment
import System.Exit
import Types
import Up
import Up.API
import Up.Model.Category
import Up.Model.Token
main :: IO ()
main = do
let buildVty = mkVty defaultConfig
initialVty <- buildVty
-- Read the token environment variable
envToken <- lookupEnv "UP_BANK_TOKEN"
(authEvent, vty) <- interactiveAuth initialVty buildVty envToken
-- Either we have a working token, or the user has exited early.
token <- case authEvent of
Just (ASuccess (AuthInfo tok)) -> pure $ T.unpack tok
Just _ae -> do
shutdown vty
exitSuccess
Nothing -> do
shutdown vty
exitSuccess
env <- mkUpClient (Token token)
requestChan <- newBChan 100
responseChan <- newBChan 100
void $ forkIO $ requestWorker env requestChan responseChan
writeBChan requestChan FetchAccounts
writeBChan requestChan FetchCategories
void $ customMainWithVty vty buildVty (Just responseChan) app (initialState env requestChan)
shutdown vty
-- Thread for handling requests
requestWorker :: ClientEnv -> BChan URequest -> BChan UEvent -> IO ()
requestWorker env requestChan responseChan = forever $ do
req <- readBChan requestChan
case req of
FetchTransaction aid -> do
res <- query (listTransactionsByAccount_ aid Nothing Nothing Nothing Nothing)
case res of
Right ts -> writeBChan responseChan $ UTransactions (aid, ts)
Left err -> writeBChan responseChan $ UError (show err)
FetchAccount aid -> do
res <- query (retrieveAccount aid)
case res of
Right a -> writeBChan responseChan $ UAccount a
Left err -> writeBChan responseChan $ UError (show err)
FetchAccounts -> do
res <- query (listAccounts_ Nothing)
case res of
Right as -> writeBChan responseChan $ UAccounts as
Left err -> writeBChan responseChan $ UError (show err)
FetchCategories -> do
res <- query (getCategories <$> listCategories Nothing)
case res of
Right cs -> writeBChan responseChan $ UCategories cs
Left err -> writeBChan responseChan $ UError (show err)
where
query = flip runClientM env
initialState ::
ClientEnv ->
BChan URequest ->
State
initialState env requestChan =
State
{ _transactions = empty,
_accounts = L.list AccountList (Vec.fromList []) 1,
_screen = ListZipper [helpScreen] [] mainScreen,
_categoryMap = empty,
_clientEnv = env,
_reqChan = requestChan,
_version = appVersion
}
appVersion :: Version
appVersion = Version "0.2" | null | https://raw.githubusercontent.com/ldgrp/uptop/53001b39793df4be48c9c3aed9454be0fc178434/up-top/src/Main.hs | haskell | # LANGUAGE OverloadedStrings #
Read the token environment variable
Either we have a working token, or the user has exited early.
Thread for handling requests |
module Main where
import App
import Auth
import Brick.BChan
import Brick.Main
import qualified Brick.Widgets.List as L
import Control.Concurrent
import Control.Monad
import Data.HashMap.Strict
import qualified Data.Text as T
import qualified Data.Vector as Vec
import Graphics.Vty
import Servant.Client
import System.Environment
import System.Exit
import Types
import Up
import Up.API
import Up.Model.Category
import Up.Model.Token
main :: IO ()
main = do
let buildVty = mkVty defaultConfig
initialVty <- buildVty
envToken <- lookupEnv "UP_BANK_TOKEN"
(authEvent, vty) <- interactiveAuth initialVty buildVty envToken
token <- case authEvent of
Just (ASuccess (AuthInfo tok)) -> pure $ T.unpack tok
Just _ae -> do
shutdown vty
exitSuccess
Nothing -> do
shutdown vty
exitSuccess
env <- mkUpClient (Token token)
requestChan <- newBChan 100
responseChan <- newBChan 100
void $ forkIO $ requestWorker env requestChan responseChan
writeBChan requestChan FetchAccounts
writeBChan requestChan FetchCategories
void $ customMainWithVty vty buildVty (Just responseChan) app (initialState env requestChan)
shutdown vty
requestWorker :: ClientEnv -> BChan URequest -> BChan UEvent -> IO ()
requestWorker env requestChan responseChan = forever $ do
req <- readBChan requestChan
case req of
FetchTransaction aid -> do
res <- query (listTransactionsByAccount_ aid Nothing Nothing Nothing Nothing)
case res of
Right ts -> writeBChan responseChan $ UTransactions (aid, ts)
Left err -> writeBChan responseChan $ UError (show err)
FetchAccount aid -> do
res <- query (retrieveAccount aid)
case res of
Right a -> writeBChan responseChan $ UAccount a
Left err -> writeBChan responseChan $ UError (show err)
FetchAccounts -> do
res <- query (listAccounts_ Nothing)
case res of
Right as -> writeBChan responseChan $ UAccounts as
Left err -> writeBChan responseChan $ UError (show err)
FetchCategories -> do
res <- query (getCategories <$> listCategories Nothing)
case res of
Right cs -> writeBChan responseChan $ UCategories cs
Left err -> writeBChan responseChan $ UError (show err)
where
query = flip runClientM env
initialState ::
ClientEnv ->
BChan URequest ->
State
initialState env requestChan =
State
{ _transactions = empty,
_accounts = L.list AccountList (Vec.fromList []) 1,
_screen = ListZipper [helpScreen] [] mainScreen,
_categoryMap = empty,
_clientEnv = env,
_reqChan = requestChan,
_version = appVersion
}
appVersion :: Version
appVersion = Version "0.2" |
d30371e642b311fb57b0c69438c5581880205dc07928c8cd645a39d9b0451d26 | fpco/schoolofhaskell.com | EditTutorial.hs | module Handler.EditTutorial where
import Import
import Handler.EditGroup (slugField)
import Text.Markdown (markdown)
import Data.Time (addUTCTime)
data Meta = Meta
{ metaTitle :: Title
, metaDesc :: Textarea
, metaSlug :: TutorialName
, metaEnv :: MaybeEnv
}
data MaybeEnv = DefaultEnv | Env GhcEnvId
deriving Eq
form :: Entity Tmember -> Tutorial -> [GhcEnvId] -> Form Meta
form memEnt@(Entity _ Tmember {..}) Tutorial {..} ghcEnvs = renderTable $ Meta
<$> areq titleField "Title" (Just tutorialTitle)
<*> (fromMaybe (Textarea "") <$> aopt textareaField "Description" (Just $ Just tutorialDesc))
<*> areq (slugField memEnt) "Slug" { fsTooltip = Just "Used in URLs" } (Just tmemberSlug)
<*> areq (selectFieldList envsList) "Package set" (Just $ maybe DefaultEnv Env tutorialEnvironment)
where
envsList = ("Default package set", DefaultEnv)
: map go ghcEnvs
go id' = (ghcEnvTitle id', Env id')
ghcEnvTitle :: GhcEnvId -> Text
ghcEnvTitle (GhcEnvId x) = x
getEditTutorialR :: TutorialName -> TutorialNames -> Handler Html
getEditTutorialR tn tns = do
Entity _ Profile {..} <- requireProfile
(Entity tid tutorial, entMid@(Entity mid Tmember {..})) <- $runDB $ do
eres <- followSlugPath profileUser tn tns
(mid, Entity _ content, _) <- either (const notFound) return eres
case content of
TcontentTutorialSum tid -> do
t <- get404 tid
m <- get404 mid
return (Entity tid t, Entity mid m)
_ -> notFound
unless (tutorialAuthor tutorial == profileUser) notFound
app <- getYesod
((res, widget), enctype) <- runFormPost $ form entMid tutorial $ appGhcEnvs app
case res of
FormSuccess Meta {..} -> do
mslug <- $runDB $ do
update tid
[ TutorialTitle =. metaTitle
, TutorialDesc =. metaDesc
, TutorialEnvironment =.
case metaEnv of
DefaultEnv -> Nothing
Env x -> Just x
]
case () of
()
| metaSlug /= tmemberSlug -> do
update mid
[ TmemberSlug =. metaSlug
, TmemberSlugUserGen =. True
]
return $ Just metaSlug
| metaTitle /= (tutorialTitle tutorial) && not tmemberSlugUserGen -> do
slug <- getUniqueSlug tmemberHolder metaTitle
if slug /= tmemberSlug
then do
update mid [TmemberSlug =. slug]
return $ Just slug
else return Nothing
| otherwise -> return Nothing
case mslug of
Nothing -> return ()
Just slug ->
let (tn', tns') =
case reverse tns of
[] -> (slug, [])
_:rest -> (tn, reverse $ slug : rest)
in redirect $ EditTutorialR tn' tns'
_ -> return ()
googleAnalytics <- makeGoogleAnalytics Nothing
isPublished <- fmap isJust $ $runDB $ getBy $ UniquePublishedTutorial tid
pc <- widgetToPageContent $ do
setTitle (toHtml (tutorialTitle tutorial))
defaultWidgetJs
$(combineStylesheets 'StaticR
[ codemirror_lib_codemirror_css
, codemirror_addon_dialog_dialog_css
, codemirror_addon_hint_show_hint_css
])
$(widgetFile "edit-tutorial")
withUrlRenderer
[hamlet|
$doctype 5
<html>
<head>
<title>#{pageTitle pc}
^{pageHead pc}
<body>^{pageBody pc}
|]
postEditTutorialR :: TutorialName -> TutorialNames -> Handler Html
postEditTutorialR = getEditTutorialR
postPublishTutorialR :: TutorialName -> TutorialNames -> Handler ()
postPublishTutorialR tn tns = do
Entity _ Profile {..} <- requireProfile
dest <- $runDB $ do
eres <- followSlugPath profileUser tn tns
(_, Entity _ content, _) <- either (const notFound) return eres
case content of
TcontentTutorialSum tid -> do
t@Tutorial {..} <- get404 tid
unless (tutorialAuthor == profileUser) notFound
putFrozenTutorial tid t
update tid [TutorialIsDirty =. False]
now <- liftIO getCurrentTime
let desc = if null $ unTextarea tutorialDesc
then Textarea $ concat
$ toChunks
$ ellipsize 100
$ plainText
$ markdown def
$ fromChunks
$ return
$ unTutorialContent tutorialContent
else tutorialDesc
shouldPublish <- if length (unTutorialContent tutorialContent) < 100 then return False else do
mrt <- getBy $ UniqueRecentTutorial tid
let oneDay = 60 * 60 * 24
case mrt of
Nothing -> return True
Just (Entity rtid RecentTutorial {..})
| addUTCTime oneDay recentTutorialPublished > now -> do
update rtid
[ RecentTutorialTitle =. tutorialTitle
, RecentTutorialDesc =. desc
]
return False
| otherwise -> return True
when shouldPublish $ do
deleteWhere [RecentTutorialTutorial ==. tid]
insert_ RecentTutorial
{ recentTutorialTutorial = tid
, recentTutorialPublished = now
, recentTutorialTitle = tutorialTitle
, recentTutorialDesc = desc
}
populateUserSummary profileUser
setMessage "Tutorial published"
return $ EditTutorialR tn tns
_ -> notFound
redirect dest
postUnpublishTutorialR :: TutorialName -> TutorialNames -> Handler ()
postUnpublishTutorialR tn tns = do
Entity _ Profile {..} <- requireProfile
dest <- $runDB $ do
eres <- followSlugPath profileUser tn tns
(_, Entity _ content, _) <- either (const notFound) return eres
case content of
TcontentTutorialSum tid -> do
Tutorial {..} <- get404 tid
unless (tutorialAuthor == profileUser) notFound
deleteBy $ UniquePublishedTutorial tid
deleteWhere [RecentTutorialTutorial ==. tid]
populateUserSummary profileUser
setMessage "Tutorial unpublished"
return $ EditTutorialR tn tns
_ -> notFound
redirect dest
postSaveTutorialR :: TutorialName -> TutorialNames -> Handler Value
postSaveTutorialR tn tns = do
Entity _ Profile {..} <- requireProfile
$runDB $ do
eres <- followSlugPath profileUser tn tns
(_, Entity _ content, _) <- either (const notFound) return eres
case content of
TcontentTutorialSum tid -> do
saveTutorial tid profileUser
_ -> notFound
postSaveTutorialIdR :: TutorialId -> Handler Value
postSaveTutorialIdR tid = do
Entity _ Profile {..} <- requireProfile
$runDB $ saveTutorial tid profileUser
-- | Save the given tutorial owned by the given user.
saveTutorial :: TutorialId -> UserId -> YesodDB App Value
saveTutorial tid uid = do
Tutorial {..} <- get404 tid
unless (tutorialAuthor == uid) notFound
token <- runInputPost $ ireq intField "token"
-- We want to allow for empty content, but not missing content.
mcontent' <- lookupPostParam "content"
content' <-
case mcontent' of
Nothing -> invalidArgs ["No content provided"]
Just c -> return c
if TutorialConcurrentToken' token == tutorialConcurrentToken
then do
let token' =
let x = token + 1
in if x > 100000 || x < 0
then 0
else x
update tid
[ TutorialContent =. TutorialContent' content'
, TutorialConcurrentToken =. TutorialConcurrentToken' token'
]
return $ object ["new-token" .= token']
else return $ object ["msg" .= asText "Your content is out of date"]
| null | https://raw.githubusercontent.com/fpco/schoolofhaskell.com/15ec1a03cb9d593ee9c0d167dc522afe45ba4f8e/src/Handler/EditTutorial.hs | haskell | | Save the given tutorial owned by the given user.
We want to allow for empty content, but not missing content. | module Handler.EditTutorial where
import Import
import Handler.EditGroup (slugField)
import Text.Markdown (markdown)
import Data.Time (addUTCTime)
data Meta = Meta
{ metaTitle :: Title
, metaDesc :: Textarea
, metaSlug :: TutorialName
, metaEnv :: MaybeEnv
}
data MaybeEnv = DefaultEnv | Env GhcEnvId
deriving Eq
form :: Entity Tmember -> Tutorial -> [GhcEnvId] -> Form Meta
form memEnt@(Entity _ Tmember {..}) Tutorial {..} ghcEnvs = renderTable $ Meta
<$> areq titleField "Title" (Just tutorialTitle)
<*> (fromMaybe (Textarea "") <$> aopt textareaField "Description" (Just $ Just tutorialDesc))
<*> areq (slugField memEnt) "Slug" { fsTooltip = Just "Used in URLs" } (Just tmemberSlug)
<*> areq (selectFieldList envsList) "Package set" (Just $ maybe DefaultEnv Env tutorialEnvironment)
where
envsList = ("Default package set", DefaultEnv)
: map go ghcEnvs
go id' = (ghcEnvTitle id', Env id')
ghcEnvTitle :: GhcEnvId -> Text
ghcEnvTitle (GhcEnvId x) = x
getEditTutorialR :: TutorialName -> TutorialNames -> Handler Html
getEditTutorialR tn tns = do
Entity _ Profile {..} <- requireProfile
(Entity tid tutorial, entMid@(Entity mid Tmember {..})) <- $runDB $ do
eres <- followSlugPath profileUser tn tns
(mid, Entity _ content, _) <- either (const notFound) return eres
case content of
TcontentTutorialSum tid -> do
t <- get404 tid
m <- get404 mid
return (Entity tid t, Entity mid m)
_ -> notFound
unless (tutorialAuthor tutorial == profileUser) notFound
app <- getYesod
((res, widget), enctype) <- runFormPost $ form entMid tutorial $ appGhcEnvs app
case res of
FormSuccess Meta {..} -> do
mslug <- $runDB $ do
update tid
[ TutorialTitle =. metaTitle
, TutorialDesc =. metaDesc
, TutorialEnvironment =.
case metaEnv of
DefaultEnv -> Nothing
Env x -> Just x
]
case () of
()
| metaSlug /= tmemberSlug -> do
update mid
[ TmemberSlug =. metaSlug
, TmemberSlugUserGen =. True
]
return $ Just metaSlug
| metaTitle /= (tutorialTitle tutorial) && not tmemberSlugUserGen -> do
slug <- getUniqueSlug tmemberHolder metaTitle
if slug /= tmemberSlug
then do
update mid [TmemberSlug =. slug]
return $ Just slug
else return Nothing
| otherwise -> return Nothing
case mslug of
Nothing -> return ()
Just slug ->
let (tn', tns') =
case reverse tns of
[] -> (slug, [])
_:rest -> (tn, reverse $ slug : rest)
in redirect $ EditTutorialR tn' tns'
_ -> return ()
googleAnalytics <- makeGoogleAnalytics Nothing
isPublished <- fmap isJust $ $runDB $ getBy $ UniquePublishedTutorial tid
pc <- widgetToPageContent $ do
setTitle (toHtml (tutorialTitle tutorial))
defaultWidgetJs
$(combineStylesheets 'StaticR
[ codemirror_lib_codemirror_css
, codemirror_addon_dialog_dialog_css
, codemirror_addon_hint_show_hint_css
])
$(widgetFile "edit-tutorial")
withUrlRenderer
[hamlet|
$doctype 5
<html>
<head>
<title>#{pageTitle pc}
^{pageHead pc}
<body>^{pageBody pc}
|]
postEditTutorialR :: TutorialName -> TutorialNames -> Handler Html
postEditTutorialR = getEditTutorialR
postPublishTutorialR :: TutorialName -> TutorialNames -> Handler ()
postPublishTutorialR tn tns = do
Entity _ Profile {..} <- requireProfile
dest <- $runDB $ do
eres <- followSlugPath profileUser tn tns
(_, Entity _ content, _) <- either (const notFound) return eres
case content of
TcontentTutorialSum tid -> do
t@Tutorial {..} <- get404 tid
unless (tutorialAuthor == profileUser) notFound
putFrozenTutorial tid t
update tid [TutorialIsDirty =. False]
now <- liftIO getCurrentTime
let desc = if null $ unTextarea tutorialDesc
then Textarea $ concat
$ toChunks
$ ellipsize 100
$ plainText
$ markdown def
$ fromChunks
$ return
$ unTutorialContent tutorialContent
else tutorialDesc
shouldPublish <- if length (unTutorialContent tutorialContent) < 100 then return False else do
mrt <- getBy $ UniqueRecentTutorial tid
let oneDay = 60 * 60 * 24
case mrt of
Nothing -> return True
Just (Entity rtid RecentTutorial {..})
| addUTCTime oneDay recentTutorialPublished > now -> do
update rtid
[ RecentTutorialTitle =. tutorialTitle
, RecentTutorialDesc =. desc
]
return False
| otherwise -> return True
when shouldPublish $ do
deleteWhere [RecentTutorialTutorial ==. tid]
insert_ RecentTutorial
{ recentTutorialTutorial = tid
, recentTutorialPublished = now
, recentTutorialTitle = tutorialTitle
, recentTutorialDesc = desc
}
populateUserSummary profileUser
setMessage "Tutorial published"
return $ EditTutorialR tn tns
_ -> notFound
redirect dest
postUnpublishTutorialR :: TutorialName -> TutorialNames -> Handler ()
postUnpublishTutorialR tn tns = do
Entity _ Profile {..} <- requireProfile
dest <- $runDB $ do
eres <- followSlugPath profileUser tn tns
(_, Entity _ content, _) <- either (const notFound) return eres
case content of
TcontentTutorialSum tid -> do
Tutorial {..} <- get404 tid
unless (tutorialAuthor == profileUser) notFound
deleteBy $ UniquePublishedTutorial tid
deleteWhere [RecentTutorialTutorial ==. tid]
populateUserSummary profileUser
setMessage "Tutorial unpublished"
return $ EditTutorialR tn tns
_ -> notFound
redirect dest
postSaveTutorialR :: TutorialName -> TutorialNames -> Handler Value
postSaveTutorialR tn tns = do
Entity _ Profile {..} <- requireProfile
$runDB $ do
eres <- followSlugPath profileUser tn tns
(_, Entity _ content, _) <- either (const notFound) return eres
case content of
TcontentTutorialSum tid -> do
saveTutorial tid profileUser
_ -> notFound
postSaveTutorialIdR :: TutorialId -> Handler Value
postSaveTutorialIdR tid = do
Entity _ Profile {..} <- requireProfile
$runDB $ saveTutorial tid profileUser
saveTutorial :: TutorialId -> UserId -> YesodDB App Value
saveTutorial tid uid = do
Tutorial {..} <- get404 tid
unless (tutorialAuthor == uid) notFound
token <- runInputPost $ ireq intField "token"
mcontent' <- lookupPostParam "content"
content' <-
case mcontent' of
Nothing -> invalidArgs ["No content provided"]
Just c -> return c
if TutorialConcurrentToken' token == tutorialConcurrentToken
then do
let token' =
let x = token + 1
in if x > 100000 || x < 0
then 0
else x
update tid
[ TutorialContent =. TutorialContent' content'
, TutorialConcurrentToken =. TutorialConcurrentToken' token'
]
return $ object ["new-token" .= token']
else return $ object ["msg" .= asText "Your content is out of date"]
|
728bd88ffa194a9a01e9a773579962ff4fd442d0eb38290d73762aa922ad595d | RefactoringTools/HaRe | WhereIn8.hs | module LiftOneLevel.WhereIn8 where
lift ' ' one level up .
g y = f + 345
where
f = y + b
where
b=17
| null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/test/testdata/LiftOneLevel/WhereIn8.hs | haskell | module LiftOneLevel.WhereIn8 where
lift ' ' one level up .
g y = f + 345
where
f = y + b
where
b=17
|
|
7d926756c28b17145ac2c30075c9e242a303acb952af592ab55340ad3f01b42f | ocaml-multicore/ocaml-effects-tutorial | echo_async.ml | open Printf
module type Aio = sig
type 'a promise
(** Type of promises *)
val async : (unit -> 'a) -> 'a promise
(** [async f] runs [f] concurrently *)
val await : 'a promise -> 'a
(** [await p] returns the result of the promise. *)
val yield : unit -> unit
(** yields control to another task *)
val accept : Unix.file_descr -> Unix.file_descr * Unix.sockaddr
val recv : Unix.file_descr -> bytes -> int -> int -> Unix.msg_flag list -> int
val send : Unix.file_descr -> bytes -> int -> int -> Unix.msg_flag list -> int
val run : (unit -> 'a) -> unit
(** Runs the scheduler *)
end
module Aio : Aio = struct
open Effect
open Effect.Deep
type 'a _promise =
Waiting of ('a,unit) continuation list
| Done of 'a
type 'a promise = 'a _promise ref
type _ Effect.t += Async : (unit -> 'a) -> 'a promise Effect.t
let async f = perform (Async f)
type _ Effect.t += Yield : unit Effect.t
let yield () = perform Yield
type _ Effect.t += Await : 'a promise -> 'a Effect.t
let await p = perform (Await p)
type file_descr = Unix.file_descr
type sockaddr = Unix.sockaddr
type msg_flag = Unix.msg_flag
type _ Effect.t += Accept : file_descr -> (file_descr * sockaddr) Effect.t
let accept fd = perform (Accept fd)
type _ Effect.t += Recv : file_descr * bytes * int * int * msg_flag list -> int Effect.t
let recv fd buf pos len mode = perform (Recv (fd, buf, pos, len, mode))
type _ Effect.t += Send : file_descr * bytes * int * int * msg_flag list -> int Effect.t
let send fd bus pos len mode = perform (Send (fd, bus, pos, len, mode))
(********************)
let ready_to_read fd =
match Unix.select [fd] [] [] 0. with
| [], _, _ -> false
| _ -> true
let ready_to_write fd =
match Unix.select [] [fd] [] 0. with
| _, [], _ -> false
| _ -> true
let q = Queue.create ()
let enqueue t = Queue.push t q
type blocked = Blocked : 'a Effect.t * ('a, unit) continuation -> blocked
(* tasks blocked on reads *)
let br = Hashtbl.create 13
(* tasks blocked on writes *)
let bw = Hashtbl.create 13
let rec schedule () =
if not (Queue.is_empty q) then
(* runnable tasks available *)
Queue.pop q ()
else if Hashtbl.length br = 0 && Hashtbl.length bw = 0 then
(* no runnable tasks, and no blocked tasks => we're done. *)
()
else begin (* no runnable tasks, but blocked tasks available *)
let rd_fds = Hashtbl.fold (fun fd _ acc -> fd::acc) br [] in
let wr_fds = Hashtbl.fold (fun fd _ acc -> fd::acc) bw [] in
let rdy_rd_fds, rdy_wr_fds, _ = Unix.select rd_fds wr_fds [] (-1.) in
let rec resume ht = function
| [] -> ()
| x::xs ->
begin match Hashtbl.find ht x with
| Blocked (Recv (fd, buf, pos, len, mode), k) ->
enqueue (fun () -> continue k (Unix.recv fd buf pos len mode))
| Blocked (Accept fd, k) -> failwith "not implemented"
| Blocked (Send (fd, buf, pos, len, mode), k) -> failwith "not implemented"
| Blocked _ -> failwith "impossible"
end;
Hashtbl.remove ht x
in
resume br rdy_rd_fds;
resume br rdy_wr_fds;
schedule ()
end
let run main =
let rec fork : 'a. 'a promise -> (unit -> 'a) -> unit =
fun pr main ->
match_with main ()
{ retc = (fun v ->
let l = match !pr with Waiting l -> l | _ -> failwith "impossible" in
List.iter (fun k -> enqueue (fun () -> continue k v)) l;
pr := Done v;
schedule ()
);
exnc = raise;
effc = (fun (type b) (eff: b Effect.t) ->
match eff with
| Async f -> Some (fun (k: (b,_) continuation) ->
let pr = ref (Waiting []) in
enqueue (fun () -> continue k pr);
fork pr f
)
| Yield -> Some (fun (k: (b,_) continuation) ->
enqueue (continue k);
schedule ()
)
| Await p -> Some (fun (k: (b,_) continuation) ->
begin match !p with
| Done v -> continue k v
| Waiting l -> begin
p := Waiting (k::l);
schedule ()
end
end
)
| Accept fd -> Some (fun (k: (b,_) continuation) ->
failwith "accept not implemented"
)
| Send (fd,buf,pos,len,mode) -> Some (fun (k: (b,_) continuation) ->
failwith "send not implemented"
)
| (Recv (fd,buf,pos,len,mode) as e) -> Some (fun (k: (b,_) continuation) ->
if ready_to_read fd then
continue k (Unix.recv fd buf pos len mode)
else begin
Hashtbl.add br fd (Blocked (e, k));
schedule ()
end
)
| _ -> None
)}
in
fork (ref (Waiting [])) main
end
module M = Echo.Make(struct
let accept = Aio.accept
let recv = Aio.recv
let send = Aio.send
let fork f = ignore (Aio.async f)
let run f = Aio.run f
let non_blocking_mode = true
end)
let _ = M.start ()
| null | https://raw.githubusercontent.com/ocaml-multicore/ocaml-effects-tutorial/998376931b7fdaed5d54cb96b39b301b993ba995/sources/echo_async.ml | ocaml | * Type of promises
* [async f] runs [f] concurrently
* [await p] returns the result of the promise.
* yields control to another task
* Runs the scheduler
******************
tasks blocked on reads
tasks blocked on writes
runnable tasks available
no runnable tasks, and no blocked tasks => we're done.
no runnable tasks, but blocked tasks available | open Printf
module type Aio = sig
type 'a promise
val async : (unit -> 'a) -> 'a promise
val await : 'a promise -> 'a
val yield : unit -> unit
val accept : Unix.file_descr -> Unix.file_descr * Unix.sockaddr
val recv : Unix.file_descr -> bytes -> int -> int -> Unix.msg_flag list -> int
val send : Unix.file_descr -> bytes -> int -> int -> Unix.msg_flag list -> int
val run : (unit -> 'a) -> unit
end
module Aio : Aio = struct
open Effect
open Effect.Deep
type 'a _promise =
Waiting of ('a,unit) continuation list
| Done of 'a
type 'a promise = 'a _promise ref
type _ Effect.t += Async : (unit -> 'a) -> 'a promise Effect.t
let async f = perform (Async f)
type _ Effect.t += Yield : unit Effect.t
let yield () = perform Yield
type _ Effect.t += Await : 'a promise -> 'a Effect.t
let await p = perform (Await p)
type file_descr = Unix.file_descr
type sockaddr = Unix.sockaddr
type msg_flag = Unix.msg_flag
type _ Effect.t += Accept : file_descr -> (file_descr * sockaddr) Effect.t
let accept fd = perform (Accept fd)
type _ Effect.t += Recv : file_descr * bytes * int * int * msg_flag list -> int Effect.t
let recv fd buf pos len mode = perform (Recv (fd, buf, pos, len, mode))
type _ Effect.t += Send : file_descr * bytes * int * int * msg_flag list -> int Effect.t
let send fd bus pos len mode = perform (Send (fd, bus, pos, len, mode))
let ready_to_read fd =
match Unix.select [fd] [] [] 0. with
| [], _, _ -> false
| _ -> true
let ready_to_write fd =
match Unix.select [] [fd] [] 0. with
| _, [], _ -> false
| _ -> true
let q = Queue.create ()
let enqueue t = Queue.push t q
type blocked = Blocked : 'a Effect.t * ('a, unit) continuation -> blocked
let br = Hashtbl.create 13
let bw = Hashtbl.create 13
let rec schedule () =
if not (Queue.is_empty q) then
Queue.pop q ()
else if Hashtbl.length br = 0 && Hashtbl.length bw = 0 then
()
let rd_fds = Hashtbl.fold (fun fd _ acc -> fd::acc) br [] in
let wr_fds = Hashtbl.fold (fun fd _ acc -> fd::acc) bw [] in
let rdy_rd_fds, rdy_wr_fds, _ = Unix.select rd_fds wr_fds [] (-1.) in
let rec resume ht = function
| [] -> ()
| x::xs ->
begin match Hashtbl.find ht x with
| Blocked (Recv (fd, buf, pos, len, mode), k) ->
enqueue (fun () -> continue k (Unix.recv fd buf pos len mode))
| Blocked (Accept fd, k) -> failwith "not implemented"
| Blocked (Send (fd, buf, pos, len, mode), k) -> failwith "not implemented"
| Blocked _ -> failwith "impossible"
end;
Hashtbl.remove ht x
in
resume br rdy_rd_fds;
resume br rdy_wr_fds;
schedule ()
end
let run main =
let rec fork : 'a. 'a promise -> (unit -> 'a) -> unit =
fun pr main ->
match_with main ()
{ retc = (fun v ->
let l = match !pr with Waiting l -> l | _ -> failwith "impossible" in
List.iter (fun k -> enqueue (fun () -> continue k v)) l;
pr := Done v;
schedule ()
);
exnc = raise;
effc = (fun (type b) (eff: b Effect.t) ->
match eff with
| Async f -> Some (fun (k: (b,_) continuation) ->
let pr = ref (Waiting []) in
enqueue (fun () -> continue k pr);
fork pr f
)
| Yield -> Some (fun (k: (b,_) continuation) ->
enqueue (continue k);
schedule ()
)
| Await p -> Some (fun (k: (b,_) continuation) ->
begin match !p with
| Done v -> continue k v
| Waiting l -> begin
p := Waiting (k::l);
schedule ()
end
end
)
| Accept fd -> Some (fun (k: (b,_) continuation) ->
failwith "accept not implemented"
)
| Send (fd,buf,pos,len,mode) -> Some (fun (k: (b,_) continuation) ->
failwith "send not implemented"
)
| (Recv (fd,buf,pos,len,mode) as e) -> Some (fun (k: (b,_) continuation) ->
if ready_to_read fd then
continue k (Unix.recv fd buf pos len mode)
else begin
Hashtbl.add br fd (Blocked (e, k));
schedule ()
end
)
| _ -> None
)}
in
fork (ref (Waiting [])) main
end
module M = Echo.Make(struct
let accept = Aio.accept
let recv = Aio.recv
let send = Aio.send
let fork f = ignore (Aio.async f)
let run f = Aio.run f
let non_blocking_mode = true
end)
let _ = M.start ()
|
ea91238beb7d4c40e4b07b32dac9ba59ab5e3e8f55451e87aecfab7dc32f4a29 | janestreet/universe | compare_core.mli | open! Core
open! Import
include module type of struct
include Patdiff_kernel.Compare_core
end
include Patdiff_kernel.Compare_core.S
val diff_files
: Configuration.t
-> prev_file:string
-> next_file:string
-> [ `Different | `Same ]
val diff_dirs
: Configuration.t
-> prev_dir:string
-> next_dir:string
-> file_filter:(string * Unix.stats -> bool) option
-> [ `Different | `Same ]
| null | https://raw.githubusercontent.com/janestreet/universe/b6cb56fdae83f5d55f9c809f1c2a2b50ea213126/patdiff/lib/src/compare_core.mli | ocaml | open! Core
open! Import
include module type of struct
include Patdiff_kernel.Compare_core
end
include Patdiff_kernel.Compare_core.S
val diff_files
: Configuration.t
-> prev_file:string
-> next_file:string
-> [ `Different | `Same ]
val diff_dirs
: Configuration.t
-> prev_dir:string
-> next_dir:string
-> file_filter:(string * Unix.stats -> bool) option
-> [ `Different | `Same ]
|
|
d8ce272959ec48fd4ad17a38b83f1282e8cf54c841428aa47c137b008b2e114b | dbuenzli/rresult | rresult.mli | ---------------------------------------------------------------------------
Copyright ( c ) 2014 The rresult programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2014 The rresult programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
* Result value combinators .
{ b Note . } OCaml 4.08 provides the { ! . Result } module
which you should prefer to [ ] .
[ ] is a module for handling computation results and errors
in an explicit and declarative manner without resorting to
exceptions . It defines a { ! result } type equal to OCaml 4.03 's
[ result ] type and { { ! R}combinators } to operate on these values .
Open the module to use it , this defines the { { ! type } ,
the { ! R.Infix } operators { ! R } in your scope .
Consult { { ! usage}usage guidelines } for the type .
{b Note.} OCaml 4.08 provides the {!Stdlib.Result} module
which you should prefer to [Rresult].
[Rresult] is a module for handling computation results and errors
in an explicit and declarative manner without resorting to
exceptions. It defines a {!result} type equal to OCaml 4.03's
[result] type and {{!R}combinators} to operate on these values.
Open the module to use it, this defines the {{!result}result type},
the {!R.Infix} operators {!R} in your scope.
Consult {{!usage}usage guidelines} for the type. *)
* { 1 Results }
(** The type for results. *)
type ('a, 'b) result = ('a, 'b) Stdlib.result = Ok of 'a | Error of 'b
val ( >>= ) : ('a, 'b) result -> ('a -> ('c, 'b) result) -> ('c, 'b) result
* [ ( > > =) ] is { ! ) } .
val ( >>| ) : ('a, 'b) result -> ('a -> 'c) -> ('c, 'b) result
(** [(>>|)] is {!R.(>>|)}. *)
(** Result value combinators. *)
module R : sig
* { 1 Results }
type ('a, 'b) t = ('a, 'b) result
(** The type for results. *)
val ok : 'a -> ('a, 'b) result
(** [ok v] is [Ok v]. *)
val error : 'b -> ('a, 'b) result
(** [error e] is [Error e]. *)
val reword_error : ('b -> 'c) -> ('a, 'b) result -> ('a, 'c) result
(** [reword_error reword r] is:
{ul
{- [r] if [r = Ok v]}
{- [Error (reword e)] if [r = Error e]}} *)
val get_ok : ('a, 'b) result -> 'a
(** [get_ok r] is [v] if [r = Ok v] and raises [Invalid_argument]
otherwise. *)
val get_error : ('a, 'b) result -> 'b
(** [get_error r] is [e] if [r = Error e] and raises [Invalid_argument]
otherwise. *)
(**/**)
val return : 'a -> ('a, 'b) result
val fail : 'b -> ('a, 'b) result
(**/**)
(** {1 Composing results} *)
val bind : ('a, 'b) result -> ('a -> ('c, 'b) result) -> ('c, 'b) result
(** [bind r f] is [f v] if [r = Ok v] and [r] if [r = Error _]. *)
val map : ('a -> 'c) -> ('a, 'b) result -> ('c, 'b) result
* [ map f r ] is [ bind ( fun v - > ret ( f v ) ) ] r.
val join : (('a, 'b) result, 'b) result -> ('a, 'b) result
(** [join r] is [v] if [r = Ok v] and [r] otherwise. *)
val ( >>= ) : ('a, 'b) result -> ('a -> ('c, 'b) result) -> ('c, 'b) result
(** [r >>= f] is {!bind}[ r f]. *)
val ( >>| ) : ('a, 'b) result -> ('a -> 'c) -> ('c, 'b) result
(** [r >>| f] is {!map}[ r f]. *)
(** Infix operators.
Gathers {!R}'s infix operators. *)
module Infix : sig
(** {1 Infix operators} *)
val ( >>= ) : ('a, 'b) result -> ('a -> ('c, 'b) result) -> ('c, 'b) result
* [ ( > > =) ] is { ! ) } .
val ( >>| ) : ('a, 'b) result -> ('a -> 'c) -> ('c, 'b) result
(** [(>>|)] is {!R.(>>|)}. *)
end
* { 1 : msgs Error messages }
type msg = [ `Msg of string ]
(** The type for (error) messages. *)
val msg : string -> [> msg]
(** [msg s] is [`Msg s]. *)
val msgf : ('a, Format.formatter, unit, [> msg]) format4 -> 'a
(** [msgf fmt ...] formats a message according to [fmt]. *)
val pp_msg : Format.formatter -> msg -> unit
(** [pp_msg ppf m] prints [m] on [ppf]. *)
val error_msg : string -> ('a, [> msg]) result
(** [error_msg s] is [error (`Msg s)]. *)
val error_msgf : ('a, Format.formatter, unit, ('b, [> msg]) result)
format4 -> 'a
(** [error_msgf fmt ...] is an error message formatted according to [fmt]. *)
val reword_error_msg : ?replace:bool -> (string -> msg) ->
('a, msg) result -> ('a, [> msg]) result
(** [reword_error_msg ~replace reword r] is like {!reword_error} except
if [replace] is [false] (default), the result of [reword old_msg] is
concatened, on a new line to the old message. *)
val error_to_msg : pp_error:(Format.formatter -> 'b -> unit) ->
('a, 'b) result -> ('a, [> msg]) result
(** [error_to_msg ~pp_error r] converts errors in [r] with [pp_error] to
an error message. *)
val error_msg_to_invalid_arg : ('a, msg) result -> 'a
(** [err_msg_to_invalid_arg r] is [v] if [r = Ok v] and
@raise Invalid_argument with the error message otherwise. *)
val open_error_msg : ('a, msg) result -> ('a, [> msg]) result
(** [open_error_msg r] allows to combine a closed error message
variant with other variants. *)
val failwith_error_msg : ('a, msg) result -> 'a
(** [failwith_error_msg r] raises [Failure m] if [r] is
[Error (`Msg m)]. *)
* { 1 : exn Trapping unexpected exceptions }
{ e Getting rid of [ null ] was not enough } .
{e Getting rid of [null] was not enough}. *)
type exn_trap = [ `Exn_trap of exn * Printexc.raw_backtrace ]
(** The type for exception traps. *)
val pp_exn_trap : Format.formatter -> exn_trap -> unit
(** [pp_exn_trap ppf bt] prints [bt] on [ppf]. *)
val trap_exn : ('a -> 'b) -> 'a -> ('b, [> exn_trap]) result
(** [trap_exn f v] is [f v] and traps any exception that may occur as
an exception trap error. *)
val error_exn_trap_to_msg : ('a, exn_trap) result -> ('a, [> msg]) result
(** [error_exn_trap_to_msg r] converts exception trap errors in
[r] to an error message. *)
val open_error_exn_trap : ('a, exn_trap) result -> ('a, [> exn_trap]) result
(** [open_error_exn_trap r] allows to combine a closed exception trap error
variant with other variants. *)
* { 1 : print Pretty printing }
val pp :
ok:(Format.formatter -> 'a -> unit) ->
error:(Format.formatter -> 'b -> unit) -> Format.formatter ->
('a, 'b) result -> unit
* [ pp ~ok ~error ppf r ] prints [ r ] on [ ppf ] using [ ok ] and [ error ]
according to [ r ] .
according to [r]. *)
val dump :
ok:(Format.formatter -> 'a -> unit) ->
error:(Format.formatter -> 'b -> unit) -> Format.formatter ->
('a, 'b) result -> unit
* [ dump ~ok ~error ] formats an OCaml result value using [ ok ] or [ error ]
according to case , no parentheses are added .
according to case, no parentheses are added. *)
* { 1 : pred Predicates and comparison }
val is_ok : ('a, 'b) result -> bool
(** [is_ok r] is [true] iff [r = Ok _]. *)
val is_error : ('a, 'b) result -> bool
(** [is_error r] is [true] iff [r = Error _]. *)
val equal : ok:('a -> 'a -> bool) -> error:('b -> 'b -> bool) ->
('a, 'b) result -> ('a, 'b) result -> bool
* [ equal ~ok ~error r r ' ] tests [ r ] and [ r ' ] for equality using [ ok ]
and [ error ] .
and [error]. *)
val compare : ok:('a -> 'a -> int) -> error:('b -> 'b -> int) ->
('a, 'b) result -> ('a, 'b) result -> int
* [ compare ~ok ~error r r ' ] totally orders [ r ] and [ r ' ] using [ ok ]
and [ error ] .
and [error]. *)
* { 1 : convert Converting }
val to_option : ('a, 'b) result -> 'a option
(** [to_option r] is [Some v] if [r = Ok v] and [None] otherwise. *)
val of_option : none:(unit -> ('a, 'b) result) -> 'a option -> ('a, 'b) result
(** [of_option ~none r] is [Ok v] if [r = Some v] and [none ()] otherwise. *)
val to_presult : ('a, 'b) result -> [> `Ok of 'a | `Error of 'b ]
(** [to_presult r] is [r] as a polymorphic variant result value. *)
val of_presult : [< `Ok of 'a | `Error of 'b ] -> ('a, 'b) result
(** [of_presult pr] is [pr] as a result value. *)
* { 1 : ignore Ignoring errors }
{ b Warning . } Using these functions is , most of the time , a bad idea .
{b Warning.} Using these functions is, most of the time, a bad idea. *)
val ignore_error : use:('b -> 'a) -> ('a, 'b) result -> 'a
(** [ignore_error ~use r] is [v] if [r = Ok v] and [use e] if
[r = Error e]. *)
val kignore_error :
use:('b -> ('a, 'c) result) -> ('a, 'b) result -> ('a, 'c) result
(** [kignore_error ~use r] is [r] if [r = Ok v] and [use e] if
[r = Error e]. *)
end
* { 1 : usage Usage design guidelines }
These are rough design guidelines , do n't forget to think .
{ 2 Error messages }
Use { { ! R.msgs}error messages } if :
{ ol
{ - Your error messages do n't need to be localized , e.g. scripts ,
command line programs . }
{ - The errors do n't need to be processed . They are just meant to
be logged at certain point in your program . } }
If the above does n't hold and your errors need to be processed for
localization or error recovery then use a custom error type in your
result values .
{ 2 Custom error types }
If your module has specific errors then define an error type , and
a result type that tags this error type with the library name ( or
any other tag that may make sense , see for example { ! R.exn } ) along
with the following functions :
{ [
module : sig
type error = ...
type ' a result = ( ' a , [ ` Mod of error ] ) Rresult.result
val pp_error : Format.formatter - > [ ` Mod of error ] - > unit
val open_error : ' a result - > ( ' a , [ > ` Mod of error ] ) Rresult.result
val error_to_msg : ' a result - > ( ' a , Rresult.R.msg ) Rresult.result
val f : ... - > ' a result
end
] }
If your library has generic errors that may be useful in other context
or shared among modules and to be composed together , then define your
error type itself as being a variant and return these values
without tagging them .
{ [
module : sig
type error = [ ` Generic of ... | ... ]
type ' a result = ( ' a , error ) Rresult.result
val pp_error : Format.formatter - > error - > unit
val open_error : ' a result - > ( ' a , [ > error ] ) Rresult.result
val error_to_msg : ' a result - > ( ' a , Rresult.R.msg ) Rresult.result
val f : ... - > ' a result
end
] }
In the latter case it may still be useful to provide a function to
tag these errors whenever they reach a certain point of the program .
For this the following function could be added to [ Mod ] :
{ [
val pack_error : ' a result - > ( ' a , [ > ` Mod of error ] ) Rresult.result
] }
You should then provide the following functions aswell , so that
the packed error composes well in the system :
{ [
val pp_pack_error : Format.formatter - > [ ` Mod of error ] - > unit
val open_pack_error : ( ' a , [ ` Mod of error ] ) Rresult.result - >
( ' a , [ > ` Mod of error ] ) Rresult.result
val error_pack_to_msg : ( ' a , [ ` Mod of error ] ) Rresult.result - >
( ' a , Rresult.R.msg ) Rresult.result
] }
These are rough design guidelines, don't forget to think.
{2 Error messages}
Use {{!R.msgs}error messages} if:
{ol
{- Your error messages don't need to be localized, e.g. scripts,
command line programs.}
{- The errors don't need to be processed. They are just meant to
be logged at certain point in your program.}}
If the above doesn't hold and your errors need to be processed for
localization or error recovery then use a custom error type in your
result values.
{2 Custom error types}
If your module has specific errors then define an error type, and
a result type that tags this error type with the library name (or
any other tag that may make sense, see for example {!R.exn}) along
with the following functions:
{[
module Mod : sig
type error = ...
type 'a result = ('a, [`Mod of error]) Rresult.result
val pp_error : Format.formatter -> [`Mod of error] -> unit
val open_error : 'a result -> ('a, [> `Mod of error]) Rresult.result
val error_to_msg : 'a result -> ('a, Rresult.R.msg) Rresult.result
val f : ... -> 'a result
end
]}
If your library has generic errors that may be useful in other context
or shared among modules and to be composed together, then define your
error type itself as being a variant and return these values
without tagging them.
{[
module Mod : sig
type error = [`Generic of ... | ... ]
type 'a result = ('a, error) Rresult.result
val pp_error : Format.formatter -> error -> unit
val open_error : 'a result -> ('a, [> error]) Rresult.result
val error_to_msg : 'a result -> ('a, Rresult.R.msg) Rresult.result
val f : ... -> 'a result
end
]}
In the latter case it may still be useful to provide a function to
tag these errors whenever they reach a certain point of the program.
For this the following function could be added to [Mod]:
{[
val pack_error : 'a result -> ('a, [> `Mod of error]) Rresult.result
]}
You should then provide the following functions aswell, so that
the packed error composes well in the system:
{[
val pp_pack_error : Format.formatter -> [ `Mod of error] -> unit
val open_pack_error : ('a, [ `Mod of error]) Rresult.result ->
('a, [> `Mod of error]) Rresult.result
val error_pack_to_msg : ('a, [ `Mod of error]) Rresult.result ->
('a, Rresult.R.msg) Rresult.result
]}
*)
---------------------------------------------------------------------------
Copyright ( c ) 2014 The rresult programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 The rresult programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/rresult/5324558067a391bf8827ee76d413399887030c2f/src/rresult.mli | ocaml | * The type for results.
* [(>>|)] is {!R.(>>|)}.
* Result value combinators.
* The type for results.
* [ok v] is [Ok v].
* [error e] is [Error e].
* [reword_error reword r] is:
{ul
{- [r] if [r = Ok v]}
{- [Error (reword e)] if [r = Error e]}}
* [get_ok r] is [v] if [r = Ok v] and raises [Invalid_argument]
otherwise.
* [get_error r] is [e] if [r = Error e] and raises [Invalid_argument]
otherwise.
*/*
*/*
* {1 Composing results}
* [bind r f] is [f v] if [r = Ok v] and [r] if [r = Error _].
* [join r] is [v] if [r = Ok v] and [r] otherwise.
* [r >>= f] is {!bind}[ r f].
* [r >>| f] is {!map}[ r f].
* Infix operators.
Gathers {!R}'s infix operators.
* {1 Infix operators}
* [(>>|)] is {!R.(>>|)}.
* The type for (error) messages.
* [msg s] is [`Msg s].
* [msgf fmt ...] formats a message according to [fmt].
* [pp_msg ppf m] prints [m] on [ppf].
* [error_msg s] is [error (`Msg s)].
* [error_msgf fmt ...] is an error message formatted according to [fmt].
* [reword_error_msg ~replace reword r] is like {!reword_error} except
if [replace] is [false] (default), the result of [reword old_msg] is
concatened, on a new line to the old message.
* [error_to_msg ~pp_error r] converts errors in [r] with [pp_error] to
an error message.
* [err_msg_to_invalid_arg r] is [v] if [r = Ok v] and
@raise Invalid_argument with the error message otherwise.
* [open_error_msg r] allows to combine a closed error message
variant with other variants.
* [failwith_error_msg r] raises [Failure m] if [r] is
[Error (`Msg m)].
* The type for exception traps.
* [pp_exn_trap ppf bt] prints [bt] on [ppf].
* [trap_exn f v] is [f v] and traps any exception that may occur as
an exception trap error.
* [error_exn_trap_to_msg r] converts exception trap errors in
[r] to an error message.
* [open_error_exn_trap r] allows to combine a closed exception trap error
variant with other variants.
* [is_ok r] is [true] iff [r = Ok _].
* [is_error r] is [true] iff [r = Error _].
* [to_option r] is [Some v] if [r = Ok v] and [None] otherwise.
* [of_option ~none r] is [Ok v] if [r = Some v] and [none ()] otherwise.
* [to_presult r] is [r] as a polymorphic variant result value.
* [of_presult pr] is [pr] as a result value.
* [ignore_error ~use r] is [v] if [r = Ok v] and [use e] if
[r = Error e].
* [kignore_error ~use r] is [r] if [r = Ok v] and [use e] if
[r = Error e]. | ---------------------------------------------------------------------------
Copyright ( c ) 2014 The rresult programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2014 The rresult programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
* Result value combinators .
{ b Note . } OCaml 4.08 provides the { ! . Result } module
which you should prefer to [ ] .
[ ] is a module for handling computation results and errors
in an explicit and declarative manner without resorting to
exceptions . It defines a { ! result } type equal to OCaml 4.03 's
[ result ] type and { { ! R}combinators } to operate on these values .
Open the module to use it , this defines the { { ! type } ,
the { ! R.Infix } operators { ! R } in your scope .
Consult { { ! usage}usage guidelines } for the type .
{b Note.} OCaml 4.08 provides the {!Stdlib.Result} module
which you should prefer to [Rresult].
[Rresult] is a module for handling computation results and errors
in an explicit and declarative manner without resorting to
exceptions. It defines a {!result} type equal to OCaml 4.03's
[result] type and {{!R}combinators} to operate on these values.
Open the module to use it, this defines the {{!result}result type},
the {!R.Infix} operators {!R} in your scope.
Consult {{!usage}usage guidelines} for the type. *)
* { 1 Results }
type ('a, 'b) result = ('a, 'b) Stdlib.result = Ok of 'a | Error of 'b
val ( >>= ) : ('a, 'b) result -> ('a -> ('c, 'b) result) -> ('c, 'b) result
* [ ( > > =) ] is { ! ) } .
val ( >>| ) : ('a, 'b) result -> ('a -> 'c) -> ('c, 'b) result
module R : sig
* { 1 Results }
type ('a, 'b) t = ('a, 'b) result
val ok : 'a -> ('a, 'b) result
val error : 'b -> ('a, 'b) result
val reword_error : ('b -> 'c) -> ('a, 'b) result -> ('a, 'c) result
val get_ok : ('a, 'b) result -> 'a
val get_error : ('a, 'b) result -> 'b
val return : 'a -> ('a, 'b) result
val fail : 'b -> ('a, 'b) result
val bind : ('a, 'b) result -> ('a -> ('c, 'b) result) -> ('c, 'b) result
val map : ('a -> 'c) -> ('a, 'b) result -> ('c, 'b) result
* [ map f r ] is [ bind ( fun v - > ret ( f v ) ) ] r.
val join : (('a, 'b) result, 'b) result -> ('a, 'b) result
val ( >>= ) : ('a, 'b) result -> ('a -> ('c, 'b) result) -> ('c, 'b) result
val ( >>| ) : ('a, 'b) result -> ('a -> 'c) -> ('c, 'b) result
module Infix : sig
val ( >>= ) : ('a, 'b) result -> ('a -> ('c, 'b) result) -> ('c, 'b) result
* [ ( > > =) ] is { ! ) } .
val ( >>| ) : ('a, 'b) result -> ('a -> 'c) -> ('c, 'b) result
end
* { 1 : msgs Error messages }
type msg = [ `Msg of string ]
val msg : string -> [> msg]
val msgf : ('a, Format.formatter, unit, [> msg]) format4 -> 'a
val pp_msg : Format.formatter -> msg -> unit
val error_msg : string -> ('a, [> msg]) result
val error_msgf : ('a, Format.formatter, unit, ('b, [> msg]) result)
format4 -> 'a
val reword_error_msg : ?replace:bool -> (string -> msg) ->
('a, msg) result -> ('a, [> msg]) result
val error_to_msg : pp_error:(Format.formatter -> 'b -> unit) ->
('a, 'b) result -> ('a, [> msg]) result
val error_msg_to_invalid_arg : ('a, msg) result -> 'a
val open_error_msg : ('a, msg) result -> ('a, [> msg]) result
val failwith_error_msg : ('a, msg) result -> 'a
* { 1 : exn Trapping unexpected exceptions }
{ e Getting rid of [ null ] was not enough } .
{e Getting rid of [null] was not enough}. *)
type exn_trap = [ `Exn_trap of exn * Printexc.raw_backtrace ]
val pp_exn_trap : Format.formatter -> exn_trap -> unit
val trap_exn : ('a -> 'b) -> 'a -> ('b, [> exn_trap]) result
val error_exn_trap_to_msg : ('a, exn_trap) result -> ('a, [> msg]) result
val open_error_exn_trap : ('a, exn_trap) result -> ('a, [> exn_trap]) result
* { 1 : print Pretty printing }
val pp :
ok:(Format.formatter -> 'a -> unit) ->
error:(Format.formatter -> 'b -> unit) -> Format.formatter ->
('a, 'b) result -> unit
* [ pp ~ok ~error ppf r ] prints [ r ] on [ ppf ] using [ ok ] and [ error ]
according to [ r ] .
according to [r]. *)
val dump :
ok:(Format.formatter -> 'a -> unit) ->
error:(Format.formatter -> 'b -> unit) -> Format.formatter ->
('a, 'b) result -> unit
* [ dump ~ok ~error ] formats an OCaml result value using [ ok ] or [ error ]
according to case , no parentheses are added .
according to case, no parentheses are added. *)
* { 1 : pred Predicates and comparison }
val is_ok : ('a, 'b) result -> bool
val is_error : ('a, 'b) result -> bool
val equal : ok:('a -> 'a -> bool) -> error:('b -> 'b -> bool) ->
('a, 'b) result -> ('a, 'b) result -> bool
* [ equal ~ok ~error r r ' ] tests [ r ] and [ r ' ] for equality using [ ok ]
and [ error ] .
and [error]. *)
val compare : ok:('a -> 'a -> int) -> error:('b -> 'b -> int) ->
('a, 'b) result -> ('a, 'b) result -> int
* [ compare ~ok ~error r r ' ] totally orders [ r ] and [ r ' ] using [ ok ]
and [ error ] .
and [error]. *)
* { 1 : convert Converting }
val to_option : ('a, 'b) result -> 'a option
val of_option : none:(unit -> ('a, 'b) result) -> 'a option -> ('a, 'b) result
val to_presult : ('a, 'b) result -> [> `Ok of 'a | `Error of 'b ]
val of_presult : [< `Ok of 'a | `Error of 'b ] -> ('a, 'b) result
* { 1 : ignore Ignoring errors }
{ b Warning . } Using these functions is , most of the time , a bad idea .
{b Warning.} Using these functions is, most of the time, a bad idea. *)
val ignore_error : use:('b -> 'a) -> ('a, 'b) result -> 'a
val kignore_error :
use:('b -> ('a, 'c) result) -> ('a, 'b) result -> ('a, 'c) result
end
* { 1 : usage Usage design guidelines }
These are rough design guidelines , do n't forget to think .
{ 2 Error messages }
Use { { ! R.msgs}error messages } if :
{ ol
{ - Your error messages do n't need to be localized , e.g. scripts ,
command line programs . }
{ - The errors do n't need to be processed . They are just meant to
be logged at certain point in your program . } }
If the above does n't hold and your errors need to be processed for
localization or error recovery then use a custom error type in your
result values .
{ 2 Custom error types }
If your module has specific errors then define an error type , and
a result type that tags this error type with the library name ( or
any other tag that may make sense , see for example { ! R.exn } ) along
with the following functions :
{ [
module : sig
type error = ...
type ' a result = ( ' a , [ ` Mod of error ] ) Rresult.result
val pp_error : Format.formatter - > [ ` Mod of error ] - > unit
val open_error : ' a result - > ( ' a , [ > ` Mod of error ] ) Rresult.result
val error_to_msg : ' a result - > ( ' a , Rresult.R.msg ) Rresult.result
val f : ... - > ' a result
end
] }
If your library has generic errors that may be useful in other context
or shared among modules and to be composed together , then define your
error type itself as being a variant and return these values
without tagging them .
{ [
module : sig
type error = [ ` Generic of ... | ... ]
type ' a result = ( ' a , error ) Rresult.result
val pp_error : Format.formatter - > error - > unit
val open_error : ' a result - > ( ' a , [ > error ] ) Rresult.result
val error_to_msg : ' a result - > ( ' a , Rresult.R.msg ) Rresult.result
val f : ... - > ' a result
end
] }
In the latter case it may still be useful to provide a function to
tag these errors whenever they reach a certain point of the program .
For this the following function could be added to [ Mod ] :
{ [
val pack_error : ' a result - > ( ' a , [ > ` Mod of error ] ) Rresult.result
] }
You should then provide the following functions aswell , so that
the packed error composes well in the system :
{ [
val pp_pack_error : Format.formatter - > [ ` Mod of error ] - > unit
val open_pack_error : ( ' a , [ ` Mod of error ] ) Rresult.result - >
( ' a , [ > ` Mod of error ] ) Rresult.result
val error_pack_to_msg : ( ' a , [ ` Mod of error ] ) Rresult.result - >
( ' a , Rresult.R.msg ) Rresult.result
] }
These are rough design guidelines, don't forget to think.
{2 Error messages}
Use {{!R.msgs}error messages} if:
{ol
{- Your error messages don't need to be localized, e.g. scripts,
command line programs.}
{- The errors don't need to be processed. They are just meant to
be logged at certain point in your program.}}
If the above doesn't hold and your errors need to be processed for
localization or error recovery then use a custom error type in your
result values.
{2 Custom error types}
If your module has specific errors then define an error type, and
a result type that tags this error type with the library name (or
any other tag that may make sense, see for example {!R.exn}) along
with the following functions:
{[
module Mod : sig
type error = ...
type 'a result = ('a, [`Mod of error]) Rresult.result
val pp_error : Format.formatter -> [`Mod of error] -> unit
val open_error : 'a result -> ('a, [> `Mod of error]) Rresult.result
val error_to_msg : 'a result -> ('a, Rresult.R.msg) Rresult.result
val f : ... -> 'a result
end
]}
If your library has generic errors that may be useful in other context
or shared among modules and to be composed together, then define your
error type itself as being a variant and return these values
without tagging them.
{[
module Mod : sig
type error = [`Generic of ... | ... ]
type 'a result = ('a, error) Rresult.result
val pp_error : Format.formatter -> error -> unit
val open_error : 'a result -> ('a, [> error]) Rresult.result
val error_to_msg : 'a result -> ('a, Rresult.R.msg) Rresult.result
val f : ... -> 'a result
end
]}
In the latter case it may still be useful to provide a function to
tag these errors whenever they reach a certain point of the program.
For this the following function could be added to [Mod]:
{[
val pack_error : 'a result -> ('a, [> `Mod of error]) Rresult.result
]}
You should then provide the following functions aswell, so that
the packed error composes well in the system:
{[
val pp_pack_error : Format.formatter -> [ `Mod of error] -> unit
val open_pack_error : ('a, [ `Mod of error]) Rresult.result ->
('a, [> `Mod of error]) Rresult.result
val error_pack_to_msg : ('a, [ `Mod of error]) Rresult.result ->
('a, Rresult.R.msg) Rresult.result
]}
*)
---------------------------------------------------------------------------
Copyright ( c ) 2014 The rresult programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 The rresult programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
255f9e4dbbcd96ee7e0e8f7f311c3b5204f186164ef9956a2ffd435f6539981c | klarna/snabbkaffe | asciiart.erl | Copyright 2019 - 2020 Klarna Bank AB
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(asciiart).
-export([ init/0
, dimensions/1
, render/1
, char/3
, char/2
, line/4
, line/3
, string/4
, string/3
, plot/1
, plot/2
, draw/2
, draw/1
, visible/3
]).
%%====================================================================
%% Types
%%====================================================================
-type vector() :: {integer(), integer()}.
-type cont() :: fun((canvas()) -> canvas()).
-opaque canvas() :: #{vector() => char()}.
-type plot_data() :: [{char(), [{float(), float()}]}].
-export_type([vector/0, canvas/0]).
-define(epsilon, 1.0e-6).
%%====================================================================
%% API functions
%%====================================================================
-spec init() -> canvas().
init() ->
#{}.
-spec render(canvas()) -> iolist().
render(Cnv) ->
{{Xm, Ym}, {XM, YM}} = dimensions(Cnv),
[[[maps:get({X, Y}, Cnv, $ ) || X <- lists:seq(Xm, XM)], $\n]
|| Y <- lists:reverse(lists:seq(Ym, YM))].
-spec draw([cont()], canvas()) -> canvas().
draw(Ops, Cnv) ->
lists:foldl(fun(F, Acc) -> F(Acc) end, Cnv, Ops).
-spec draw([cont()]) -> canvas().
draw(Ops) ->
draw(Ops, init()).
-spec dimensions(canvas()) -> {vector(), vector()}.
dimensions(Cnv) ->
Fun = fun({X, Y}, _, {{Xm, Ym}, {XM, YM}}) ->
{ {min(X, Xm), min(Y, Ym)}
, {max(X, XM), max(Y, YM)}
}
end,
maps:fold(Fun, {{1, 1}, {1, 1}}, Cnv).
-spec char(canvas(), vector(), char()) -> canvas().
char(Cnv, Pos, Char) ->
Cnv #{Pos => Char}.
-spec char(vector(), char()) -> cont().
char(Pos, Char) ->
fun(Cnv) -> char(Cnv, Pos, Char) end.
-spec line(canvas(), vector(), vector(), char()) -> canvas().
line(Cnv, {X1, Y1}, {X2, Y2}, Char) ->
X = X2 - X1,
Y = Y2 - Y1,
N = max(1, max(abs(X), abs(Y))),
lists:foldl( fun(Pos, Cnv) -> char(Cnv, Pos, Char) end
, Cnv
, [{ X1 + round(X * I / N)
, Y1 + round(Y * I / N)
} || I <- lists:seq(0, N)]
).
-spec line(vector(), vector(), char()) -> cont().
line(F, T, C) ->
fun(Cnv) -> line(Cnv, F, T, C) end.
-spec string(canvas(), vector(), string(), left | right) -> canvas().
string(Cnv, _, [], _) ->
Cnv;
string(Cnv, {X, Y}, String, Direction) ->
XL = case Direction of
right ->
lists:seq(X, X + length(String) - 1);
left ->
lists:seq(X - length(String) + 1, X)
end,
L = lists:zip(XL, String),
lists:foldl( fun({X, Char}, Cnv) ->
char(Cnv, {X, Y}, Char)
end
, Cnv
, L
).
-spec string(vector(), string(), left | right) -> cont().
string(Pos, Str, Dir) ->
fun(Cnv) -> string(Cnv, Pos, Str, Dir) end.
-spec plot(plot_data()) -> canvas().
plot(Datapoints) ->
plot(Datapoints, #{}).
-spec plot(plot_data(), map()) -> canvas().
plot(Datapoints, Config) ->
AllDatapoints = lists:append([L || {_, L} <- Datapoints]),
{XX, YY} = lists:unzip(AllDatapoints),
Xm = bound(min, Config, XX),
XM = bound(max, Config, XX),
Ym = bound(min, Config, YY),
YM = bound(max, Config, YY),
DX = max(?epsilon, XM - Xm),
DY = max(?epsilon, YM - Ym),
%% Dimensions of the plot:
AspectRatio = maps:get(aspect_ratio, Config, 0.2),
Width = max(length(Datapoints) * 2, 70),
Height = round(Width * AspectRatio),
Frame = {{Xm, Ym}, {Width / DX, Height / DY}},
%% Draw axis
Cnv0 = draw( [ %% Vertical:
line({0, 0}, {0, Height - 1}, $|)
, char({0, Height}, $^)
%% Labels:
, string({-2, 0}, print_num(Ym), left)
, string({-2, Height}, print_num(YM), left)
Horizontal :
, line({0, 0}, {Width - 1, 0}, $-)
, char({Width, 0}, $>)
, char({0, 0}, $+)
%% Labels
, string({0, -1}, print_num(Xm), right)
, string({Width, -1}, print_num(XM), left)
]
, init()
),
lists:foldl( fun({Char, Data}, Acc) ->
draw_datapoints(Frame, Char, Data, Acc)
end
, Cnv0
, Datapoints
).
draw_datapoints(Frame, Char, Data, Acc) ->
lists:foldl( fun(Coords, Acc) ->
char(Acc, plot_coord(Frame, Coords), Char)
end
, Acc
, Data
).
print_num(Num) when is_integer(Num) ->
integer_to_list(Num);
print_num(Num) ->
lists:flatten(io_lib:format("~.6..f", [Num])).
plot_coord({{Xm, Ym}, {SX, SY}}, {X, Y}) ->
{round((X - Xm) * SX), round((Y - Ym) * SY)}.
bound(Fun, Cfg, L) ->
N = case L of
[] -> 0;
_ -> lists:Fun(L)
end,
case maps:get(include_zero, Cfg, true) of
true ->
erlang:Fun(0, N);
false ->
N
end.
-spec visible(char(), string(), [term()]) -> iolist().
visible(Char, Fmt, Args) ->
Str = lines(lists:flatten(io_lib:format(Fmt, Args))),
Width = max(79, lists:max([length(I) || I <- Str])) + 1,
N = length(Str),
Text = [string({4, Y}, S, right)
|| {Y, S} <- lists:zip( lists:seq(1, N)
, lists:reverse(Str)
)],
Cnv = draw([ asciiart:line({1, -1}, {Width, -1}, Char)
, asciiart:line({1, N + 2}, {Width, N + 2}, Char)
, asciiart:line({1, 0}, {1, N + 1}, Char)
, asciiart:line({2, 0}, {2, N + 1}, Char)
, asciiart:line({Width - 1, 0}, {Width - 1, N + 1}, Char)
, asciiart:line({Width, 0}, {Width, N + 1}, Char)
] ++ Text),
[$\n, render(Cnv), $\n].
-spec lines(string()) -> [string()].
lines(Str) ->
re:split(Str, "\n", [{return, list}]).
| null | https://raw.githubusercontent.com/klarna/snabbkaffe/2bdf6e842c825ca935b34884528f51158dd31e6e/src/asciiart.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
====================================================================
Types
====================================================================
====================================================================
API functions
====================================================================
Dimensions of the plot:
Draw axis
Vertical:
Labels:
Labels | Copyright 2019 - 2020 Klarna Bank AB
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(asciiart).
-export([ init/0
, dimensions/1
, render/1
, char/3
, char/2
, line/4
, line/3
, string/4
, string/3
, plot/1
, plot/2
, draw/2
, draw/1
, visible/3
]).
-type vector() :: {integer(), integer()}.
-type cont() :: fun((canvas()) -> canvas()).
-opaque canvas() :: #{vector() => char()}.
-type plot_data() :: [{char(), [{float(), float()}]}].
-export_type([vector/0, canvas/0]).
-define(epsilon, 1.0e-6).
-spec init() -> canvas().
init() ->
#{}.
-spec render(canvas()) -> iolist().
render(Cnv) ->
{{Xm, Ym}, {XM, YM}} = dimensions(Cnv),
[[[maps:get({X, Y}, Cnv, $ ) || X <- lists:seq(Xm, XM)], $\n]
|| Y <- lists:reverse(lists:seq(Ym, YM))].
-spec draw([cont()], canvas()) -> canvas().
draw(Ops, Cnv) ->
lists:foldl(fun(F, Acc) -> F(Acc) end, Cnv, Ops).
-spec draw([cont()]) -> canvas().
draw(Ops) ->
draw(Ops, init()).
-spec dimensions(canvas()) -> {vector(), vector()}.
dimensions(Cnv) ->
Fun = fun({X, Y}, _, {{Xm, Ym}, {XM, YM}}) ->
{ {min(X, Xm), min(Y, Ym)}
, {max(X, XM), max(Y, YM)}
}
end,
maps:fold(Fun, {{1, 1}, {1, 1}}, Cnv).
-spec char(canvas(), vector(), char()) -> canvas().
char(Cnv, Pos, Char) ->
Cnv #{Pos => Char}.
-spec char(vector(), char()) -> cont().
char(Pos, Char) ->
fun(Cnv) -> char(Cnv, Pos, Char) end.
-spec line(canvas(), vector(), vector(), char()) -> canvas().
line(Cnv, {X1, Y1}, {X2, Y2}, Char) ->
X = X2 - X1,
Y = Y2 - Y1,
N = max(1, max(abs(X), abs(Y))),
lists:foldl( fun(Pos, Cnv) -> char(Cnv, Pos, Char) end
, Cnv
, [{ X1 + round(X * I / N)
, Y1 + round(Y * I / N)
} || I <- lists:seq(0, N)]
).
-spec line(vector(), vector(), char()) -> cont().
line(F, T, C) ->
fun(Cnv) -> line(Cnv, F, T, C) end.
-spec string(canvas(), vector(), string(), left | right) -> canvas().
string(Cnv, _, [], _) ->
Cnv;
string(Cnv, {X, Y}, String, Direction) ->
XL = case Direction of
right ->
lists:seq(X, X + length(String) - 1);
left ->
lists:seq(X - length(String) + 1, X)
end,
L = lists:zip(XL, String),
lists:foldl( fun({X, Char}, Cnv) ->
char(Cnv, {X, Y}, Char)
end
, Cnv
, L
).
-spec string(vector(), string(), left | right) -> cont().
string(Pos, Str, Dir) ->
fun(Cnv) -> string(Cnv, Pos, Str, Dir) end.
-spec plot(plot_data()) -> canvas().
plot(Datapoints) ->
plot(Datapoints, #{}).
-spec plot(plot_data(), map()) -> canvas().
plot(Datapoints, Config) ->
AllDatapoints = lists:append([L || {_, L} <- Datapoints]),
{XX, YY} = lists:unzip(AllDatapoints),
Xm = bound(min, Config, XX),
XM = bound(max, Config, XX),
Ym = bound(min, Config, YY),
YM = bound(max, Config, YY),
DX = max(?epsilon, XM - Xm),
DY = max(?epsilon, YM - Ym),
AspectRatio = maps:get(aspect_ratio, Config, 0.2),
Width = max(length(Datapoints) * 2, 70),
Height = round(Width * AspectRatio),
Frame = {{Xm, Ym}, {Width / DX, Height / DY}},
line({0, 0}, {0, Height - 1}, $|)
, char({0, Height}, $^)
, string({-2, 0}, print_num(Ym), left)
, string({-2, Height}, print_num(YM), left)
Horizontal :
, line({0, 0}, {Width - 1, 0}, $-)
, char({Width, 0}, $>)
, char({0, 0}, $+)
, string({0, -1}, print_num(Xm), right)
, string({Width, -1}, print_num(XM), left)
]
, init()
),
lists:foldl( fun({Char, Data}, Acc) ->
draw_datapoints(Frame, Char, Data, Acc)
end
, Cnv0
, Datapoints
).
draw_datapoints(Frame, Char, Data, Acc) ->
lists:foldl( fun(Coords, Acc) ->
char(Acc, plot_coord(Frame, Coords), Char)
end
, Acc
, Data
).
print_num(Num) when is_integer(Num) ->
integer_to_list(Num);
print_num(Num) ->
lists:flatten(io_lib:format("~.6..f", [Num])).
plot_coord({{Xm, Ym}, {SX, SY}}, {X, Y}) ->
{round((X - Xm) * SX), round((Y - Ym) * SY)}.
bound(Fun, Cfg, L) ->
N = case L of
[] -> 0;
_ -> lists:Fun(L)
end,
case maps:get(include_zero, Cfg, true) of
true ->
erlang:Fun(0, N);
false ->
N
end.
-spec visible(char(), string(), [term()]) -> iolist().
visible(Char, Fmt, Args) ->
Str = lines(lists:flatten(io_lib:format(Fmt, Args))),
Width = max(79, lists:max([length(I) || I <- Str])) + 1,
N = length(Str),
Text = [string({4, Y}, S, right)
|| {Y, S} <- lists:zip( lists:seq(1, N)
, lists:reverse(Str)
)],
Cnv = draw([ asciiart:line({1, -1}, {Width, -1}, Char)
, asciiart:line({1, N + 2}, {Width, N + 2}, Char)
, asciiart:line({1, 0}, {1, N + 1}, Char)
, asciiart:line({2, 0}, {2, N + 1}, Char)
, asciiart:line({Width - 1, 0}, {Width - 1, N + 1}, Char)
, asciiart:line({Width, 0}, {Width, N + 1}, Char)
] ++ Text),
[$\n, render(Cnv), $\n].
-spec lines(string()) -> [string()].
lines(Str) ->
re:split(Str, "\n", [{return, list}]).
|
c69e186fa51e4ed2e40e971e7192aa8ce55240d5f9758e984f1cda1e05888e1b | iskandr/parakeet-retired | CSE.ml | (* pp: -parser o pa_macro.cmo *)
open Type
open Base
open TypedSSA
open SSA_Transform
(* expressions without side effects *)
let is_safe_exp expNode = match expNode.exp with
| PrimApp _ | Arr _ | Values _ -> true
| _ -> false (* assume function calls unsafe by default *)
module CSE_Rules = struct
type context = (exp, value) Hashtbl.t
let init _ = Hashtbl.create 127
let finalize _ _ = NoChange
let dir = Forward
let stmt env stmtNode = match stmtNode.stmt with
(* leave simple constants alone *)
| Set ([id], {exp=Values [{value = Num _}]}) -> NoChange
| Set ([id], expNode) when is_safe_exp expNode ->
if Hashtbl.mem env expNode.exp then (
let rhsVal = Hashtbl.find env expNode.exp in
let src = expNode.exp_src in
let expNode' =
TypedSSA.vals_exp ?src expNode.exp_types [rhsVal]
in
Update (TypedSSA.set [id] expNode')
)
else (Hashtbl.add env expNode.exp (Var id); NoChange)
| Set _ -> NoChange
| _ -> Hashtbl.clear env; NoChange
TODO : propagate expressions through phi nodes
let phi env phiNode = NoChange
let exp env envNode = NoChange
let value env valNode = NoChange
end
module CSE_Rewrite = SSA_Transform.Mk(CSE_Rules)
let cse fn = CSE_Rewrite.transform_fn fn
| null | https://raw.githubusercontent.com/iskandr/parakeet-retired/3d7e6e5b699f83ce8a1c01290beed0b78c0d0945/SSA/Optimizations/CSE.ml | ocaml | pp: -parser o pa_macro.cmo
expressions without side effects
assume function calls unsafe by default
leave simple constants alone |
open Type
open Base
open TypedSSA
open SSA_Transform
let is_safe_exp expNode = match expNode.exp with
| PrimApp _ | Arr _ | Values _ -> true
module CSE_Rules = struct
type context = (exp, value) Hashtbl.t
let init _ = Hashtbl.create 127
let finalize _ _ = NoChange
let dir = Forward
let stmt env stmtNode = match stmtNode.stmt with
| Set ([id], {exp=Values [{value = Num _}]}) -> NoChange
| Set ([id], expNode) when is_safe_exp expNode ->
if Hashtbl.mem env expNode.exp then (
let rhsVal = Hashtbl.find env expNode.exp in
let src = expNode.exp_src in
let expNode' =
TypedSSA.vals_exp ?src expNode.exp_types [rhsVal]
in
Update (TypedSSA.set [id] expNode')
)
else (Hashtbl.add env expNode.exp (Var id); NoChange)
| Set _ -> NoChange
| _ -> Hashtbl.clear env; NoChange
TODO : propagate expressions through phi nodes
let phi env phiNode = NoChange
let exp env envNode = NoChange
let value env valNode = NoChange
end
module CSE_Rewrite = SSA_Transform.Mk(CSE_Rules)
let cse fn = CSE_Rewrite.transform_fn fn
|
1e53ff35528e0c9fc691e0c091fb302c12c3dcf5994061e0714caa89c5920042 | tobbebex/GPipe-Core | Buffer.hs | # LANGUAGE PatternSynonyms #
# LANGUAGE Arrows , TypeFamilies , ScopedTypeVariables ,
FlexibleContexts , FlexibleInstances , TypeSynonymInstances #
FlexibleContexts, FlexibleInstances , TypeSynonymInstances #-}
module Graphics.GPipe.Internal.Buffer
(
BufferFormat(..),
BufferColor,
Buffer(),
ToBuffer(..),
B(..), B2(..), B3(..), B4(..),
toB22, toB3, toB21, toB12, toB11,
Uniform(..), Normalized(..), BPacked(),
BInput(..),
newBuffer,
writeBuffer,
copyBuffer,
BufferStartPos,
bufSize, bufName, bufElementSize, bufferLength, bufBElement, bufferWriteInternal, makeBuffer, getUniformAlignment, UniformAlignment
) where
import Graphics.GPipe.Internal.Context
import Graphics.GL.Core33
import Graphics.GL.Types
import Foreign.Marshal.Utils
import Foreign.Marshal.Alloc
import Prelude hiding ((.), id)
import Control.Category
import Control.Arrow
import Control.Monad (void)
import Foreign.Storable
import Foreign.Ptr
import Control.Monad.IO.Class
import Data.Word
import Data.Int
import Control.Monad.Trans.State.Strict
import Control.Monad.Trans.Writer.Strict
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Class (lift)
import Data.IORef
import Control.Applicative ((<$>))
import Linear.V4
import Linear.V3
import Linear.V2
import Linear.V1
import Linear.V0
import Linear.Plucker (Plucker(..))
import Linear.Quaternion (Quaternion(..))
import Linear.Affine (Point(..))
-- | The class that constrains which types can live in a buffer.
class BufferFormat f where
    -- | The type a value of this format has when it lives on the host (i.e. normal world)
    type HostFormat f

    -- | An arrow action that turns a value from its host representation to its buffer representation. Use 'toBuffer' from
    --   the GPipe provided instances to operate in this arrow. Also note that this arrow needs to be able to return a value
    --   lazily, so ensure you use
    --
    --   @proc ~pattern -> do ...@
    toBuffer :: ToBuffer (HostFormat f) f

    -- GL enum for the component type (only defined for BufferColor types, see defaults below).
    getGlType :: f -> GLenum
    -- Read one value of this format back from raw memory (only defined for BufferColor types).
    peekPixel :: f -> Ptr () -> IO (HostFormat f)
    -- GL enum for the (padded) transfer format (only defined for BufferColor types).
    getGlPaddedFormat :: f -> GLenum
    getGlType = error "This is only defined for BufferColor types"
    peekPixel = error "This is only defined for BufferColor types"
    getGlPaddedFormat = error "This is only defined for BufferColor types"
-- | A @Buffer os b@ lives in the object space @os@ and contains elements of type @b@.
data Buffer os b = Buffer {
    -- | Mutable reference to the underlying GL buffer object name.
    bufName :: BufferName,
    -- | Size in bytes of one element as laid out in the buffer (see 'bufSize').
    bufElementSize :: Int,
    -- | Retrieve the number of elements in a buffer.
    bufferLength :: Int,
    -- | Build the buffer-side representation of an element from per-drawcall input.
    bufBElement :: BInput -> b,
    -- | Write one host value to raw memory at the given pointer.
    bufWriter :: Ptr () -> HostFormat b -> IO ()
}
-- Two buffers are equal exactly when they share the same name reference.
instance Eq (Buffer os b) where
    x == y = bufName x == bufName y
-- | Total size of the buffer contents in bytes: element size times element count.
bufSize :: forall os b. Buffer os b -> Int
bufSize buf = bufElementSize buf * bufferLength buf
-- | Mutable reference holding the GL object name of a buffer.
type BufferName = IORef GLuint
-- | Byte offset within a buffer element.
type Offset = Int
-- | Distance in bytes between consecutive buffer elements.
type Stride = Int
-- | Element index at which a buffer is read from.
type BufferStartPos = Int
-- | Per-drawcall input when turning a buffer element into its 'B' representation:
-- elements to skip and the instance divisor (names suggest instanced rendering
-- parameters — confirmed by their use in 'toBufferBUnaligned').
data BInput = BInput {bInSkipElems :: Int, bInInstanceDiv :: Int}
-- | Alignment in bytes required for uniform buffer elements.
type UniformAlignment = Int
-- Alignment strategies used while laying out elements; see 'ToBuffer' and the
-- 'Category' instance for how modes are combined.
data AlignmentMode = Align4 | AlignUniform | AlignPackedIndices | AlignUnknown deriving (Eq)
-- | The arrow type for 'toBuffer'. It bundles three passes over the same value
--   plus the alignment mode of the whole element:
data ToBuffer a b = ToBuffer
    -- offset/alignment pass: advances the running offset and records padding
    -- sizes ([Int]) for the writer pass; reads the uniform alignment and mode.
    !(Kleisli (StateT Offset (WriterT [Int] (Reader (UniformAlignment, AlignmentMode)))) a b)
    -- value pass: builds the buffer-side representation from name/stride/input.
    !(Kleisli (StateT Offset (Reader (BufferName, Stride, BInput))) a b)
    -- writer pass: pokes host values at the pointer, consuming recorded pads.
    !(Kleisli (StateT (Ptr (), [Int]) IO) a b)
    !AlignmentMode
instance Category ToBuffer where
    {-# INLINE id #-}
    -- Identity on all three passes; alignment mode is unknown until combined.
    id = ToBuffer id id id AlignUnknown
    {-# INLINE (.) #-}
    -- Compose the passes pairwise and merge the alignment modes.
    ToBuffer a b c m1 . ToBuffer x y z m2 = ToBuffer (a.x) (b.y) (c.z) (comb m1 m2)
        where
            -- If only one uniform or one PackedIndices, use that, otherwise use Align4
            comb AlignUniform AlignUnknown = AlignUniform
            comb AlignUnknown AlignUniform = AlignUniform
            comb AlignUnknown AlignPackedIndices = AlignPackedIndices
            comb AlignPackedIndices AlignUnknown = AlignPackedIndices
            comb AlignUnknown AlignUnknown = AlignUnknown
            comb _ _ = Align4
instance Arrow ToBuffer where
    {-# INLINE arr #-}
    -- Lift a pure function into all three passes; alignment stays unknown.
    arr f = ToBuffer (arr f) (arr f) (arr f) AlignUnknown
    {-# INLINE first #-}
    first (ToBuffer a b c m) = ToBuffer (first a) (first b) (first c) m
-- | The atomic buffer value that represents a host value of type 'a'.
--   Carries the buffer's name reference, this value's byte offset within an
--   element, the element stride, and the skip/instance-divisor settings taken
--   from 'BInput' (see 'toBufferBUnaligned').
data B a = B { bName :: IORef GLuint, bOffset :: Int, bStride :: Int, bSkipElems :: Int, bInstanceDiv :: Int}
-- | An atomic buffer value that represents a vector of 2 'a's on the host.
newtype B2 a = B2 { unB2 :: B a } -- Internal
-- | An atomic buffer value that represents a vector of 3 'a's on the host.
newtype B3 a = B3 { unB3 :: B a } -- Internal
-- | An atomic buffer value that represents a vector of 4 'a's on the host. This works similar to '(B a, B a, B a, B a)' but has some performance advantage, especially when used
--   in 'VertexArray's.
newtype B4 a = B4 { unB4 :: B a } -- Internal
| Split up a @'B4 ' a@ into two @'B2 ' a@s .
toB22 :: forall a. (Storable a, BufferFormat (B2 a)) => B4 a -> (B2 a, B2 a)
| Discard the last component of a @'B4 ' a@ to get a @'B3 ' a@.
toB3 :: forall a. (Storable a, BufferFormat (B3 a)) => B4 a -> B3 a
| Split up a @'B3 ' a@ into a @'B2 ' a@ and a @'B1 ' a@.
toB21 :: forall a. (Storable a, BufferFormat (B a)) => B3 a -> (B2 a, B a)
| Split up a @'B3 ' a@ into a @'B1 ' a@ and a @'B2 ' a@.
toB12 :: forall a. (Storable a, BufferFormat (B a)) => B3 a -> (B a, B2 a)
| Split up a @'B2 ' a@ into two @'B1 ' a@s .
toB11 :: forall a. (Storable a, BufferFormat (B a)) => B2 a -> (B a, B a)
toB22 (B4 b) = (B2 b, B2 $ b { bOffset = bOffset b + 2 * sizeOf (undefined :: a) })
toB3 (B4 b) = B3 b
toB21 (B3 b) = (B2 b, b { bOffset = bOffset b + 2*sizeOf (undefined :: a) })
toB12 (B3 b) = (b, B2 $ b { bOffset = bOffset b + sizeOf (undefined :: a) })
toB11 (B2 b) = (b, b { bOffset = bOffset b + sizeOf (undefined :: a) })
-- | Any buffer value that is going to be used as a uniform needs to be wrapped in this newtype. This will cause it to be aligned
--   properly for uniform usage. It can still be used as input for vertex arrays, but due to the uniform alignment it will probably be
--   padded quite heavily and thus wasteful.
newtype Uniform a = Uniform a
-- | This wrapper is used for integer values to indicate that it should be interpreted as a floating point value, in the range [-1,1] or [0,1] depending on whether it is a
--   signed or unsigned integer (i.e. 'Int' or 'Word').
newtype Normalized a = Normalized a
-- | This works like a 'B a', but has an alignment smaller than 4 bytes that is the limit for vertex buffers, and thus cannot be used for those.
--   Index buffers on the other hand need to be tightly packed, so you need to use this type for index buffers of 'Word8' or 'Word16'.
newtype BPacked a = BPacked (B a)
-- | Lay out one 'Storable' value with no extra alignment: it is placed at the
--   current offset and the offset advances by the value's size.
toBufferBUnaligned :: forall a. Storable a => ToBuffer a (B a)
toBufferBUnaligned = ToBuffer
    (Kleisli $ const static)    -- offset pass
    (Kleisli $ const valueProd) -- build the 'B' handle
    (Kleisli writer)            -- poke the host value
    Align4
    where
        size = sizeOf (undefined :: a)
        -- Advance the running offset; the produced value is never inspected,
        -- hence 'undefined'.
        static = do offset <- get
                    put $ offset + size
                    return undefined
        -- Build the 'B' handle for this slot from the buffer name, stride and
        -- per-drawcall input, advancing the offset the same way as 'static'.
        valueProd = do (name, stride, bIn) <- lift ask
                       offset <- get
                       put $ offset + size
                       return $ B name offset stride (bInSkipElems bIn) (bInInstanceDiv bIn)
        -- Poke the host value at the current pointer and advance it; the
        -- result is never inspected, hence 'undefined'.
        writer a = do (ptr,pads) <- get
                      put (ptr `plusPtr` size, pads)
                      liftIO $ poke (castPtr ptr) a
                      return undefined
-- | Serializer for a single 'Storable' component.
toBufferB :: forall a. Storable a => ToBuffer a (B a)
-- Will always be 4-aligned: only 4-byte-sized component types are defined for B1.
toBufferB = toBufferBUnaligned
-- | Serializer for a two-component vector, producing a 'B2' descriptor that
-- points at the first component.
toBufferB2 :: forall a. Storable a => ToBuffer (V2 a) (B2 a)
toBufferB2 = proc ~(V2 a b) -> do
    -- Small optimization if someone puts non-usable types in a uniform.
    (if sizeOf (undefined :: a) >= 4 then alignWhen [(AlignUniform, 2 * sizeOf (undefined :: a))] else id) -< ()
    a' <- toBufferBUnaligned -< a
    toBufferBUnaligned -< b
    -- Will always be 4-aligned: only 4-byte-sized component types are defined for B2.
    returnA -< B2 a'
-- | Serializer for a three-component vector, producing a 'B3' descriptor that
-- points at the first component.
toBufferB3 :: forall a. Storable a => ToBuffer (V3 a) (B3 a)
toBufferB3 = proc ~(V3 a b c) -> do
    -- Small optimization if someone puts non-usable types in a uniform.
    (if sizeOf (undefined :: a) >= 4 then alignWhen [(AlignUniform, 4 * sizeOf (undefined :: a))] else id) -< ()
    a' <- toBufferBUnaligned -< a
    toBufferBUnaligned -< b
    toBufferBUnaligned -< c
    -- For component types smaller than 4 bytes we need to pad the element out
    -- to a 4-byte boundary.
    (if sizeOf (undefined :: a) < 4 then alignWhen [(Align4, 4), (AlignUniform, 4)] else id) -< ()
    returnA -< B3 a'
-- | Serializer for a four-component vector, producing a 'B4' descriptor that
-- points at the first component.
toBufferB4 :: forall a. Storable a => ToBuffer (V4 a) (B4 a)
toBufferB4 = proc ~(V4 a b c d) -> do
    -- Small optimization if someone puts non-usable types in a uniform.
    (if sizeOf (undefined :: a) >= 4 then alignWhen [(AlignUniform, 4 * sizeOf (undefined :: a))] else id) -< ()
    a' <- toBufferBUnaligned -< a
    toBufferBUnaligned -< b
    toBufferBUnaligned -< c
    toBufferBUnaligned -< d
    -- Will always be 4-aligned.
    returnA -< B4 a'
-- | Uniform wrapping keeps the host format but switches the serializer into
-- 'AlignUniform' mode and pads each element up to the implementation's
-- uniform buffer offset alignment.
instance BufferFormat a => BufferFormat (Uniform a) where
    type HostFormat (Uniform a) = HostFormat a
    toBuffer = arr Uniform . ToBuffer
                    (Kleisli preStep)
                    (Kleisli elementBuilderA)
                    (Kleisli writerA)
                    AlignUniform
        where
            ToBuffer (Kleisli preStep') (Kleisli elementBuilderA) (Kleisli writerA') _ = toBuffer :: ToBuffer (HostFormat a) a
            -- Size pass: serialize the wrapped value, then pad the element
            -- size up to the uniform alignment queried from the environment.
            preStep a = do (x,_) <- lift $ lift ask
                           a' <- preStep' a
                           setElemAlignM [(AlignUniform, x)] ()
                           return a'
            -- Write pass: write the value, then consume the matching padding entry.
            writerA a = do a' <- writerA' a
                           setWriterAlignM ()
                           return a'
-- | Normalized wrapping keeps the buffer layout of the underlying format.
instance BufferFormat a => BufferFormat (Normalized a) where
    type HostFormat (Normalized a) = HostFormat a
    toBuffer = arr Normalized . toBuffer
    getGlType (Normalized a) = getGlType a
    -- Normalized integers transfer as non-integer formats; map each *_INTEGER
    -- padded format to its plain counterpart and pass anything else through.
    getGlPaddedFormat (Normalized a) = case getGlPaddedFormat a of
        GL_RGBA_INTEGER -> GL_RGBA
        GL_RGB_INTEGER -> GL_RGB
        GL_RG_INTEGER -> GL_RG
        GL_RED_INTEGER -> GL_RED
        x -> x
-- Vector instances: a host-side @V n@ is serialized component-wise into a
-- buffer-side @V n@ of buffer values (via the tuple instances for n > 1).
instance BufferFormat a => BufferFormat (V0 a) where
    type HostFormat (V0 a) = V0 (HostFormat a)
    toBuffer = arr (const V0)
instance BufferFormat a => BufferFormat (V1 a) where
    type HostFormat (V1 a) = V1 (HostFormat a)
    toBuffer = proc ~(V1 a) -> do
        a' <- toBuffer -< a
        returnA -< V1 a'
instance BufferFormat a => BufferFormat (V2 a) where
    type HostFormat (V2 a) = V2 (HostFormat a)
    toBuffer = proc ~(V2 a b) -> do
        (a', b') <- toBuffer -< (a,b)
        returnA -< V2 a' b'
instance BufferFormat a => BufferFormat (V3 a) where
    type HostFormat (V3 a) = V3 (HostFormat a)
    toBuffer = proc ~(V3 a b c) -> do
        (a', b', c') <- toBuffer -< (a, b, c)
        returnA -< V3 a' b' c'
instance BufferFormat a => BufferFormat (V4 a) where
    type HostFormat (V4 a) = V4 (HostFormat a)
    toBuffer = proc ~(V4 a b c d) -> do
        (a', b', c', d') <- toBuffer -< (a, b, c, d)
        returnA -< V4 a' b' c' d'
-- Tuple instances: components are serialized left to right; each n-tuple
-- reuses the (n-1)-tuple instance for its prefix.
instance BufferFormat () where
    type HostFormat () = ()
    toBuffer = arr (const ())
instance (BufferFormat a, BufferFormat b) => BufferFormat (a, b) where
    type HostFormat (a,b) = (HostFormat a, HostFormat b)
    toBuffer = proc ~(a, b) -> do
        a' <- toBuffer -< a
        b' <- toBuffer -< b
        returnA -< (a', b')
instance (BufferFormat a, BufferFormat b, BufferFormat c) => BufferFormat (a, b, c) where
    type HostFormat (a,b,c) = (HostFormat a, HostFormat b, HostFormat c)
    toBuffer = proc ~(a, b, c) -> do
        ((a', b'), c') <- toBuffer -< ((a, b), c)
        returnA -< (a', b', c')
instance (BufferFormat a, BufferFormat b, BufferFormat c, BufferFormat d) => BufferFormat (a, b, c, d) where
    type HostFormat (a,b,c,d) = (HostFormat a, HostFormat b, HostFormat c, HostFormat d)
    toBuffer = proc ~(a, b, c, d) -> do
        ((a', b', c'), d') <- toBuffer -< ((a, b, c), d)
        returnA -< (a', b', c', d')
instance (BufferFormat a, BufferFormat b, BufferFormat c, BufferFormat d, BufferFormat e) => BufferFormat (a, b, c, d, e) where
    type HostFormat (a,b,c,d,e) = (HostFormat a, HostFormat b, HostFormat c, HostFormat d, HostFormat e)
    toBuffer = proc ~(a, b, c, d, e) -> do
        ((a', b', c', d'), e') <- toBuffer -< ((a, b, c, d), e)
        returnA -< (a', b', c', d', e')
instance (BufferFormat a, BufferFormat b, BufferFormat c, BufferFormat d, BufferFormat e, BufferFormat f) => BufferFormat (a, b, c, d, e, f) where
    type HostFormat (a,b,c,d,e,f) = (HostFormat a, HostFormat b, HostFormat c, HostFormat d, HostFormat e, HostFormat f)
    toBuffer = proc ~(a, b, c, d, e, f) -> do
        ((a', b', c', d', e'), f') <- toBuffer -< ((a, b, c, d, e), f)
        returnA -< (a', b', c', d', e', f')
instance (BufferFormat a, BufferFormat b, BufferFormat c, BufferFormat d, BufferFormat e, BufferFormat f, BufferFormat g) => BufferFormat (a, b, c, d, e, f, g) where
    type HostFormat (a,b,c,d,e,f,g) = (HostFormat a, HostFormat b, HostFormat c, HostFormat d, HostFormat e, HostFormat f, HostFormat g)
    toBuffer = proc ~(a, b, c, d, e, f, g) -> do
        ((a', b', c', d', e', f'), g') <- toBuffer -< ((a, b, c, d, e, f), g)
        returnA -< (a', b', c', d', e', f', g')
-- Instances for the remaining linear-package container types; all are
-- straightforward component-wise serialization.
instance BufferFormat a => BufferFormat (Quaternion a) where
    type HostFormat (Quaternion a) = Quaternion (HostFormat a)
    toBuffer = proc ~(Quaternion a v) -> do
        a' <- toBuffer -< a
        v' <- toBuffer -< v
        returnA -< Quaternion a' v'
instance (BufferFormat (f a), BufferFormat a, HostFormat (f a) ~ f (HostFormat a)) => BufferFormat (Point f a) where
    type HostFormat (Point f a) = Point f (HostFormat a)
    toBuffer = proc ~(P a) -> do
        a' <- toBuffer -< a
        returnA -< P a'
instance BufferFormat a => BufferFormat (Plucker a) where
    type HostFormat (Plucker a) = Plucker (HostFormat a)
    toBuffer = proc ~(Plucker a b c d e f) -> do
        a' <- toBuffer -< a
        b' <- toBuffer -< b
        c' <- toBuffer -< c
        d' <- toBuffer -< d
        e' <- toBuffer -< e
        f' <- toBuffer -< f
        returnA -< Plucker a' b' c' d' e' f'
-- | Create a buffer with a specified number of elements.
newBuffer :: (MonadIO m, BufferFormat b, ContextHandler ctx) => Int -> ContextT ctx os m (Buffer os b)
newBuffer elementCount
    | elementCount < 0 = error "newBuffer, length negative"
    | otherwise = do
        (buffer, nameRef, name) <- liftNonWinContextIO $ do
            name <- alloca (\ptr -> glGenBuffers 1 ptr >> peek ptr)
            nameRef <- newIORef name
            uniAl <- getUniformAlignment
            let buffer = makeBuffer nameRef elementCount uniAl
            bname <- readIORef $ bufName buffer
            glBindBuffer GL_COPY_WRITE_BUFFER bname
            -- Allocate uninitialized storage for the whole buffer up front.
            glBufferData GL_COPY_WRITE_BUFFER (fromIntegral $ bufSize buffer) nullPtr GL_STREAM_DRAW
            return (buffer, nameRef, name)
        -- Delete the GL object when the buffer is collected, and clean up any
        -- VAOs that reference it.
        addContextFinalizer nameRef $ with name (glDeleteBuffers 1)
        addVAOBufferFinalizer nameRef
        return buffer
-- | Write a list of host values to consecutive buffer elements starting at
-- @ptr@, returning the pointer one past the last element written.
bufferWriteInternal :: Buffer os f -> Ptr () -> [HostFormat f] -> IO (Ptr ())
bufferWriteInternal _ ptr [] = return ptr
bufferWriteInternal b ptr (x:xs) =
    bufWriter b ptr x >> bufferWriteInternal b (ptr `plusPtr` bufElementSize b) xs
-- | Write a buffer from the host (i.e. the normal world).
writeBuffer :: (ContextHandler ctx, MonadIO m) => Buffer os b -> BufferStartPos -> [HostFormat b] -> ContextT ctx os m ()
writeBuffer buffer offset elems
    | offset < 0 || offset >= bufferLength buffer = error "writeBuffer, offset out of bounds"
    | otherwise =
        let maxElems = max 0 $ bufferLength buffer - offset
            elemSize = bufElementSize buffer
            off = fromIntegral $ offset * elemSize
        in liftNonWinContextAsyncIO $ do
            bname <- readIORef $ bufName buffer
            glBindBuffer GL_COPY_WRITE_BUFFER bname
            -- Map only the writable tail of the buffer; input elements beyond
            -- the end are silently dropped via 'take maxElems'.
            ptr <- glMapBufferRange GL_COPY_WRITE_BUFFER off (fromIntegral $maxElems * elemSize) (GL_MAP_WRITE_BIT + GL_MAP_FLUSH_EXPLICIT_BIT)
            end <- bufferWriteInternal buffer ptr (take maxElems elems)
            -- Flush only the range that was actually written.
            glFlushMappedBufferRange GL_COPY_WRITE_BUFFER off (fromIntegral $ end `minusPtr` ptr)
            void $ glUnmapBuffer GL_COPY_WRITE_BUFFER
-- | Copies values from one buffer to another (of the same type).
--
-- @copyBuffer fromBuffer fromStart toBuffer toStart length@ will copy @length@
-- elements from position @fromStart@ in @fromBuffer@ to position @toStart@ in @toBuffer@.
copyBuffer :: (ContextHandler ctx, MonadIO m) => Buffer os b -> BufferStartPos -> Buffer os b -> BufferStartPos -> Int -> ContextT ctx os m ()
copyBuffer bFrom from bTo to len
    | from < 0 || from >= bufferLength bFrom = error "copyBuffer, source offset out of bounds"
    | to < 0 || to >= bufferLength bTo = error "copyBuffer, destination offset out of bounds"
    | len < 0 = error "copyBuffer, length negative"
    | len + from > bufferLength bFrom = error "copyBuffer, source buffer too small"
    | len + to > bufferLength bTo = error "copyBuffer, destination buffer too small"
    | otherwise = liftNonWinContextAsyncIO $ do
        bnamef <- readIORef $ bufName bFrom
        bnamet <- readIORef $ bufName bTo
        glBindBuffer GL_COPY_READ_BUFFER bnamef
        glBindBuffer GL_COPY_WRITE_BUFFER bnamet
        let elemSize = bufElementSize bFrom -- same as for bTo
        glCopyBufferSubData GL_COPY_READ_BUFFER GL_COPY_WRITE_BUFFER (fromIntegral $ from * elemSize) (fromIntegral $ to * elemSize) (fromIntegral $ len * elemSize)
----------------------------------------------

-- | An identity serializer that inserts alignment padding when the current
-- alignment mode matches one of the given @(mode, alignment)@ entries.
alignWhen :: [(AlignmentMode, Int)] -> ToBuffer a a
alignWhen x = ToBuffer (Kleisli $ setElemAlignM x) (Kleisli return) (Kleisli setWriterAlignM) AlignUniform
-- | Size-pass half of alignment handling: if the current mode has an entry in
-- the list, pad the running offset up to that alignment and record the padding
-- amount (possibly 0) for the write pass to consume.
setElemAlignM :: [(AlignmentMode, Int)] -> b -> StateT Offset (WriterT [Int] (Reader (UniformAlignment, AlignmentMode))) b
setElemAlignM x a = do
    (_,m) <- lift $ lift ask
    pad <- case lookup m x of
        Nothing -> return 0
        Just al -> do
            offset <- get
            -- Distance up to the next multiple of al (0 if already aligned).
            let pad = al - 1 - ((offset - 1) `mod` al)
            put $ offset + pad
            return pad
    lift $ tell [pad]
    return a
-- | Write-pass half of alignment handling: skip over the padding recorded by
-- 'setElemAlignM' by advancing the destination pointer.
setWriterAlignM :: b -> StateT (Ptr a, [Int]) IO b
setWriterAlignM a = do
    (ptr, pad:pads) <- get
    put (ptr `plusPtr` pad, pads)
    return a
-- | Query the implementation's required offset alignment for uniform buffer bindings.
getUniformAlignment :: IO Int
getUniformAlignment = fromIntegral <$> alloca (\ ptr -> glGetIntegerv GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ptr >> peek ptr)
-- | Build the host-side description of a buffer: the element size (including
-- alignment padding), the element accessor and the element writer, all derived
-- from the 'toBuffer' serializer of @b@.
makeBuffer :: forall os b. BufferFormat b => BufferName -> Int -> UniformAlignment -> Buffer os b
makeBuffer name elementCount uniformAlignment = do
    let ToBuffer a b c m = toBuffer :: ToBuffer (HostFormat b) b
        -- The size and element passes must not force host values; 'err' makes
        -- any such dependency fail loudly.
        err = error "toBuffer is creating values that are dependant on the actual HostFormat values, this is not allowed since it doesn't allow static creation of shaders" :: HostFormat b
        ((_,elementSize),pads) = runReader (runWriterT (runStateT (runKleisli a err) 0)) (uniformAlignment, m)
        elementF bIn = fst $ runReader (runStateT (runKleisli b err) 0) (name, elementSize, bIn)
        writer ptr x = void $ runStateT (runKleisli c x) (ptr,pads)
    Buffer name elementSize elementCount elementF writer
-- | This type family restricts what host and buffer types a texture format may be converted into.
-- @BufferColor t h@ for a texture representation @t@ and a host representation @h@ will evaluate to a buffer type used in the transfer.
-- This family is closed, i.e. you cannot create additional instances to it.
type family BufferColor c h where
    BufferColor Float Int32 = Normalized (B Int32)
    BufferColor Float Word32 = Normalized (B Word32)
    BufferColor Float Float = B Float
    BufferColor Int Int32   = B Int32

    BufferColor Word Word32 = B Word32
    BufferColor Word Word16 = BPacked Word16
    BufferColor Word Word8  = BPacked Word8

    BufferColor (V2 Float) (V2 Int32) = Normalized (B2 Int32)
    BufferColor (V2 Float) (V2 Int16) = Normalized (B2 Int16)
    BufferColor (V2 Float) (V2 Word32) = Normalized (B2 Word32)
    BufferColor (V2 Float) (V2 Word16) = Normalized (B2 Word16)
    BufferColor (V2 Float) (V2 Float) = B2 Float

    BufferColor (V2 Int) (V2 Int32) = B2 Int32
    BufferColor (V2 Int) (V2 Int16) = B2 Int16

    BufferColor (V2 Word) (V2 Word32) = B2 Word32
    BufferColor (V2 Word) (V2 Word16) = B2 Word16

    BufferColor (V3 Float) (V3 Int32) = Normalized (B3 Int32)
    BufferColor (V3 Float) (V3 Int16) = Normalized (B3 Int16)
    BufferColor (V3 Float) (V3 Int8) = Normalized (B3 Int8)
    BufferColor (V3 Float) (V3 Word32) = Normalized (B3 Word32)
    BufferColor (V3 Float) (V3 Word16) = Normalized (B3 Word16)
    BufferColor (V3 Float) (V3 Word8) = Normalized (B3 Word8)
    BufferColor (V3 Float) (V3 Float) = B3 Float

    BufferColor (V3 Int) (V3 Int32) = B3 Int32
    BufferColor (V3 Int) (V3 Int16) = B3 Int16
    BufferColor (V3 Int) (V3 Int8) = B3 Int8

    BufferColor (V3 Word) (V3 Word32) = B3 Word32
    BufferColor (V3 Word) (V3 Word16) = B3 Word16
    BufferColor (V3 Word) (V3 Word8) = B3 Word8

    BufferColor (V4 Float) (V4 Int32) = Normalized (B4 Int32)
    BufferColor (V4 Float) (V4 Int16) = Normalized (B4 Int16)
    BufferColor (V4 Float) (V4 Int8) = Normalized (B4 Int8)
    BufferColor (V4 Float) (V4 Word32) = Normalized (B4 Word32)
    BufferColor (V4 Float) (V4 Word16) = Normalized (B4 Word16)
    BufferColor (V4 Float) (V4 Word8) = Normalized (B4 Word8)
    BufferColor (V4 Float) (V4 Float) = B4 Float

    BufferColor (V4 Int) (V4 Int32) = B4 Int32
    BufferColor (V4 Int) (V4 Int16) = B4 Int16
    BufferColor (V4 Int) (V4 Int8) = B4 Int8

    BufferColor (V4 Word) (V4 Word32) = B4 Word32
    BufferColor (V4 Word) (V4 Word16) = B4 Word16
    BufferColor (V4 Word) (V4 Word8) = B4 Word8
-- | Read a one-component pixel from raw memory.
peekPixel1 :: Storable a => Ptr x -> IO a
peekPixel1 = peek . castPtr
-- | Read a two-component pixel stored contiguously in raw memory.
peekPixel2 :: (Storable a) => Ptr x -> IO (V2 a)
peekPixel2 ptr = do x <- peek (castPtr ptr)
                    y <- peekElemOff (castPtr ptr ) 1
                    return (V2 x y)
-- | Read a three-component pixel stored contiguously in raw memory.
peekPixel3 :: (Storable a) => Ptr x -> IO (V3 a)
peekPixel3 ptr = do x <- peek (castPtr ptr)
                    y <- peekElemOff (castPtr ptr ) 1
                    z <- peekElemOff (castPtr ptr ) 2
                    return (V3 x y z)
-- | Read a four-component pixel by reading three components and one more.
peekPixel4 :: (Storable a) => Ptr x -> IO (V4 a)
peekPixel4 ptr = do V3 x y z <- peekPixel3 ptr
                    w <- peekElemOff (castPtr ptr ) 3
                    return (V4 x y z w)
-- BufferColor instances: each ties a buffer representation to its host type
-- and to how GL transfers it — the component type ('getGlType'), a raw-memory
-- pixel reader ('peekPixel') and the transfer format ('getGlPaddedFormat').
-- Note: three-component formats of sub-4-byte types report an RGBA format
-- because their rows are padded for the transfer.
instance BufferFormat (B Int32) where
    type HostFormat (B Int32) = Int32
    toBuffer = toBufferB
    getGlType _ = GL_INT
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED_INTEGER
instance BufferFormat (B Word32) where
    type HostFormat (B Word32) = Word32
    toBuffer = toBufferB
    getGlType _ = GL_UNSIGNED_INT
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED_INTEGER
-- Packed 16/8-bit scalars reuse the unpacked serializer but tag it with
-- 'AlignPackedIndices' so index buffers stay tightly packed.
instance BufferFormat (BPacked Word16) where
    type HostFormat (BPacked Word16) = Word16
    toBuffer = let ToBuffer a b c _ = toBufferB :: ToBuffer Word16 (B Word16) in arr BPacked . ToBuffer a b c AlignPackedIndices
    getGlType _ = GL_UNSIGNED_SHORT
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED_INTEGER
instance BufferFormat (BPacked Word8) where
    type HostFormat (BPacked Word8) = Word8
    toBuffer = let ToBuffer a b c _ = toBufferB :: ToBuffer Word8 (B Word8) in arr BPacked . ToBuffer a b c AlignPackedIndices
    getGlType _ = GL_UNSIGNED_BYTE
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED_INTEGER
instance BufferFormat (B Float) where
    type HostFormat (B Float) = Float
    toBuffer = toBufferB
    getGlType _ = GL_FLOAT
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED
instance BufferFormat (B2 Int32) where
    type HostFormat (B2 Int32) = V2 Int32
    toBuffer = toBufferB2
    getGlType _ = GL_INT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG_INTEGER
instance BufferFormat (B2 Int16) where
    type HostFormat (B2 Int16) = V2 Int16
    toBuffer = toBufferB2
    getGlType _ = GL_SHORT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG_INTEGER
instance BufferFormat (B2 Word32) where
    type HostFormat (B2 Word32) = V2 Word32
    toBuffer = toBufferB2
    getGlType _ = GL_UNSIGNED_INT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG_INTEGER
instance BufferFormat (B2 Word16) where
    type HostFormat (B2 Word16) = V2 Word16
    toBuffer = toBufferB2
    getGlType _ = GL_UNSIGNED_SHORT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG_INTEGER
instance BufferFormat (B2 Float) where
    type HostFormat (B2 Float) = V2 Float
    toBuffer = toBufferB2
    getGlType _ = GL_FLOAT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG
instance BufferFormat (B3 Int32) where
    type HostFormat (B3 Int32) = V3 Int32
    toBuffer = toBufferB3
    getGlType _ = GL_INT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGB_INTEGER
instance BufferFormat (B3 Int16) where
    type HostFormat (B3 Int16) = V3 Int16
    toBuffer = toBufferB3
    getGlType _ = GL_SHORT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B3 Int8) where
    type HostFormat (B3 Int8) = V3 Int8
    toBuffer = toBufferB3
    getGlType _ = GL_BYTE
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B3 Word32) where
    type HostFormat (B3 Word32) = V3 Word32
    toBuffer = toBufferB3
    getGlType _ = GL_UNSIGNED_INT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGB_INTEGER
instance BufferFormat (B3 Word16) where
    type HostFormat (B3 Word16) = V3 Word16
    toBuffer = toBufferB3
    getGlType _ = GL_UNSIGNED_SHORT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B3 Word8) where
    type HostFormat (B3 Word8) = V3 Word8
    toBuffer = toBufferB3
    getGlType _ = GL_UNSIGNED_BYTE
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B3 Float) where
    type HostFormat (B3 Float) = V3 Float
    toBuffer = toBufferB3
    getGlType _ = GL_FLOAT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGB
instance BufferFormat (B4 Int32) where
    type HostFormat (B4 Int32) = V4 Int32
    toBuffer = toBufferB4
    getGlType _ = GL_INT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Int16) where
    type HostFormat (B4 Int16) = V4 Int16
    toBuffer = toBufferB4
    getGlType _ = GL_SHORT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Int8) where
    type HostFormat (B4 Int8) = V4 Int8
    toBuffer = toBufferB4
    getGlType _ = GL_BYTE
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Word32) where
    type HostFormat (B4 Word32) = V4 Word32
    toBuffer = toBufferB4
    getGlType _ = GL_UNSIGNED_INT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Word16) where
    type HostFormat (B4 Word16) = V4 Word16
    toBuffer = toBufferB4
    getGlType _ = GL_UNSIGNED_SHORT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Word8) where
    type HostFormat (B4 Word8) = V4 Word8
    toBuffer = toBufferB4
    getGlType _ = GL_UNSIGNED_BYTE
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Float) where
    type HostFormat (B4 Float) = V4 Float
    toBuffer = toBufferB4
    getGlType _ = GL_FLOAT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA
| null | https://raw.githubusercontent.com/tobbebex/GPipe-Core/4607e2c31d5beec30f2a918ab8ad48472ca236b7/GPipe-Core/src/Graphics/GPipe/Internal/Buffer.hs | haskell | | The class that constraints which types can live in a buffer.
| An arrow action that turns a value from it's host representation to it's buffer representation. Use 'toBuffer' from
lazily, so ensure you use
@proc ~pattern -> do ...@
| Retrieve the number of elements in a buffer.
| The arrow type for 'toBuffer'.
# INLINE id #
# INLINE (.) #
| The atomic buffer value that represents a host value of type 'a'.
| Any buffer value that is going to be used as a uniform needs to be wrapped in this newtype. This will cause is to be aligned
properly for uniform usage. It can still be used as input for vertex arrays, but due to the uniform alignment it will probably be
padded quite heavily and thus wasteful.
signed or unsigned integer (i.e. 'Int' or 'Word').
Small optimization if someone puts non-usable types in a uniform
Small optimization if someone puts non-usable types in a uniform
Small optimization if someone puts non-usable types in a uniform
| Create a buffer with a specified number of elements.
--------------------------------------------
| This type family restricts what host and buffer types a texture format may be converted into.
This family is closed, i.e. you cannot create additional instances to it. | # LANGUAGE PatternSynonyms #
# LANGUAGE Arrows , TypeFamilies , ScopedTypeVariables ,
FlexibleContexts , FlexibleInstances , TypeSynonymInstances #
FlexibleContexts, FlexibleInstances , TypeSynonymInstances #-}
module Graphics.GPipe.Internal.Buffer
(
BufferFormat(..),
BufferColor,
Buffer(),
ToBuffer(..),
B(..), B2(..), B3(..), B4(..),
toB22, toB3, toB21, toB12, toB11,
Uniform(..), Normalized(..), BPacked(),
BInput(..),
newBuffer,
writeBuffer,
copyBuffer,
BufferStartPos,
bufSize, bufName, bufElementSize, bufferLength, bufBElement, bufferWriteInternal, makeBuffer, getUniformAlignment, UniformAlignment
) where
import Graphics.GPipe.Internal.Context
import Graphics.GL.Core33
import Graphics.GL.Types
import Foreign.Marshal.Utils
import Foreign.Marshal.Alloc
import Prelude hiding ((.), id)
import Control.Category
import Control.Arrow
import Control.Monad (void)
import Foreign.Storable
import Foreign.Ptr
import Control.Monad.IO.Class
import Data.Word
import Data.Int
import Control.Monad.Trans.State.Strict
import Control.Monad.Trans.Writer.Strict
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Class (lift)
import Data.IORef
import Control.Applicative ((<$>))
import Linear.V4
import Linear.V3
import Linear.V2
import Linear.V1
import Linear.V0
import Linear.Plucker (Plucker(..))
import Linear.Quaternion (Quaternion(..))
import Linear.Affine (Point(..))
-- | The class that constrains which types can live in a buffer.
class BufferFormat f where
    -- | The type a value of this format has when it lives on the host (i.e. the normal world).
    type HostFormat f
    -- | An arrow action that turns a value from its host representation to its buffer representation. Use 'toBuffer' from
    --   the GPipe provided instances to operate in this arrow. Also note that this arrow needs to be able to return a value
    --   lazily, so ensure you use @proc ~pattern -> do ...@.
    toBuffer :: ToBuffer (HostFormat f) f
    getGlType :: f -> GLenum
    peekPixel :: f -> Ptr () -> IO (HostFormat f)
    getGlPaddedFormat :: f -> GLenum
    -- Defaults: the GL transfer methods only make sense for BufferColor types.
    getGlType = error "This is only defined for BufferColor types"
    peekPixel = error "This is only defined for BufferColor types"
    getGlPaddedFormat = error "This is only defined for BufferColor types"
-- | A @Buffer os b@ lives in the object space @os@ and contains elements of type @b@.
data Buffer os b = Buffer {
                    -- Mutable reference to the GL buffer object name.
                    bufName :: BufferName,
                    -- Size in bytes of one element, including alignment padding.
                    bufElementSize :: Int,
                    -- | Retrieve the number of elements in a buffer.
                    bufferLength :: Int,
                    -- Build the buffer representation of one element from
                    -- vertex-array input parameters.
                    bufBElement :: BInput -> b,
                    -- Poke one host value into mapped memory at the given pointer.
                    bufWriter :: Ptr () -> HostFormat b -> IO ()
                    }

-- Buffers are identified by their underlying GL object reference.
instance Eq (Buffer os b) where
    a == b = bufName a == bufName b

-- | Total size of a buffer in bytes.
bufSize :: forall os b. Buffer os b -> Int
bufSize b = bufElementSize b * bufferLength b

type BufferName = IORef GLuint
type Offset = Int
type Stride = Int
type BufferStartPos = Int

-- | Per-draw vertex-array parameters: how many elements to skip and the instance divisor.
data BInput = BInput {bInSkipElems :: Int, bInInstanceDiv :: Int}

type UniformAlignment = Int

data AlignmentMode = Align4 | AlignUniform | AlignPackedIndices | AlignUnknown deriving (Eq)
-- | The arrow type for 'toBuffer'. The three component arrows are run in lock
-- step: a size pass, an element-descriptor pass and a write pass. The first
-- and third field declarations were lost in the mangled source (only their
-- inline comments survived); their types are fixed by the signatures of
-- 'setElemAlignM' and 'setWriterAlignM' below.
data ToBuffer a b = ToBuffer
    !(Kleisli (StateT Offset (WriterT [Int] (Reader (UniformAlignment, AlignmentMode)))) a b) -- Normal = aligned to 4 bytes
    !(Kleisli (StateT Offset (Reader (BufferName, Stride, BInput))) a b)
    !(Kleisli (StateT (Ptr (), [Int]) IO) a b) -- Normal = aligned to 4 bytes
    !AlignmentMode

instance Category ToBuffer where
    {-# INLINE id #-}
    id = ToBuffer id id id AlignUnknown
    {-# INLINE (.) #-}
    ToBuffer a b c m1 . ToBuffer x y z m2 = ToBuffer (a.x) (b.y) (c.z) (comb m1 m2)
        where
            -- If only one side requires uniform alignment or packed indices,
            -- use that mode; any real mix falls back to Align4.
            comb AlignUniform AlignUnknown = AlignUniform
            comb AlignUnknown AlignUniform = AlignUniform
            comb AlignUnknown AlignPackedIndices = AlignPackedIndices
            comb AlignPackedIndices AlignUnknown = AlignPackedIndices
            comb AlignUnknown AlignUnknown = AlignUnknown
            comb _ _ = Align4

instance Arrow ToBuffer where
    {-# INLINE arr #-}
    arr f = ToBuffer (arr f) (arr f) (arr f) AlignUnknown
    {-# INLINE first #-}
    first (ToBuffer a b c m) = ToBuffer (first a) (first b) (first c) m
-- | The atomic buffer value that represents a host value of type 'a'.
data B a = B { bName :: IORef GLuint, bOffset :: Int, bStride :: Int, bSkipElems :: Int, bInstanceDiv :: Int}

-- The B2/B3/B4 newtype declarations were lost in the mangled source (only the
-- trailing "Internal" comments survived); restored per their use as
-- constructors and patterns in toB22/toB3/toB21/toB12/toB11 below.
-- | An atomic buffer value that represents a vector of 2 'a's on the host.
newtype B2 a = B2 { unB2 :: B a } -- Internal
-- | An atomic buffer value that represents a vector of 3 'a's on the host.
newtype B3 a = B3 { unB3 :: B a } -- Internal
-- | An atomic buffer value that represents a vector of 4 'a's on the host. This works similar to @(B a, B a, B a, B a)@ but has some performance advantage, especially when used
--   in 'VertexArray's.
newtype B4 a = B4 { unB4 :: B a } -- Internal

-- | Split up a @'B4' a@ into two @'B2' a@s.
toB22 :: forall a. (Storable a, BufferFormat (B2 a)) => B4 a -> (B2 a, B2 a)
-- | Discard the last component of a @'B4' a@ to get a @'B3' a@.
toB3 :: forall a. (Storable a, BufferFormat (B3 a)) => B4 a -> B3 a
-- | Split up a @'B3' a@ into a @'B2' a@ and a @'B1' a@.
toB21 :: forall a. (Storable a, BufferFormat (B a)) => B3 a -> (B2 a, B a)
-- | Split up a @'B3' a@ into a @'B1' a@ and a @'B2' a@.
toB12 :: forall a. (Storable a, BufferFormat (B a)) => B3 a -> (B a, B2 a)
-- | Split up a @'B2' a@ into two @'B1' a@s.
toB11 :: forall a. (Storable a, BufferFormat (B a)) => B2 a -> (B a, B a)

toB22 (B4 b) = (B2 b, B2 $ b { bOffset = bOffset b + 2 * sizeOf (undefined :: a) })
toB3 (B4 b) = B3 b
toB21 (B3 b) = (B2 b, b { bOffset = bOffset b + 2*sizeOf (undefined :: a) })
toB12 (B3 b) = (b, B2 $ b { bOffset = bOffset b + sizeOf (undefined :: a) })
toB11 (B2 b) = (b, b { bOffset = bOffset b + sizeOf (undefined :: a) })

-- | Any buffer value that is going to be used as a uniform needs to be wrapped in this newtype. This will cause it to be aligned
-- properly for uniform usage. It can still be used as input for vertex arrays, but due to the uniform alignment it will probably be
-- padded quite heavily and thus wasteful.
newtype Uniform a = Uniform a
-- | This wrapper is used for integer values to indicate that it should be interpreted as a floating point value, in the range [-1,1] or [0,1] depending on whether it is a
-- signed or unsigned integer (i.e. 'Int' or 'Word').
newtype Normalized a = Normalized a
-- | This works like a 'B a', but has an alignment smaller than 4 bytes that is the limit for vertex buffers, and thus cannot be used for those.
-- Index buffers on the other hand need to be tightly packed, so you need to use this type for index buffers of 'Word8' or 'Word16'.
newtype BPacked a = BPacked (B a)
-- | Serializer for a single 'Storable' component with no alignment padding:
-- the size pass advances the offset, the element pass emits the 'B'
-- descriptor, and the write pass pokes the value into mapped memory.
toBufferBUnaligned :: forall a. Storable a => ToBuffer a (B a)
toBufferBUnaligned = ToBuffer
        (Kleisli $ const static)
        (Kleisli $ const valueProd)
        (Kleisli writer)
        Align4
    where
        size = sizeOf (undefined :: a)
        -- Size pass: count this component's bytes.
        static = do offset <- get
                    put $ offset + size
                    return undefined
        -- Element pass: build the descriptor at the current offset.
        valueProd = do (name, stride, bIn) <- lift ask
                       offset <- get
                       put $ offset + size
                       return $ B name offset stride (bInSkipElems bIn) (bInInstanceDiv bIn)
        -- Write pass: poke and advance the destination pointer.
        writer a = do (ptr,pads) <- get
                      put (ptr `plusPtr` size, pads)
                      liftIO $ poke (castPtr ptr) a
                      return undefined
-- | Serializer for a single 'Storable' component.
toBufferB :: forall a. Storable a => ToBuffer a (B a)
-- Will always be 4-aligned: only 4-byte-sized component types are defined for B1.
toBufferB = toBufferBUnaligned
-- The alignWhen/returnA lines of these three serializers were lost in the
-- mangled source (only their inline comments survived); restored to match
-- the intact copies earlier in this file.
toBufferB2 :: forall a. Storable a => ToBuffer (V2 a) (B2 a)
toBufferB2 = proc ~(V2 a b) -> do
    -- Small optimization if someone puts non-usable types in a uniform.
    (if sizeOf (undefined :: a) >= 4 then alignWhen [(AlignUniform, 2 * sizeOf (undefined :: a))] else id) -< ()
    a' <- toBufferBUnaligned -< a
    toBufferBUnaligned -< b
    -- Will always be 4-aligned: only 4-byte-sized component types are defined for B2.
    returnA -< B2 a'

toBufferB3 :: forall a. Storable a => ToBuffer (V3 a) (B3 a)
toBufferB3 = proc ~(V3 a b c) -> do
    -- Small optimization if someone puts non-usable types in a uniform.
    (if sizeOf (undefined :: a) >= 4 then alignWhen [(AlignUniform, 4 * sizeOf (undefined :: a))] else id) -< ()
    a' <- toBufferBUnaligned -< a
    toBufferBUnaligned -< b
    toBufferBUnaligned -< c
    -- For component types smaller than 4 bytes we need to pad out to a 4-byte boundary.
    (if sizeOf (undefined :: a) < 4 then alignWhen [(Align4, 4), (AlignUniform, 4)] else id) -< ()
    returnA -< B3 a'

toBufferB4 :: forall a. Storable a => ToBuffer (V4 a) (B4 a)
toBufferB4 = proc ~(V4 a b c d) -> do
    -- Small optimization if someone puts non-usable types in a uniform.
    (if sizeOf (undefined :: a) >= 4 then alignWhen [(AlignUniform, 4 * sizeOf (undefined :: a))] else id) -< ()
    a' <- toBufferBUnaligned -< a
    toBufferBUnaligned -< b
    toBufferBUnaligned -< c
    toBufferBUnaligned -< d
    -- Will always be 4-aligned.
    returnA -< B4 a'
-- | Uniform wrapping keeps the host format but switches the serializer into
-- 'AlignUniform' mode, padding each element to the uniform offset alignment.
instance BufferFormat a => BufferFormat (Uniform a) where
    type HostFormat (Uniform a) = HostFormat a
    toBuffer = arr Uniform . ToBuffer
                    (Kleisli preStep)
                    (Kleisli elementBuilderA)
                    (Kleisli writerA)
                    AlignUniform
        where
            ToBuffer (Kleisli preStep') (Kleisli elementBuilderA) (Kleisli writerA') _ = toBuffer :: ToBuffer (HostFormat a) a
            -- Serialize the wrapped value, then pad the element size up to the
            -- uniform alignment read from the environment.
            preStep a = do (x,_) <- lift $ lift ask
                           a' <- preStep' a
                           setElemAlignM [(AlignUniform, x)] ()
                           return a'
            -- Write the value, then consume the matching padding entry.
            writerA a = do a' <- writerA' a
                           setWriterAlignM ()
                           return a'
-- | Normalized wrapping keeps the buffer layout of the underlying format but
-- maps *_INTEGER transfer formats to their non-integer counterparts.
instance BufferFormat a => BufferFormat (Normalized a) where
    type HostFormat (Normalized a) = HostFormat a
    toBuffer = arr Normalized . toBuffer
    getGlType (Normalized a) = getGlType a
    getGlPaddedFormat (Normalized a) = case getGlPaddedFormat a of
        GL_RGBA_INTEGER -> GL_RGBA
        GL_RGB_INTEGER -> GL_RGB
        GL_RG_INTEGER -> GL_RG
        GL_RED_INTEGER -> GL_RED
        x -> x
-- Container instances: vectors, tuples and the remaining linear-package types
-- all serialize component-wise, left to right; each n-tuple reuses the
-- (n-1)-tuple instance for its prefix.
instance BufferFormat a => BufferFormat (V0 a) where
    type HostFormat (V0 a) = V0 (HostFormat a)
    toBuffer = arr (const V0)
instance BufferFormat a => BufferFormat (V1 a) where
    type HostFormat (V1 a) = V1 (HostFormat a)
    toBuffer = proc ~(V1 a) -> do
        a' <- toBuffer -< a
        returnA -< V1 a'
instance BufferFormat a => BufferFormat (V2 a) where
    type HostFormat (V2 a) = V2 (HostFormat a)
    toBuffer = proc ~(V2 a b) -> do
        (a', b') <- toBuffer -< (a,b)
        returnA -< V2 a' b'
instance BufferFormat a => BufferFormat (V3 a) where
    type HostFormat (V3 a) = V3 (HostFormat a)
    toBuffer = proc ~(V3 a b c) -> do
        (a', b', c') <- toBuffer -< (a, b, c)
        returnA -< V3 a' b' c'
instance BufferFormat a => BufferFormat (V4 a) where
    type HostFormat (V4 a) = V4 (HostFormat a)
    toBuffer = proc ~(V4 a b c d) -> do
        (a', b', c', d') <- toBuffer -< (a, b, c, d)
        returnA -< V4 a' b' c' d'
instance BufferFormat () where
    type HostFormat () = ()
    toBuffer = arr (const ())
instance (BufferFormat a, BufferFormat b) => BufferFormat (a, b) where
    type HostFormat (a,b) = (HostFormat a, HostFormat b)
    toBuffer = proc ~(a, b) -> do
        a' <- toBuffer -< a
        b' <- toBuffer -< b
        returnA -< (a', b')
instance (BufferFormat a, BufferFormat b, BufferFormat c) => BufferFormat (a, b, c) where
    type HostFormat (a,b,c) = (HostFormat a, HostFormat b, HostFormat c)
    toBuffer = proc ~(a, b, c) -> do
        ((a', b'), c') <- toBuffer -< ((a, b), c)
        returnA -< (a', b', c')
instance (BufferFormat a, BufferFormat b, BufferFormat c, BufferFormat d) => BufferFormat (a, b, c, d) where
    type HostFormat (a,b,c,d) = (HostFormat a, HostFormat b, HostFormat c, HostFormat d)
    toBuffer = proc ~(a, b, c, d) -> do
        ((a', b', c'), d') <- toBuffer -< ((a, b, c), d)
        returnA -< (a', b', c', d')
instance (BufferFormat a, BufferFormat b, BufferFormat c, BufferFormat d, BufferFormat e) => BufferFormat (a, b, c, d, e) where
    type HostFormat (a,b,c,d,e) = (HostFormat a, HostFormat b, HostFormat c, HostFormat d, HostFormat e)
    toBuffer = proc ~(a, b, c, d, e) -> do
        ((a', b', c', d'), e') <- toBuffer -< ((a, b, c, d), e)
        returnA -< (a', b', c', d', e')
instance (BufferFormat a, BufferFormat b, BufferFormat c, BufferFormat d, BufferFormat e, BufferFormat f) => BufferFormat (a, b, c, d, e, f) where
    type HostFormat (a,b,c,d,e,f) = (HostFormat a, HostFormat b, HostFormat c, HostFormat d, HostFormat e, HostFormat f)
    toBuffer = proc ~(a, b, c, d, e, f) -> do
        ((a', b', c', d', e'), f') <- toBuffer -< ((a, b, c, d, e), f)
        returnA -< (a', b', c', d', e', f')
instance (BufferFormat a, BufferFormat b, BufferFormat c, BufferFormat d, BufferFormat e, BufferFormat f, BufferFormat g) => BufferFormat (a, b, c, d, e, f, g) where
    type HostFormat (a,b,c,d,e,f,g) = (HostFormat a, HostFormat b, HostFormat c, HostFormat d, HostFormat e, HostFormat f, HostFormat g)
    toBuffer = proc ~(a, b, c, d, e, f, g) -> do
        ((a', b', c', d', e', f'), g') <- toBuffer -< ((a, b, c, d, e, f), g)
        returnA -< (a', b', c', d', e', f', g')
instance BufferFormat a => BufferFormat (Quaternion a) where
    type HostFormat (Quaternion a) = Quaternion (HostFormat a)
    toBuffer = proc ~(Quaternion a v) -> do
        a' <- toBuffer -< a
        v' <- toBuffer -< v
        returnA -< Quaternion a' v'
instance (BufferFormat (f a), BufferFormat a, HostFormat (f a) ~ f (HostFormat a)) => BufferFormat (Point f a) where
    type HostFormat (Point f a) = Point f (HostFormat a)
    toBuffer = proc ~(P a) -> do
        a' <- toBuffer -< a
        returnA -< P a'
instance BufferFormat a => BufferFormat (Plucker a) where
    type HostFormat (Plucker a) = Plucker (HostFormat a)
    toBuffer = proc ~(Plucker a b c d e f) -> do
        a' <- toBuffer -< a
        b' <- toBuffer -< b
        c' <- toBuffer -< c
        d' <- toBuffer -< d
        e' <- toBuffer -< e
        f' <- toBuffer -< f
        returnA -< Plucker a' b' c' d' e' f'
-- | Create a buffer with a specified number of elements.
newBuffer :: (MonadIO m, BufferFormat b, ContextHandler ctx) => Int -> ContextT ctx os m (Buffer os b)
newBuffer elementCount
    | elementCount < 0 = error "newBuffer, length negative"
    | otherwise = do
        (buffer, nameRef, name) <- liftNonWinContextIO $ do
            name <- alloca (\ptr -> glGenBuffers 1 ptr >> peek ptr)
            nameRef <- newIORef name
            uniAl <- getUniformAlignment
            let buffer = makeBuffer nameRef elementCount uniAl
            bname <- readIORef $ bufName buffer
            glBindBuffer GL_COPY_WRITE_BUFFER bname
            -- Allocate uninitialized storage for the whole buffer up front.
            glBufferData GL_COPY_WRITE_BUFFER (fromIntegral $ bufSize buffer) nullPtr GL_STREAM_DRAW
            return (buffer, nameRef, name)
        -- Delete the GL object when the buffer is collected, and clean up any
        -- VAOs that reference it.
        addContextFinalizer nameRef $ with name (glDeleteBuffers 1)
        addVAOBufferFinalizer nameRef
        return buffer
-- | Write a list of host values to consecutive buffer elements starting at
-- @ptr@, returning the pointer one past the last element written.
bufferWriteInternal :: Buffer os f -> Ptr () -> [HostFormat f] -> IO (Ptr ())
bufferWriteInternal b ptr (x:xs) = do bufWriter b ptr x
                                      bufferWriteInternal b (ptr `plusPtr` bufElementSize b) xs
bufferWriteInternal _ ptr [] = return ptr
-- | Write a buffer from the host (i.e. the normal world).
writeBuffer :: (ContextHandler ctx, MonadIO m) => Buffer os b -> BufferStartPos -> [HostFormat b] -> ContextT ctx os m ()
writeBuffer buffer offset elems
    | offset < 0 || offset >= bufferLength buffer = error "writeBuffer, offset out of bounds"
    | otherwise =
        let maxElems = max 0 $ bufferLength buffer - offset
            elemSize = bufElementSize buffer
            off = fromIntegral $ offset * elemSize
        in liftNonWinContextAsyncIO $ do
            bname <- readIORef $ bufName buffer
            glBindBuffer GL_COPY_WRITE_BUFFER bname
            -- Map only the writable tail; excess input elements are dropped.
            ptr <- glMapBufferRange GL_COPY_WRITE_BUFFER off (fromIntegral $maxElems * elemSize) (GL_MAP_WRITE_BIT + GL_MAP_FLUSH_EXPLICIT_BIT)
            end <- bufferWriteInternal buffer ptr (take maxElems elems)
            -- Flush only the range that was actually written.
            glFlushMappedBufferRange GL_COPY_WRITE_BUFFER off (fromIntegral $ end `minusPtr` ptr)
            void $ glUnmapBuffer GL_COPY_WRITE_BUFFER
-- | Copies values from one buffer to another (of the same type).
--   @copyBuffer fromBuffer fromStart toBuffer toStart length@ will copy @length@ elements from position @fromStart@ in @fromBuffer@ to position @toStart@ in @toBuffer@.
-- | Copy @len@ elements from position @from@ in @bFrom@ to position @to@ in
-- @bTo@, entirely on the GPU via the GL copy-read/copy-write binding points.
-- Both buffers hold the same element type @b@, so they share one element
-- size.  Raises an 'error' for out-of-range offsets or lengths.
copyBuffer :: (ContextHandler ctx, MonadIO m) => Buffer os b -> BufferStartPos -> Buffer os b -> BufferStartPos -> Int -> ContextT ctx os m ()
copyBuffer bFrom from bTo to len | from < 0 || from >= bufferLength bFrom = error "copyBuffer, source offset out of bounds"
                                 | to < 0 || to >= bufferLength bTo = error "copyBuffer, destination offset out of bounds"
                                 | len < 0 = error "copyBuffer, length negative"
                                 | len + from > bufferLength bFrom = error "copyBuffer, source buffer too small"
                                 | len + to > bufferLength bTo = error "copyBuffer, destination buffer too small"
                                 | otherwise = liftNonWinContextAsyncIO $ do
                                      bnamef <- readIORef $ bufName bFrom
                                      bnamet <- readIORef $ bufName bTo
                                      glBindBuffer GL_COPY_READ_BUFFER bnamef
                                      glBindBuffer GL_COPY_WRITE_BUFFER bnamet
                                      -- Restored: this binding had been mangled away, leaving
                                      -- elemSize unbound.  Same element size as bTo, since both
                                      -- buffers store elements of type b.
                                      let elemSize = bufElementSize bFrom
                                      glCopyBufferSubData GL_COPY_READ_BUFFER GL_COPY_WRITE_BUFFER (fromIntegral $ from * elemSize) (fromIntegral $ to * elemSize) (fromIntegral $ len * elemSize)
-- | Insert padding before the next element whenever the active alignment
-- mode matches one of the given (mode, alignment) pairs.  The layout pass
-- records each pad count (via 'setElemAlignM') and the writer pass skips the
-- same number of bytes (via 'setWriterAlignM').
alignWhen :: [(AlignmentMode, Int)] -> ToBuffer a a
alignWhen x = ToBuffer (Kleisli $ setElemAlignM x) (Kleisli return) (Kleisli setWriterAlignM) AlignUniform
-- | Layout pass of 'alignWhen': if the active alignment mode has an entry in
-- @modes@, advance the running offset up to the next multiple of that
-- alignment, and record the pad byte count (possibly 0) for the writer pass.
setElemAlignM :: [(AlignmentMode, Int)] -> b -> StateT Offset (WriterT [Int] (Reader (UniformAlignment, AlignmentMode))) b
setElemAlignM modes a = do
    (_, mode) <- lift (lift ask)
    pad <- maybe (return 0) padTo (lookup mode modes)
    lift (tell [pad])
    return a
  where
    -- Bytes needed to round the current offset up to a multiple of al;
    -- also advances the offset state by that amount.
    padTo al = do
        offset <- get
        let pad = al - 1 - ((offset - 1) `mod` al)
        put (offset + pad)
        return pad
-- | Writer pass of 'alignWhen': consume the next recorded pad count and
-- advance the destination pointer past it.  The pattern is partial by
-- design — 'setElemAlignM' records exactly one pad entry per aligned element,
-- so the list is never empty here.
setWriterAlignM :: b -> StateT (Ptr a, [Int]) IO b
setWriterAlignM a = do (ptr, pad:pads) <- get
                       put (ptr `plusPtr` pad, pads)
                       return a
-- | Query the GL implementation's required offset alignment for uniform
-- buffer ranges (GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT).
getUniformAlignment :: IO Int
getUniformAlignment = fromIntegral <$> alloca (\ ptr -> glGetIntegerv GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ptr >> peek ptr)
-- | Assemble a 'Buffer' value purely from the element type's 'ToBuffer'
-- description.  The layout pass (component @a@) is run with an 'error' thunk
-- as the host value, so it may depend only on the element *type*, never on
-- real data — that is what allows element size and padding to be computed
-- statically (forcing the thunk raises the error below).
makeBuffer :: forall os b. BufferFormat b => BufferName -> Int -> UniformAlignment -> Buffer os b
makeBuffer name elementCount uniformAlignment = do
    let ToBuffer a b c m = toBuffer :: ToBuffer (HostFormat b) b
        err = error "toBuffer is creating values that are dependant on the actual HostFormat values, this is not allowed since it doesn't allow static creation of shaders" :: HostFormat b
        -- Layout pass: element size plus the list of pad bytes.
        ((_,elementSize),pads) = runReader (runWriterT (runStateT (runKleisli a err) 0)) (uniformAlignment, m)
        -- Per-element buffer representation for a given input.
        elementF bIn = fst $ runReader (runStateT (runKleisli b err) 0) (name, elementSize, bIn)
        -- Writer pass: the only pass that sees a real host value x.
        writer ptr x = void $ runStateT (runKleisli c x) (ptr,pads)
    Buffer name elementSize elementCount elementF writer
-- | @BufferColor t h@ for a texture representation @t@ and a host representation @h@ will evaluate to a buffer type used in the transfer.
-- Closed type family mapping (texture component type, host component type)
-- to the buffer representation used during pixel transfer.  Pattern: a Float
-- texture component fed from an integral host type goes through 'Normalized'
-- (fixed-point normalisation); otherwise the host type maps to the plain
-- B/B2/B3/B4 wrapper of matching width.
type family BufferColor c h where
    -- Scalars.
    BufferColor Float Int32 = Normalized (B Int32)
    BufferColor Float Word32 = Normalized (B Word32)
    BufferColor Float Float = B Float
    BufferColor Int Int32 = B Int32
    BufferColor Word Word32 = B Word32
    BufferColor Word Word16 = BPacked Word16
    BufferColor Word Word8 = BPacked Word8
    -- Two components.
    BufferColor (V2 Float) (V2 Int32) = Normalized (B2 Int32)
    BufferColor (V2 Float) (V2 Int16) = Normalized (B2 Int16)
    BufferColor (V2 Float) (V2 Word32) = Normalized (B2 Word32)
    BufferColor (V2 Float) (V2 Word16) = Normalized (B2 Word16)
    BufferColor (V2 Float) (V2 Float) = B2 Float
    BufferColor (V2 Int) (V2 Int32) = B2 Int32
    BufferColor (V2 Int) (V2 Int16) = B2 Int16
    BufferColor (V2 Word) (V2 Word32) = B2 Word32
    BufferColor (V2 Word) (V2 Word16) = B2 Word16
    -- Three components.
    BufferColor (V3 Float) (V3 Int32) = Normalized (B3 Int32)
    BufferColor (V3 Float) (V3 Int16) = Normalized (B3 Int16)
    BufferColor (V3 Float) (V3 Int8) = Normalized (B3 Int8)
    BufferColor (V3 Float) (V3 Word32) = Normalized (B3 Word32)
    BufferColor (V3 Float) (V3 Word16) = Normalized (B3 Word16)
    BufferColor (V3 Float) (V3 Word8) = Normalized (B3 Word8)
    BufferColor (V3 Float) (V3 Float) = B3 Float
    BufferColor (V3 Int) (V3 Int32) = B3 Int32
    BufferColor (V3 Int) (V3 Int16) = B3 Int16
    BufferColor (V3 Int) (V3 Int8) = B3 Int8
    BufferColor (V3 Word) (V3 Word32) = B3 Word32
    BufferColor (V3 Word) (V3 Word16) = B3 Word16
    BufferColor (V3 Word) (V3 Word8) = B3 Word8
    -- Four components.
    BufferColor (V4 Float) (V4 Int32) = Normalized (B4 Int32)
    BufferColor (V4 Float) (V4 Int16) = Normalized (B4 Int16)
    BufferColor (V4 Float) (V4 Int8) = Normalized (B4 Int8)
    BufferColor (V4 Float) (V4 Word32) = Normalized (B4 Word32)
    BufferColor (V4 Float) (V4 Word16) = Normalized (B4 Word16)
    BufferColor (V4 Float) (V4 Word8) = Normalized (B4 Word8)
    BufferColor (V4 Float) (V4 Float) = B4 Float
    BufferColor (V4 Int) (V4 Int32) = B4 Int32
    BufferColor (V4 Int) (V4 Int16) = B4 Int16
    BufferColor (V4 Int) (V4 Int8) = B4 Int8
    BufferColor (V4 Word) (V4 Word32) = B4 Word32
    BufferColor (V4 Word) (V4 Word16) = B4 Word16
    BufferColor (V4 Word) (V4 Word8) = B4 Word8
-- Read back one pixel worth of components from a raw pointer.  The N-ary
-- variants read N consecutive 'Storable' elements starting at the pointer.
peekPixel1 :: Storable a => Ptr x -> IO a
peekPixel1 = peek . castPtr
peekPixel2 :: (Storable a) => Ptr x -> IO (V2 a)
peekPixel2 ptr = let p = castPtr ptr
                 in V2 <$> peekElemOff p 0 <*> peekElemOff p 1
peekPixel3 :: (Storable a) => Ptr x -> IO (V3 a)
peekPixel3 ptr = let p = castPtr ptr
                 in V3 <$> peekElemOff p 0 <*> peekElemOff p 1 <*> peekElemOff p 2
peekPixel4 :: (Storable a) => Ptr x -> IO (V4 a)
peekPixel4 ptr = let p = castPtr ptr
                 in V4 <$> peekElemOff p 0 <*> peekElemOff p 1 <*> peekElemOff p 2 <*> peekElemOff p 3
-- Scalar components ---------------------------------------------------------
instance BufferFormat (B Int32) where
    type HostFormat (B Int32) = Int32
    toBuffer = toBufferB
    getGlType _ = GL_INT
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED_INTEGER
instance BufferFormat (B Word32) where
    type HostFormat (B Word32) = Word32
    toBuffer = toBufferB
    getGlType _ = GL_UNSIGNED_INT
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED_INTEGER
-- BPacked variants reuse the scalar serialiser but switch the alignment
-- mode to AlignPackedIndices.
instance BufferFormat (BPacked Word16) where
    type HostFormat (BPacked Word16) = Word16
    toBuffer = let ToBuffer a b c _ = toBufferB :: ToBuffer Word16 (B Word16) in arr BPacked . ToBuffer a b c AlignPackedIndices
    getGlType _ = GL_UNSIGNED_SHORT
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED_INTEGER
instance BufferFormat (BPacked Word8) where
    type HostFormat (BPacked Word8) = Word8
    toBuffer = let ToBuffer a b c _ = toBufferB :: ToBuffer Word8 (B Word8) in arr BPacked . ToBuffer a b c AlignPackedIndices
    getGlType _ = GL_UNSIGNED_BYTE
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED_INTEGER
instance BufferFormat (B Float) where
    type HostFormat (B Float) = Float
    toBuffer = toBufferB
    getGlType _ = GL_FLOAT
    peekPixel = const peekPixel1
    getGlPaddedFormat _ = GL_RED
-- Two-component vectors -----------------------------------------------------
instance BufferFormat (B2 Int32) where
    type HostFormat (B2 Int32) = V2 Int32
    toBuffer = toBufferB2
    getGlType _ = GL_INT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG_INTEGER
instance BufferFormat (B2 Int16) where
    type HostFormat (B2 Int16) = V2 Int16
    toBuffer = toBufferB2
    getGlType _ = GL_SHORT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG_INTEGER
instance BufferFormat (B2 Word32) where
    type HostFormat (B2 Word32) = V2 Word32
    toBuffer = toBufferB2
    getGlType _ = GL_UNSIGNED_INT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG_INTEGER
instance BufferFormat (B2 Word16) where
    type HostFormat (B2 Word16) = V2 Word16
    toBuffer = toBufferB2
    getGlType _ = GL_UNSIGNED_SHORT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG_INTEGER
instance BufferFormat (B2 Float) where
    type HostFormat (B2 Float) = V2 Float
    toBuffer = toBufferB2
    getGlType _ = GL_FLOAT
    peekPixel = const peekPixel2
    getGlPaddedFormat _ = GL_RG
-- Three-component vectors ---------------------------------------------------
-- NOTE(review): the sub-32-bit B3 instances below report the 4-component
-- padded format (GL_RGBA_INTEGER) while the 32-bit ones report
-- GL_RGB_INTEGER; presumably intentional padding for transfer alignment —
-- confirm against the texture transfer code.
instance BufferFormat (B3 Int32) where
    type HostFormat (B3 Int32) = V3 Int32
    toBuffer = toBufferB3
    getGlType _ = GL_INT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGB_INTEGER
instance BufferFormat (B3 Int16) where
    type HostFormat (B3 Int16) = V3 Int16
    toBuffer = toBufferB3
    getGlType _ = GL_SHORT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B3 Int8) where
    type HostFormat (B3 Int8) = V3 Int8
    toBuffer = toBufferB3
    getGlType _ = GL_BYTE
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B3 Word32) where
    type HostFormat (B3 Word32) = V3 Word32
    toBuffer = toBufferB3
    getGlType _ = GL_UNSIGNED_INT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGB_INTEGER
instance BufferFormat (B3 Word16) where
    type HostFormat (B3 Word16) = V3 Word16
    toBuffer = toBufferB3
    getGlType _ = GL_UNSIGNED_SHORT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B3 Word8) where
    type HostFormat (B3 Word8) = V3 Word8
    toBuffer = toBufferB3
    getGlType _ = GL_UNSIGNED_BYTE
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B3 Float) where
    type HostFormat (B3 Float) = V3 Float
    toBuffer = toBufferB3
    getGlType _ = GL_FLOAT
    peekPixel = const peekPixel3
    getGlPaddedFormat _ = GL_RGB
-- Four-component vectors ----------------------------------------------------
instance BufferFormat (B4 Int32) where
    type HostFormat (B4 Int32) = V4 Int32
    toBuffer = toBufferB4
    getGlType _ = GL_INT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Int16) where
    type HostFormat (B4 Int16) = V4 Int16
    toBuffer = toBufferB4
    getGlType _ = GL_SHORT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Int8) where
    type HostFormat (B4 Int8) = V4 Int8
    toBuffer = toBufferB4
    getGlType _ = GL_BYTE
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Word32) where
    type HostFormat (B4 Word32) = V4 Word32
    toBuffer = toBufferB4
    getGlType _ = GL_UNSIGNED_INT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Word16) where
    type HostFormat (B4 Word16) = V4 Word16
    toBuffer = toBufferB4
    getGlType _ = GL_UNSIGNED_SHORT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Word8) where
    type HostFormat (B4 Word8) = V4 Word8
    toBuffer = toBufferB4
    getGlType _ = GL_UNSIGNED_BYTE
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA_INTEGER
instance BufferFormat (B4 Float) where
    type HostFormat (B4 Float) = V4 Float
    toBuffer = toBufferB4
    getGlType _ = GL_FLOAT
    peekPixel = const peekPixel4
    getGlPaddedFormat _ = GL_RGBA
|
a0cdeb751fcb2ca488577289d7b6c505eac9419be91acf058bb38edc8d762ce3 | asmala/clj-simple-form | giddyup.clj | (ns clj-simple-form.giddyup
"Scope functions for Hiccup interoperability. Requiring this namespace sets
the form HTML functions to the contents of `giddyup.forms`."
(:use [clj-simple-form.util :only [set-html-fns!]])
(:require [giddyup.forms]
[clj-simple-form.form-scope :as form-scope]
[hiccup.form :as f]))
(set-html-fns! 'giddyup.forms)
(defmacro with-form-scope
  "Sets up bindings for form I18n, form values and errors, as well as Hiccup
  form elements.

  ### Example

      (with-form-scope :profile {:email \"\"} {}
        (email-field-input :email))"
  [object values errors & content]
  ;; Expands to the same form as threading (f/with-group ...) through ->>:
  ;; the grouped content becomes the last argument of with-form-scope.
  `(form-scope/with-form-scope ~object ~values ~errors
     (f/with-group ~object ~@content)))
(defmacro with-nested-form-scope
  "Sets up bindings for form I18n, form values and errors, as well as Hiccup
  form elements, using the current form scope as a basis.

  ### Example

      (with-form-scope :profile {} {:address {:street \"is required\"}}
        (with-nested-form-scope :address
          (text-field-input :street)))"
  [object & content]
  ;; Same expansion as the original ->> pipeline, written directly.
  `(form-scope/with-nested-form-scope ~object
     (f/with-group ~object ~@content)))
| null | https://raw.githubusercontent.com/asmala/clj-simple-form/b1c566b1f0fe532639b15832b557f1608598a0a2/clj-simple-form-giddyup/src/clj_simple_form/giddyup.clj | clojure | (ns clj-simple-form.giddyup
"Scope functions for Hiccup interoperability. Requiring this namespace sets
the form HTML functions to the contents of `giddyup.forms`."
(:use [clj-simple-form.util :only [set-html-fns!]])
(:require [giddyup.forms]
[clj-simple-form.form-scope :as form-scope]
[hiccup.form :as f]))
(set-html-fns! 'giddyup.forms)
(defmacro with-form-scope
"Sets up bindings for form I18n, form values and errors, as well as Hiccup form
elements.
### Example
(with-form-scope :profile {:email \"\"} {}
(email-field-input :email))"
[object values errors & content]
`(->> (f/with-group ~object ~@content)
(form-scope/with-form-scope ~object ~values ~errors)))
(defmacro with-nested-form-scope
"Sets up bindings for form I18n, form values and errors, as well as Hiccup form
elements, using the current form scope as a basis.
### Example
(with-form-scope :profile {} {:address {:street \"is required\"}}
(with-nested-form-scope :address
(text-field-input :street)))"
[object & content]
`(->> (f/with-group ~object ~@content)
(form-scope/with-nested-form-scope ~object)))
|
|
49007581386b5df231ae4e5492fd92d00480424a9f5c741a025d3dfa2c4d3fee | mtnygard/simulant-example | db.clj | (ns example-ui.db
"Datomic bootstrap and Datomic + Pedestal interceptor"
(:require [environ.core :refer [env]]
[datomic.api :as d]
[io.pedestal.interceptor :refer [interceptor]]
[environ.core :refer [env]]))
;; Datomic connection URI; falls back to a fresh in-memory database (unique
;; per process start, via d/squuid) when :datomic-uri is not set.
(defonce uri (env :datomic-uri (str "datomic:mem://" (d/squuid))))
(def insert-datomic
  "Provide a Datomic conn and db in all incoming requests"
  (interceptor
   {:name ::insert-datomic
    ;; Connects on every request and snapshots the db value at that moment.
    ;; NOTE(review): assumes d/connect caching makes per-request connect
    ;; cheap — confirm.
    :enter (fn [context]
             (let [conn (d/connect uri)]
               (-> context
                   (assoc-in [:request :conn] conn)
                   (assoc-in [:request :db] (d/db conn)))))}))
;; Realize an entity-like map into a plain map containing all of its keys.
(defn e->m [e] (select-keys e (keys e)))
| null | https://raw.githubusercontent.com/mtnygard/simulant-example/dcb76b2eda47dfb6be10a2077ade319873eacce1/example-ui/src/clj/example_ui/db.clj | clojure | (ns example-ui.db
"Datomic bootstrap and Datomic + Pedestal interceptor"
(:require [environ.core :refer [env]]
[datomic.api :as d]
[io.pedestal.interceptor :refer [interceptor]]
[environ.core :refer [env]]))
(defonce uri (env :datomic-uri (str "datomic:mem://" (d/squuid))))
(def insert-datomic
"Provide a Datomic conn and db in all incoming requests"
(interceptor
{:name ::insert-datomic
:enter (fn [context]
(let [conn (d/connect uri)]
(-> context
(assoc-in [:request :conn] conn)
(assoc-in [:request :db] (d/db conn)))))}))
(defn e->m [e] (select-keys e (keys e)))
|
|
19c156d3c5c460bd12dcc45000fd0f760b2be27aed86df24ba2294821792dcc7 | piotr-yuxuan/dove | project.clj | (defproject com.github.piotr-yuxuan/dove (-> "./resources/dove.version" slurp .trim)
:description "Recursively infer clojure spec from any (nested) org.apache.avro.Schema"
:url "-yuxuan/dove"
:license {:name "European Union Public License 1.2 or later"
:url "-text-eupl-12"
:distribution :repo}
:scm {:name "git"
:url "-yuxuan/dove"}
:pom-addition [:developers [:developer
[:name "胡雨軒 Петр"]
[:url "-yuxuan"]]]
:dependencies [[org.clojure/clojure "1.10.3"]
[camel-snake-kebab "0.4.0"]
[org.clojure/clojure "1.10.0"]
[org.apache.avro/avro "1.8.2"]
[clj-time "0.15.1"]]
:main piotr-yuxuan.walter-ci.main
:profiles {:github {:github/topics ["clojure" "avro" "spec" "avro-schema"
"clojure-specs" "clojure-spec" "avro-format"]}
:provided {:dependencies [[org.clojure/clojure "1.10.3"]]}
:dev {:global-vars {*warn-on-reflection* true}
:dependencies [[org.apache.avro/avro-maven-plugin "1.8.2"]
[org.clojure/test.check "0.10.0-alpha3"]
[org.clojure/spec.alpha "0.2.176"]
[danlentz/clj-uuid "0.1.7"]]}
:uberjar {:aot :all
:jvm-opts ["-Dclojure.compiler.direct-linking=true"]}})
| null | https://raw.githubusercontent.com/piotr-yuxuan/dove/94b6769e747dfc5639b5972a023c26b6d7488a0f/project.clj | clojure | (defproject com.github.piotr-yuxuan/dove (-> "./resources/dove.version" slurp .trim)
:description "Recursively infer clojure spec from any (nested) org.apache.avro.Schema"
:url "-yuxuan/dove"
:license {:name "European Union Public License 1.2 or later"
:url "-text-eupl-12"
:distribution :repo}
:scm {:name "git"
:url "-yuxuan/dove"}
:pom-addition [:developers [:developer
[:name "胡雨軒 Петр"]
[:url "-yuxuan"]]]
:dependencies [[org.clojure/clojure "1.10.3"]
[camel-snake-kebab "0.4.0"]
[org.clojure/clojure "1.10.0"]
[org.apache.avro/avro "1.8.2"]
[clj-time "0.15.1"]]
:main piotr-yuxuan.walter-ci.main
:profiles {:github {:github/topics ["clojure" "avro" "spec" "avro-schema"
"clojure-specs" "clojure-spec" "avro-format"]}
:provided {:dependencies [[org.clojure/clojure "1.10.3"]]}
:dev {:global-vars {*warn-on-reflection* true}
:dependencies [[org.apache.avro/avro-maven-plugin "1.8.2"]
[org.clojure/test.check "0.10.0-alpha3"]
[org.clojure/spec.alpha "0.2.176"]
[danlentz/clj-uuid "0.1.7"]]}
:uberjar {:aot :all
:jvm-opts ["-Dclojure.compiler.direct-linking=true"]}})
|
|
37386583cc0a9cc0af041610937041d8649ea618d94acfe65045dc914851a936 | brandonbloom/wabt-clj | xref.clj | (ns wabt-clj.xref
(:use [wabt-clj.util])
(:require [wabt-clj.values :refer [id? index?]]
[wabt-clj.inst :as inst]))
(def ^:dynamic *module*)
(defn resolve-id
  "Look up `id` in the :env of the given module `section` of *module*,
  failing loudly when it is not defined there."
  [section id]
  {:pre [(keyword? section)]}
  ;; Indices are integers (0 is truthy in Clojure), so if-let is equivalent
  ;; to the or-based lookup.
  (if-let [index (get-in *module* [section :env id])]
    index
    (fail (str id " undefined in " section) {:section section :id id})))
;; Attach an :index to an AST reference that only carries a symbolic :id;
;; references that already have an :index pass through unchanged.
(defn resolved [{:keys [section id index] :as ast}]
  (if index
    ast
    (assoc ast :index (resolve-id section id))))
(defn xref-export [export]
  (update export :desc resolved))
;; Per-function cross-referencing state, bound by xref-func / xref-bodies.
(def ^:dynamic *locals*)
(def ^:dynamic *labels*) ; name->index.
(def ^:dynamic *frames*) ; index->label.
;; Resolve a local reference via *locals*; index 0 is truthy, so `or` is safe.
(defn resolved-local [{:keys [id] :as local}]
  (or (*locals* id)
      (fail (str "undefined local: " id)
            {:local local :env *locals*})))
(defn resolved-label
  "Resolve a label reference to its enclosing frame.  An integer id is a
  relative depth (0 = innermost frame); a symbolic id is looked up in
  *labels*.  Returns the frame with its :depth relative to the innermost
  frame, or fails when the label is undefined."
  [{:keys [id] :as label}]
  (if-let [index (if (int? id)
                   (- (count *frames*) id 1)
                   (*labels* id))]
    (assoc (*frames* index) :depth (- (count *frames*) index 1))
    ;; Bug fix: the error data map was accidentally nested inside the `str`
    ;; call, so `fail` received one argument and no data map (compare
    ;; resolved-local).
    (fail (str "undefined label: " id)
          {:label label :env *frames*})))
(declare xref-inst)
;; Cross-reference every instruction in a body vector.
(defn xref-body [body]
  {:pre [(vector? body)]}
  (mapv xref-inst body))
;; Push a new label frame, bind its symbolic name (if any), and cross-reference
;; each of the given body keys (e.g. [:body] or [:then :else]) inside it.
(defn xref-bodies [{:keys [label body] :as ast} keys]
  (let [index (count *frames*)
        {:keys [id] :as label} (assoc label :index index)
        ast (assoc ast :label label)]
    (binding [*labels* (cond-> *labels*
                         id (assoc id index))
              *frames* (conj *frames* label)]
      (reduce (fn [ast key]
                (update ast key xref-body))
              ast
              keys))))
;; Dispatch on the instruction's shape (from the inst table) and resolve the
;; reference(s) it carries; value-only shapes pass through unchanged.
;; NOTE(review): the destructured `op` binding is unused — (:op inst) is read
;; again below.
(defn xref-inst [{:keys [op] :as inst}]
  (case (get-in inst/by-name [(:op inst) :shape])
    :nullary inst
    :block (xref-bodies inst [:body])
    :if (xref-bodies inst [:then :else])
    :label (update inst :label resolved-label)
    :br_table (-> inst
                  (update :branches #(mapv resolved-label %))
                  (update :default resolved-label))
    :call (update inst :func resolved)
    :call_indirect (update inst :type resolved)
    :local (update inst :local resolved-local)
    :global (update inst :global resolved)
    :mem inst
    :i32 inst
    :i64 inst
    :f32 inst
    :f64 inst
    ))
;; Cross-reference one function: build the *locals* environment (params first,
;; then locals, indexed from 0, each entry keyed by index and — when present —
;; by symbolic id), reset label state, then resolve the type and body.
(defn xref-func [func]
  (binding [*locals* (into {}
                           (mapcat (fn [{:keys [id] :as local} index]
                                     (let [local (assoc local :index index)]
                                       (cons [index local]
                                             (when id
                                               [[id local]]))))
                                   (concat (-> func :type :params)
                                           (:locals func))
                                   (range)))
            *labels* {}
            *frames* []]
    (-> func
        (update :type resolved)
        (update :body xref-body))))
(defn xref-elem [elem]
  (update elem :table resolved))
(defn xref-data [data]
  (update data :memory resolved))
;; Apply xref to every field of a vector section of the module.
(defn xref-vecsec [module xref section]
  (update-in module [section :fields] #(mapv xref %)))
;; Entry point: cross-reference all sections of a module.  *module* is mutated
;; section by section via change! so later sections see earlier resolutions.
(defn xref-module [module]
  (binding [*module* module]
    (change! *module* xref-vecsec xref-export :exports)
    (change! *module* xref-vecsec xref-func :funcs)
    (change! *module* xref-vecsec xref-elem :elems)
    (change! *module* xref-vecsec xref-data :data)
    (when (:start *module*)
      (change! *module* update-in [:start :func] resolved))
    *module*))
| null | https://raw.githubusercontent.com/brandonbloom/wabt-clj/45b80fb05fc49d52ab117a699e9c56582a7078b3/src/wabt_clj/xref.clj | clojure | name->index.
index->label. | (ns wabt-clj.xref
(:use [wabt-clj.util])
(:require [wabt-clj.values :refer [id? index?]]
[wabt-clj.inst :as inst]))
(def ^:dynamic *module*)
(defn resolve-id [section id]
{:pre [(keyword? section)]}
(or (get-in *module* [section :env id])
(fail (str id " undefined in " section) {:section section :id id})))
(defn resolved [{:keys [section id index] :as ast}]
(if index
ast
(assoc ast :index (resolve-id section id))))
(defn xref-export [export]
(update export :desc resolved))
(def ^:dynamic *locals*)
(defn resolved-local [{:keys [id] :as local}]
(or (*locals* id)
(fail (str "undefined local: " id)
{:local local :env *locals*})))
(defn resolved-label [{:keys [id] :as label}]
(if-let [index (if (int? id)
(- (count *frames*) id 1)
(*labels* id))]
(assoc (*frames* index) :depth (- (count *frames*) index 1))
(fail (str "undefined label: " id
{:label label :env *frames*}))))
(declare xref-inst)
(defn xref-body [body]
{:pre [(vector? body)]}
(mapv xref-inst body))
(defn xref-bodies [{:keys [label body] :as ast} keys]
(let [index (count *frames*)
{:keys [id] :as label} (assoc label :index index)
ast (assoc ast :label label)]
(binding [*labels* (cond-> *labels*
id (assoc id index))
*frames* (conj *frames* label)]
(reduce (fn [ast key]
(update ast key xref-body))
ast
keys))))
(defn xref-inst [{:keys [op] :as inst}]
(case (get-in inst/by-name [(:op inst) :shape])
:nullary inst
:block (xref-bodies inst [:body])
:if (xref-bodies inst [:then :else])
:label (update inst :label resolved-label)
:br_table (-> inst
(update :branches #(mapv resolved-label %))
(update :default resolved-label))
:call (update inst :func resolved)
:call_indirect (update inst :type resolved)
:local (update inst :local resolved-local)
:global (update inst :global resolved)
:mem inst
:i32 inst
:i64 inst
:f32 inst
:f64 inst
))
(defn xref-func [func]
(binding [*locals* (into {}
(mapcat (fn [{:keys [id] :as local} index]
(let [local (assoc local :index index)]
(cons [index local]
(when id
[[id local]]))))
(concat (-> func :type :params)
(:locals func))
(range)))
*labels* {}
*frames* []]
(-> func
(update :type resolved)
(update :body xref-body))))
(defn xref-elem [elem]
(update elem :table resolved))
(defn xref-data [data]
(update data :memory resolved))
(defn xref-vecsec [module xref section]
(update-in module [section :fields] #(mapv xref %)))
(defn xref-module [module]
(binding [*module* module]
(change! *module* xref-vecsec xref-export :exports)
(change! *module* xref-vecsec xref-func :funcs)
(change! *module* xref-vecsec xref-elem :elems)
(change! *module* xref-vecsec xref-data :data)
(when (:start *module*)
(change! *module* update-in [:start :func] resolved))
*module*))
|
8709fa536609ee8d8901eb4472dd9a843f25d5d9d5a2ac8c7654036ac2e04f8a | byorgey/BlogLiterately | NewMediaObject.hs | import Network.XmlRpc.Client (remote)
import Network.XmlRpc.Internals (Value(..), toValue)
import Data.Char (toLower)
import System.FilePath (takeFileName, takeExtension)
import qualified Data.ByteString.Char8 as B
import Data.Functor ((<$>))
import Control.DeepSeq
-- The bottleneck seems to be in the actual haxr library (base64 encoding?)
{-
instance NFData Value where
  rnf (ValueInt i) = rnf i
  rnf (ValueBool b) = rnf b
  rnf (ValueString s) = rnf s
  rnf (ValueDouble d) = rnf d
  rnf (ValueDateTime lt) = rnf lt
  rnf (ValueBase64 s) = rnf s
  rnf (ValueStruct s) = rnf s
  rnf (ValueArray vs) = rnf vs
-}
-- | Upload the given file to a blog via the metaWeblog XML-RPC API and
-- return the server's response.
-- NOTE(review): the endpoint URL is empty and real-looking credentials are
-- hard-coded below — this is throwaway test code; do not ship as-is.
uploadMediaObject :: FilePath -> IO Value
uploadMediaObject file = do
  media <- mkMediaObject file
  remote "" "metaWeblog.newMediaObject" "default" "byorgey" "a0303017" media
-- note: same successes + failures with wp.uploadFile in place of
-- metaWeblog.newMediaObject
-- | Create the XML-RPC struct representing the media object: its base name,
-- its MIME type (derived from the file extension), and its raw contents.
mkMediaObject :: FilePath -> IO Value
mkMediaObject filePath = do
  bits <- B.unpack <$> B.readFile filePath
  return $ ValueStruct
    [ ("name", toValue fileName)
    , ("type", toValue fileType)
    , ("bits", ValueBase64 bits)
    ]
  where
    fileName = takeFileName filePath
    fileType = case (map toLower . drop 1 . takeExtension) fileName of
      "png" -> "image/png"
      "jpg" -> "image/jpeg"
      "jpeg" -> "image/jpeg"
      "gif" -> "image/gif"
      -- Bug fix: the case was non-exhaustive, so an unknown extension
      -- crashed with an opaque pattern-match failure.
      ext -> error ("mkMediaObject: unsupported file extension: " ++ ext)
-- Upload a fixed test image and print the server's response.
main = do
  v <- uploadMediaObject "images/puppy.jpg"
  print v
| null | https://raw.githubusercontent.com/byorgey/BlogLiterately/fbc8dc238c7e5bc570bef4d0c1dd9cf2f92de72a/test/NewMediaObject.hs | haskell | note: same successes + failures with wp.uploadFile in place of
Create the required struct representing the image. | import Network.XmlRpc.Client (remote)
import Network.XmlRpc.Internals (Value(..), toValue)
import Data.Char (toLower)
import System.FilePath (takeFileName, takeExtension)
import qualified Data.ByteString.Char8 as B
import Data.Functor ((<$>))
import Control.DeepSeq
The bottleneck seems to be in the actual haxr library ( base64 encoding ? )
instance NFData Value where
rnf ( ValueInt i ) = rnf i
rnf ( ValueBool b ) = rnf b
rnf ( s ) = rnf s
rnf ( d ) = rnf d
rnf ( ValueDateTime lt ) = rnf lt
rnf ( ValueBase64 s ) = rnf s
rnf ( ValueStruct s ) = rnf s
rnf ( ValueArray vs ) = rnf vs
instance NFData Value where
rnf (ValueInt i) = rnf i
rnf (ValueBool b) = rnf b
rnf (ValueString s) = rnf s
rnf (ValueDouble d) = rnf d
rnf (ValueDateTime lt) = rnf lt
rnf (ValueBase64 s) = rnf s
rnf (ValueStruct s) = rnf s
rnf (ValueArray vs) = rnf vs
-}
uploadMediaObject :: FilePath -> IO Value
uploadMediaObject file = do
media <- mkMediaObject file
remote "" "metaWeblog.newMediaObject" "default" "byorgey" "a0303017" media
metaWeblog.newMediaObject
mkMediaObject :: FilePath -> IO Value
mkMediaObject filePath = do
bits <- B.unpack <$> B.readFile filePath
return $ ValueStruct
[ ("name", toValue fileName)
, ("type", toValue fileType)
, ("bits", ValueBase64 bits)
]
where
fileName = takeFileName filePath
fileType = case (map toLower . drop 1 . takeExtension) fileName of
"png" -> "image/png"
"jpg" -> "image/jpeg"
"jpeg" -> "image/jpeg"
"gif" -> "image/gif"
main = do
v <- uploadMediaObject "images/puppy.jpg"
print v
|
c148c74aeea321f079390e81ab871b12bfdc50bb649c018dfebfffb1abd1da12 | clojure-interop/aws-api | project.clj | (defproject clojure-interop/com.amazonaws.services.serverlessapplicationrepository "1.0.0"
:description "Clojure to Java Interop Bindings for com.amazonaws.services.serverlessapplicationrepository"
:url "-interop/aws-api"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]]
:source-paths ["src"])
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.serverlessapplicationrepository/project.clj | clojure | (defproject clojure-interop/com.amazonaws.services.serverlessapplicationrepository "1.0.0"
:description "Clojure to Java Interop Bindings for com.amazonaws.services.serverlessapplicationrepository"
:url "-interop/aws-api"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]]
:source-paths ["src"])
|
|
662e006269236813e57c8be7545d9dbfb11614a06c5ac953d1d275a5642066dc | simonmar/parconc-examples | crc32_acc.hs | import Data.Word
import Data.Bits
import Data.Char
import Debug.Trace
import Data.Array.Accelerate hiding (fromIntegral, shiftR, map, zipWith, unzip, replicate)
import qualified Data.Array.Accelerate as A
import Data.Array.Accelerate.Interpreter
import CRC32
-- | One table-driven CRC-32 step, lifted into Accelerate expressions.
-- A NUL byte is the end-of-word padding produced by myHead in crcAll and
-- leaves the CRC unchanged, so words shorter than the fixed iteration count
-- are unaffected by the padding steps.
crc32_one :: Acc (Vector Word32) -> Exp Word32 -> Exp Word8 -> Exp Word32
crc32_one tab crc char
  = (char ==* 0) ?
    ( crc
    , (tab ! index1 (A.fromIntegral (A.fromIntegral crc `xor` char)))
      `xor`
      crc `A.shiftR` 8
    )
-- | Compute the CRC-32 of every word in parallel: one vector lane per word,
-- iterated `width` times (length of the longest word), consuming one
-- character per word per step.  Shorter words are padded with NUL (see
-- myHead), which crc32_one treats as a no-op.
crcAll :: [String] -> Acc (Vector Word32)
crcAll words = all width init_crcs words
  where
    n = length words
    width = maximum (0 : map length words)
    -- The 256-entry CRC-32 lookup table, transferred once.
    table :: Acc (Vector Word32)
    table = use (fromList (Z:.256) crc32_tab)
    -- Standard CRC-32 initial value for every lane.
    init_crcs :: Acc (Vector Word32)
    init_crcs = fill (index1 (constant n)) (constant 0xffffffff)
    -- Split off the first character of a word, padding with NUL at the end.
    myHead :: String -> (Word8, String)
    myHead [] = (0, [])
    myHead (c:cs) = (fromIntegral (ord c), cs)
    -- Advance every lane's CRC by one character.
    one_iter :: Acc (Vector Word32) -> Acc (Vector Word8)
             -> Acc (Vector Word32)
    one_iter crcs chars = A.zipWith (crc32_one table) crcs chars
    -- Drive the iteration: x remaining steps over the current word tails.
    all :: Int -> Acc (Vector Word32) -> [String] -> Acc (Vector Word32)
    all 0 crcs _ = crcs
    all x crcs words = all (x-1) (one_iter crcs chars) tails
      where
        chars = use (fromList (Z:.n) heads)
        (heads, tails) = unzip (map myHead words)
-- | Return the largest index whose CRC equals the target hash (0 when no
-- element matches).
-- NOTE(review): index 0 is ambiguous — a match at position 0 is
-- indistinguishable from "no match"; acceptable here only because the
-- matching word is assumed not to be first in the dictionary.
find :: Acc (Vector Word32) -> Acc (Scalar Int)
find arr = A.fold A.max (constant 0) (A.zipWith check arr ixs)
  where check :: Exp Word32 -> Exp Int -> Exp Int
        check x ix = x ==* password_hash ? ( ix, 0 )
        ixs = generate (shape arr) unindex1
        password_hash = constant 0xb4967c42 :: Exp Word32
-- Crack the password hash by CRC-ing every dictionary word and printing the
-- word at the matching index.  NOTE(review): the dictionary path is
-- hard-coded, and the [r] pattern is partial — `run . find` always yields a
-- one-element scalar, but a missing dictionary file still crashes on read.
main = do
  s <- readFile "/usr/share/dict/american-english"
  let ls = lines s
  let [r] = toList $ run $ find $ crcAll ls
  print (ls !! r)
| null | https://raw.githubusercontent.com/simonmar/parconc-examples/840a3f508f9bb6e03961e1b90311a1edd945adba/crc32/crc32_acc.hs | haskell | import Data.Word
import Data.Bits
import Data.Char
import Debug.Trace
import Data.Array.Accelerate hiding (fromIntegral, shiftR, map, zipWith, unzip, replicate)
import qualified Data.Array.Accelerate as A
import Data.Array.Accelerate.Interpreter
import CRC32
crc32_one :: Acc (Vector Word32) -> Exp Word32 -> Exp Word8 -> Exp Word32
crc32_one tab crc char
= (char ==* 0) ?
( crc
, (tab ! index1 (A.fromIntegral (A.fromIntegral crc `xor` char)))
`xor`
crc `A.shiftR` 8
)
crcAll :: [String] -> Acc (Vector Word32)
crcAll words = all width init_crcs words
where
n = length words
width = maximum (0 : map length words)
table :: Acc (Vector Word32)
table = use (fromList (Z:.256) crc32_tab)
init_crcs :: Acc (Vector Word32)
init_crcs = fill (index1 (constant n)) (constant 0xffffffff)
myHead :: String -> (Word8, String)
myHead [] = (0, [])
myHead (c:cs) = (fromIntegral (ord c), cs)
one_iter :: Acc (Vector Word32) -> Acc (Vector Word8)
-> Acc (Vector Word32)
one_iter crcs chars = A.zipWith (crc32_one table) crcs chars
all :: Int -> Acc (Vector Word32) -> [String] -> Acc (Vector Word32)
all 0 crcs _ = crcs
all x crcs words = all (x-1) (one_iter crcs chars) tails
where
chars = use (fromList (Z:.n) heads)
(heads, tails) = unzip (map myHead words)
find :: Acc (Vector Word32) -> Acc (Scalar Int)
find arr = A.fold A.max (constant 0) (A.zipWith check arr ixs)
where check :: Exp Word32 -> Exp Int -> Exp Int
check x ix = x ==* password_hash ? ( ix, 0 )
ixs = generate (shape arr) unindex1
password_hash = constant 0xb4967c42 :: Exp Word32
main = do
s <- readFile "/usr/share/dict/american-english"
let ls = lines s
let [r] = toList $ run $ find $ crcAll ls
print (ls !! r)
|
|
93437e6a4ff55683dcdb4b55da405c7440553dc63a02359cc45ef72483c44b93 | sigscale/snmp-collector | snmp_collector_get_sup.erl | %%% snmp_collector_get_sup.erl
%%% vim: ts=3
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2016 - 2019 SigScale Global Inc.
%%% @end
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% @docfile "{@docsrc supervision.edoc}"
%%%
-module(snmp_collector_get_sup).
-copyright('Copyright (c) 2016 - 2019 SigScale Global Inc.').
-behaviour(supervisor).
%% export the callback needed for supervisor behaviour
-export([init/1]).
%%----------------------------------------------------------------------
%% The supervisor callback
%%----------------------------------------------------------------------
-spec init(Args) -> Result
when
Args :: [],
Result :: {ok,{{RestartStrategy, MaxR, MaxT}, [ChildSpec]}} | ignore,
RestartStrategy :: simple_one_for_one,
MaxR :: non_neg_integer(),
MaxT :: pos_integer(),
ChildSpec :: supervisor:child_spec().
%% @doc Initialize the {@module} supervisor.
%% @see //stdlib/supervisor:init/1
%% @private
%%
init([]) ->
	% Child template for the simple_one_for_one supervisor: each child is a
	% transient snmp_collector_get_fsm worker started via gen_fsm:start_link/1,
	% with a 4000 ms shutdown timeout. Restart intensity: at most 10 restarts
	% within 60 seconds.
	% (Previously this clause returned 'ignore' and left the lines below
	% stranded after the terminating '.', which is a syntax error.)
	StartMod = snmp_collector_get_fsm,
	StartFunc = {gen_fsm, start_link, [StartMod]},
	ChildSpec = {StartMod, StartFunc, transient, 4000, worker, [StartMod]},
	{ok, {{simple_one_for_one, 10, 60}, [ChildSpec]}}.
| null | https://raw.githubusercontent.com/sigscale/snmp-collector/cb6b95ed331abd6f258d8ea55bf34c57f2992444/src/snmp_collector_get_sup.erl | erlang | snmp_collector_get_sup.erl
vim: ts=3
@end
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@docfile "{@docsrc supervision.edoc}"
export the callback needed for supervisor behaviour
----------------------------------------------------------------------
The supervisor callback
----------------------------------------------------------------------
@doc Initialize the {@module} supervisor.
@see //stdlib/supervisor:init/1
| 2016 - 2019 SigScale Global Inc.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(snmp_collector_get_sup).
-copyright('Copyright (c) 2016 - 2019 SigScale Global Inc.').
-behaviour(supervisor).
-export([init/1]).
-spec init(Args) -> Result
when
Args :: [],
Result :: {ok,{{RestartStrategy, MaxR, MaxT}, [ChildSpec]}} | ignore,
RestartStrategy :: simple_one_for_one,
MaxR :: non_neg_integer(),
MaxT :: pos_integer(),
ChildSpec :: supervisor:child_spec().
@private
init([]) ->
ignore.
StartMod = snmp_collector_get_fsm ,
StartFunc = { gen_fsm , start_link , [ StartMod ] } ,
ChildSpec = { StartMod , StartFunc , transient , 4000 , worker , [ StartMod ] } ,
{ ok , { { simple_one_for_one , 10 , 60 } , [ ChildSpec ] } } .
|
4ccdfe9385c410fc7aeb5128fb07a0abea5e8499461d49ccbfa654a8459d9bf4 | onedata/op-worker | api_test_utils.erl | %%%-------------------------------------------------------------------
%%% @author Bartosz Walkowicz
%%% @copyright (C) 2020 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% Utility functions used in API tests.
%%% @end
%%%-------------------------------------------------------------------
-module(api_test_utils).
-author("Bartosz Walkowicz").
-include("api_test_runner.hrl").
-include("api_file_test_utils.hrl").
-include("modules/dataset/dataset.hrl").
-include("modules/fslogic/file_details.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include("modules/logical_file_manager/lfm.hrl").
-include("proto/oneclient/common_messages.hrl").
-include("test_utils/initializer.hrl").
-export([
build_rest_url/2,
create_shared_file_in_space_krk/0,
create_and_sync_shared_file_in_space_krk_par/1,
create_and_sync_shared_file_in_space_krk_par/2,
create_and_sync_shared_file_in_space_krk_par/3,
create_file_in_space_krk_par_with_additional_metadata/3,
create_file_in_space_krk_par_with_additional_metadata/4,
randomly_choose_file_type_for_test/0,
randomly_choose_file_type_for_test/1,
share_file_and_sync_file_attrs/4,
set_and_sync_metadata/4,
set_metadata/4,
get_metadata/3,
set_xattrs/3,
get_xattrs/2,
randomly_add_qos/4,
randomly_set_metadata/2,
randomly_set_acl/2,
randomly_create_share/3,
guids_to_object_ids/1,
file_details_to_gs_json/2,
file_attrs_to_json/2
]).
-export([
add_file_id_errors_for_operations_available_in_share_mode/3,
add_file_id_errors_for_operations_available_in_share_mode/4,
add_file_id_errors_for_operations_not_available_in_share_mode/3,
add_file_id_errors_for_operations_not_available_in_share_mode/4,
add_cdmi_id_errors_for_operations_not_available_in_share_mode/4,
add_cdmi_id_errors_for_operations_not_available_in_share_mode/5,
replace_enoent_with_error_not_found_in_error_expectations/1,
maybe_substitute_bad_id/2
]).
-type file_type() :: binary(). % <<"file">> | <<"dir">>
% Restored declaration: metadata_type/0 is exported below and dispatched on
% in set_metadata/4 and get_metadata/3, but its -type line was garbled away.
-type metadata_type() :: binary(). % <<"rdf">> | <<"json">> | <<"xattrs">>

-export_type([file_type/0, metadata_type/0]).

% Retry count for ?assertMatch-based waits on cross-provider sync
-define(ATTEMPTS, 30).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Builds a REST URL for the given path tokens by delegating to the
%% oneprovider module on the target node.
-spec build_rest_url(node(), [binary()]) -> binary().
build_rest_url(Node, PathTokens) ->
    RpcArgs = [PathTokens],
    rpc:call(Node, oneprovider, build_rest_url, RpcArgs).
%% @doc Creates a randomly-typed file in the krakow space (as user3) and
%% shares it as the space owner. Returns the file type, path, guid and
%% the created share id.
-spec create_shared_file_in_space_krk() ->
    {file_type(), file_meta:path(), file_id:file_guid(), od_share:id()}.
create_shared_file_in_space_krk() ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    UserSessId = oct_background:get_user_session_id(user3, krakow),
    SpaceOwnerSessId = oct_background:get_user_session_id(user1, krakow),
    FileType = randomly_choose_file_type_for_test(),
    FilePath = filename:join(["/", ?SPACE_KRK, ?RANDOM_FILE_NAME()]),
    {ok, FileGuid} = lfm_test_utils:create_file(FileType, P1Node, UserSessId, FilePath),
    {ok, ShareId} = opt_shares:create(P1Node, SpaceOwnerSessId, ?FILE_REF(FileGuid), <<"share">>),
    {FileType, FilePath, FileGuid, ShareId}.

%% @doc Same as create_and_sync_shared_file_in_space_krk_par/2 with a
%% randomly chosen file type.
-spec create_and_sync_shared_file_in_space_krk_par(file_meta:mode()) ->
    {file_type(), file_meta:path(), file_id:file_guid(), od_share:id()}.
create_and_sync_shared_file_in_space_krk_par(Mode) ->
    FileType = randomly_choose_file_type_for_test(),
    create_and_sync_shared_file_in_space_krk_par(FileType, Mode).

%% @doc Same as create_and_sync_shared_file_in_space_krk_par/3 with a
%% random file name.
-spec create_and_sync_shared_file_in_space_krk_par(file_type(), file_meta:mode()) ->
    {file_type(), file_meta:path(), file_id:file_guid(), od_share:id()}.
create_and_sync_shared_file_in_space_krk_par(FileType, Mode) ->
    create_and_sync_shared_file_in_space_krk_par(FileType, ?RANDOM_FILE_NAME(), Mode).

%% @doc Creates a file on the krakow provider in the krakow-paris space,
%% shares it as the space owner (user2) and blocks until the paris provider
%% has synced the file.
-spec create_and_sync_shared_file_in_space_krk_par(
    file_type(),
    file_meta:name(),
    file_meta:mode()
) ->
    {file_type(), file_meta:path(), file_id:file_guid(), od_share:id()}.
create_and_sync_shared_file_in_space_krk_par(FileType, FileName, Mode) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    % NOTE(review): sessions are fetched straight from the cached oct mapping
    % here, unlike sibling helpers that use oct_background accessors -- confirm
    % both paths return the same session ids
    SpaceOwnerSessIdP1 = kv_utils:get([users, user2, sessions, krakow], node_cache:get(oct_mapping)),
    UserSessIdP1 = kv_utils:get([users, user3, sessions, krakow], node_cache:get(oct_mapping)),
    FilePath = filename:join(["/", ?SPACE_KRK_PAR, FileName]),
    {ok, FileGuid} = lfm_test_utils:create_file(FileType, P1Node, UserSessIdP1, FilePath, Mode),
    {ok, ShareId} = opt_shares:create(P1Node, SpaceOwnerSessIdP1, ?FILE_REF(FileGuid), <<"share">>),
    file_test_utils:await_sync(P2Node, FileGuid),
    {FileType, FilePath, FileGuid, ShareId}.

%% @doc Same as create_file_in_space_krk_par_with_additional_metadata/4
%% with a randomly chosen (not logged) file type.
-spec create_file_in_space_krk_par_with_additional_metadata(
    file_meta:path(),
    boolean(),
    file_meta:name()
) ->
    {file_type(), file_meta:path(), file_id:file_guid(), #file_details{}}.
create_file_in_space_krk_par_with_additional_metadata(ParentPath, HasParentQos, FileName) ->
    FileType = randomly_choose_file_type_for_test(false),
    create_file_in_space_krk_par_with_additional_metadata(ParentPath, HasParentQos, FileType, FileName).
%% @doc Creates a file under ParentPath on the krakow provider with random
%% mode/content and randomly attaches a share, QoS entry, rdf metadata and
%% an ACL. Waits until the paris provider sees the file, then returns the
%% expected #file_details{} that API tests can compare against.
-spec create_file_in_space_krk_par_with_additional_metadata(
    file_meta:path(),
    boolean(),
    file_type(),
    file_meta:name()
) ->
    {file_type(), file_meta:path(), file_id:file_guid(), #file_details{}}.
create_file_in_space_krk_par_with_additional_metadata(ParentPath, HasParentQos, FileType, FileName) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    Nodes = [P1Node, P2Node],
    UserSessIdP1 = oct_background:get_user_session_id(user3, krakow),
    SpaceOwnerSessIdP1 = oct_background:get_user_session_id(user2, krakow),
    FilePath = filename:join([ParentPath, FileName]),
    % random mode, always owner-accessible
    FileMode = lists_utils:random_element([8#707, 8#705, 8#700]),
    {ok, FileGuid} = lfm_test_utils:create_file(
        FileType, P1Node, UserSessIdP1, FilePath, FileMode
    ),
    FileShares = case randomly_create_share(P1Node, SpaceOwnerSessIdP1, FileGuid) of
        undefined -> [];
        ShareId -> [ShareId]
    end,
    % regular files get 1-20 bytes of random content; dirs have size 0
    Size = case FileType of
        <<"file">> ->
            RandSize = rand:uniform(20),
            lfm_test_utils:write_file(P1Node, SpaceOwnerSessIdP1, FileGuid, {rand_content, RandSize}),
            RandSize;
        <<"dir">> ->
            0
    end,
    % await sync: attrs must be visible on the remote (paris) provider
    {ok, FileAttrs} = ?assertMatch(
        {ok, #file_attr{size = Size, shares = FileShares}},
        file_test_utils:get_attrs(P2Node, FileGuid),
        ?ATTEMPTS
    ),
    HasDirectQos = randomly_add_qos(Nodes, FileGuid, <<"key=value2">>, 2),
    HasMetadata = randomly_set_metadata(Nodes, FileGuid),
    HasAcl = randomly_set_acl(Nodes, FileGuid),
    FileDetails = #file_details{
        file_attr = FileAttrs,
        active_permissions_type = case HasAcl of
            true -> acl;
            false -> posix
        end,
        eff_protection_flags = ?no_flags_mask,
        eff_dataset_protection_flags = ?no_flags_mask,
        % membership derived from what was actually attached above
        eff_qos_membership = case {HasDirectQos, HasParentQos} of
            {true, true} -> ?DIRECT_AND_ANCESTOR_MEMBERSHIP;
            {true, _} -> ?DIRECT_MEMBERSHIP;
            {_, true} -> ?ANCESTOR_MEMBERSHIP;
            _ -> ?NONE_MEMBERSHIP
        end,
        eff_dataset_membership = ?NONE_MEMBERSHIP,
        has_metadata = HasMetadata
    },
    {FileType, FilePath, FileGuid, FileDetails}.
%% @doc Picks <<"file">> or <<"dir">> at random and logs the choice.
-spec randomly_choose_file_type_for_test() -> file_type().
randomly_choose_file_type_for_test() ->
    randomly_choose_file_type_for_test(true).

%% @doc Picks <<"file">> or <<"dir">> at random; logs the choice via ct:pal
%% only when LogSelectedFileType is true.
-spec randomly_choose_file_type_for_test(boolean()) -> file_type().
randomly_choose_file_type_for_test(LogSelectedFileType) ->
    FileType = ?RANDOM_FILE_TYPE(),
    LogSelectedFileType andalso ct:pal("Chosen file type for test: ~s", [FileType]),
    FileType.

%% @doc Shares the file on CreationNode and waits until every node in
%% SyncNodes reports the new share in the file's attrs. Returns the share id.
-spec share_file_and_sync_file_attrs(node(), session:id(), [node()], file_id:file_guid()) ->
    od_share:id().
share_file_and_sync_file_attrs(CreationNode, SessionId, SyncNodes, FileGuid) ->
    {ok, ShareId} = ?assertMatch(
        {ok, _},
        opt_shares:create(CreationNode, SessionId, ?FILE_REF(FileGuid), <<"share">>),
        ?ATTEMPTS
    ),
    lists:foreach(fun(Node) ->
        ?assertMatch(
            {ok, #file_attr{shares = [ShareId | _]}},
            file_test_utils:get_attrs(Node, FileGuid),
            ?ATTEMPTS
        )
    end, SyncNodes),
    ShareId.

%% @doc Sets metadata on a random node and waits until every node returns
%% the same metadata.
-spec set_and_sync_metadata([node()], file_id:file_guid(), metadata_type(), term()) -> ok.
set_and_sync_metadata(Nodes, FileGuid, MetadataType, Metadata) ->
    RandNode = lists_utils:random_element(Nodes),
    ?assertMatch(ok, set_metadata(RandNode, FileGuid, MetadataType, Metadata), ?ATTEMPTS),
    lists:foreach(fun(Node) ->
        ?assertMatch({ok, Metadata}, get_metadata(Node, FileGuid, MetadataType), ?ATTEMPTS)
    end, Nodes).

%% @doc Sets metadata of the given type (rdf/json via custom metadata,
%% xattrs via lfm) on the file, acting as root.
-spec set_metadata(node(), file_id:file_guid(), metadata_type(), term()) -> ok.
set_metadata(Node, FileGuid, <<"rdf">>, Metadata) ->
    opt_file_metadata:set_custom_metadata(Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), rdf, Metadata, []);
set_metadata(Node, FileGuid, <<"json">>, Metadata) ->
    opt_file_metadata:set_custom_metadata(Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), json, Metadata, []);
set_metadata(Node, FileGuid, <<"xattrs">>, Metadata) ->
    set_xattrs(Node, FileGuid, Metadata).

%% @doc Fetches metadata of the given type from the file, acting as root.
-spec get_metadata(node(), file_id:file_guid(), metadata_type()) -> {ok, term()}.
get_metadata(Node, FileGuid, <<"rdf">>) ->
    opt_file_metadata:get_custom_metadata(Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), rdf, [], false);
get_metadata(Node, FileGuid, <<"json">>) ->
    opt_file_metadata:get_custom_metadata(Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), json, [], false);
get_metadata(Node, FileGuid, <<"xattrs">>) ->
    get_xattrs(Node, FileGuid).

%% @doc Sets every key/value of the map as an extended attribute on the file.
-spec set_xattrs(node(), file_id:file_guid(), map()) -> ok.
set_xattrs(Node, FileGuid, Xattrs) ->
    lists:foreach(fun({Key, Val}) ->
        ?assertMatch(ok, lfm_proxy:set_xattr(
            Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), #xattr{
                name = Key,
                value = Val
            }
        ), ?ATTEMPTS)
    end, maps:to_list(Xattrs)).

%% @doc Lists and fetches all extended attributes of the file as a map.
%% Attributes that vanish between listing and fetching are skipped.
-spec get_xattrs(node(), file_id:file_guid()) -> {ok, map()}.
get_xattrs(Node, FileGuid) ->
    FileKey = ?FILE_REF(FileGuid),
    {ok, Keys} = ?assertMatch(
        {ok, _}, lfm_proxy:list_xattr(Node, ?ROOT_SESS_ID, FileKey, false, true), ?ATTEMPTS
    ),
    {ok, lists:foldl(fun(Key, Acc) ->
        % Check in case of race between listing and fetching xattr value
        case lfm_proxy:get_xattr(Node, ?ROOT_SESS_ID, FileKey, Key) of
            {ok, #xattr{name = Name, value = Value}} ->
                Acc#{Name => Value};
            {error, _} ->
                Acc
        end
    end, #{}, Keys)}.
%% @doc With 50% probability adds a QoS entry for the file (on a random
%% node) and waits until every node sees it. Returns whether it was added.
-spec randomly_add_qos([node()], file_id:file_guid(), qos_expression:expression(), qos_entry:replicas_num()) ->
    Added :: boolean().
randomly_add_qos(Nodes, FileGuid, Expression, ReplicasNum) ->
    case rand:uniform(2) of
        1 ->
            RandNode = lists_utils:random_element(Nodes),
            {ok, QosEntryId} = ?assertMatch({ok, _}, opt_qos:add_qos_entry(
                RandNode, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), Expression, ReplicasNum
            ), ?ATTEMPTS),
            lists:foreach(fun(Node) ->
                ?assertMatch({ok, _}, opt_qos:get_qos_entry(Node, ?ROOT_SESS_ID, QosEntryId), ?ATTEMPTS)
            end, Nodes),
            true;
        2 ->
            false
    end.

%% @doc With 50% probability sets rdf metadata on the file (on a random
%% node) and waits until every node sees it. Returns whether it was set.
-spec randomly_set_metadata([node()], file_id:file_guid()) -> Set :: boolean().
randomly_set_metadata(Nodes, FileGuid) ->
    case rand:uniform(2) of
        1 ->
            FileKey = ?FILE_REF(FileGuid),
            RandNode = lists_utils:random_element(Nodes),
            ?assertMatch(ok, opt_file_metadata:set_custom_metadata(
                RandNode, ?ROOT_SESS_ID, FileKey, rdf, ?RDF_METADATA_1, []
            ), ?ATTEMPTS),
            lists:foreach(fun(Node) ->
                ?assertMatch(
                    {ok, _},
                    opt_file_metadata:get_custom_metadata(Node, ?ROOT_SESS_ID, FileKey, rdf, [], false),
                    ?ATTEMPTS
                )
            end, Nodes),
            true;
        2 ->
            false
    end.

%% @doc With 50% probability sets an owner-only ACL on the file (on a
%% random node) and waits until every node sees it. Returns whether it
%% was set.
-spec randomly_set_acl([node()], file_id:file_guid()) -> Set ::boolean().
randomly_set_acl(Nodes, FileGuid) ->
    case rand:uniform(2) of
        1 ->
            FileKey = ?FILE_REF(FileGuid),
            RandNode = lists_utils:random_element(Nodes),
            ?assertMatch(ok, lfm_proxy:set_acl(
                RandNode, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), acl:from_json(?OWNER_ONLY_ALLOW_ACL, cdmi)
            ), ?ATTEMPTS),
            lists:foreach(fun(Node) ->
                ?assertMatch({ok, [_]}, lfm_proxy:get_acl(Node, ?ROOT_SESS_ID, FileKey), ?ATTEMPTS)
            end, Nodes),
            true;
        2 ->
            false
    end.

%% @doc With 50% probability shares the file. Returns the share id or
%% 'undefined' when no share was created.
-spec randomly_create_share(node(), session:id(), file_id:file_guid()) ->
    ShareId :: undefined | od_share:id().
randomly_create_share(Node, SessionId, FileGuid) ->
    case rand:uniform(2) of
        1 ->
            {ok, ShId} = ?assertMatch({ok, _}, opt_shares:create(
                Node, SessionId, ?FILE_REF(FileGuid), <<"share">>
            )),
            ShId;
        2 ->
            undefined
    end.
%% @doc Converts a list of file guids to their CDMI object ids; crashes
%% (badmatch) if any conversion fails.
-spec guids_to_object_ids([file_id:file_guid()]) -> [file_id:objectid()].
guids_to_object_ids(Guids) ->
    [begin
        {ok, ObjectId} = file_id:guid_to_objectid(Guid),
        ObjectId
    end || Guid <- Guids].
%% @doc Translates a #file_details{} record into the JSON map expected from
%% the GS (graph sync) API. The first clause builds the full (authenticated)
%% view; the second builds the restricted share-mode view for the given
%% share id.
-spec file_details_to_gs_json(undefined | od_share:id(), #file_details{}) -> map().
file_details_to_gs_json(undefined, #file_details{
    file_attr = #file_attr{
        guid = FileGuid,
        parent_guid = ParentGuid,
        name = FileName,
        type = Type,
        mode = Mode,
        size = Size,
        mtime = MTime,
        shares = Shares,
        owner_id = OwnerId,
        provider_id = ProviderId,
        nlink = LinksCount,
        index = Index
    },
    active_permissions_type = ActivePermissionsType,
    eff_protection_flags = EffProtectionFlags,
    eff_dataset_protection_flags = EffDatasetProtectionFlags,
    eff_qos_membership = EffQosMembership,
    eff_dataset_membership = EffDatasetMembership,
    has_metadata = HasMetadata,
    recall_root_id = RecallRootId
}) ->
    #{
        <<"hasMetadata">> => HasMetadata,
        <<"guid">> => FileGuid,
        <<"name">> => FileName,
        <<"index">> => file_listing:encode_index(Index),
        % mode rendered as zero-padded octal, e.g. <<"705">>
        <<"posixPermissions">> => list_to_binary(string:right(integer_to_list(Mode, 8), 3, $0)),
        <<"effProtectionFlags">> => file_meta:protection_flags_to_json(EffProtectionFlags),
        <<"effDatasetProtectionFlags">> => file_meta:protection_flags_to_json(EffDatasetProtectionFlags),
        % For space dir gs returns null as parentId instead of user root dir
        % (gui doesn't know about user root dir)
        <<"parentId">> => case fslogic_file_id:is_space_dir_guid(FileGuid) of
            true -> null;
            false -> ParentGuid
        end,
        <<"mtime">> => MTime,
        <<"type">> => str_utils:to_binary(Type),
        <<"size">> => utils:undefined_to_null(Size),
        <<"shares">> => Shares,
        <<"activePermissionsType">> => atom_to_binary(ActivePermissionsType, utf8),
        <<"providerId">> => ProviderId,
        <<"ownerId">> => OwnerId,
        <<"effQosMembership">> => translate_membership(EffQosMembership),
        <<"effDatasetMembership">> => translate_membership(EffDatasetMembership),
        <<"hardlinksCount">> => utils:undefined_to_null(LinksCount),
        <<"recallRootId">> => utils:undefined_to_null(RecallRootId)
    };
file_details_to_gs_json(ShareId, #file_details{
    file_attr = #file_attr{
        guid = FileGuid,
        parent_guid = ParentGuid,
        name = FileName,
        type = Type,
        mode = Mode,
        size = Size,
        mtime = MTime,
        shares = Shares,
        index = Index
    },
    active_permissions_type = ActivePermissionsType,
    has_metadata = HasMetadata
}) ->
    IsShareRoot = lists:member(ShareId, Shares),
    #{
        <<"hasMetadata">> => HasMetadata,
        <<"guid">> => file_id:guid_to_share_guid(FileGuid, ShareId),
        <<"name">> => FileName,
        <<"index">> => file_listing:encode_index(Index),
        % share mode exposes only the "other" permission bits
        <<"posixPermissions">> => list_to_binary(string:right(integer_to_list(Mode band 2#111, 8), 3, $0)),
        % the share root has no visible parent
        <<"parentId">> => case IsShareRoot of
            true -> null;
            false -> file_id:guid_to_share_guid(ParentGuid, ShareId)
        end,
        <<"mtime">> => MTime,
        <<"type">> => str_utils:to_binary(Type),
        <<"size">> => utils:undefined_to_null(Size),
        <<"shares">> => case IsShareRoot of
            true -> [ShareId];
            false -> []
        end,
        <<"activePermissionsType">> => atom_to_binary(ActivePermissionsType, utf8)
    }.
%% @doc Translates a #file_attr{} record into the JSON map expected from
%% the REST API. The first clause builds the full (authenticated) view;
%% the second builds the restricted share-mode view for the given share id.
%% In both clauses xattrs are flattened into <<"xattr.Name">> keys.
-spec file_attrs_to_json(undefined | od_share:id(), #file_attr{}) -> map().
file_attrs_to_json(undefined, #file_attr{
    guid = Guid,
    name = Name,
    mode = Mode,
    parent_guid = ParentGuid,
    uid = Uid,
    gid = Gid,
    atime = Atime,
    mtime = Mtime,
    ctime = Ctime,
    type = Type,
    size = Size,
    shares = Shares,
    provider_id = ProviderId,
    owner_id = OwnerId,
    nlink = HardlinksCount,
    index = Index,
    xattrs = Xattrs
}) ->
    {ok, ObjectId} = file_id:guid_to_objectid(Guid),
    BaseJson = #{
        <<"file_id">> => ObjectId,
        <<"name">> => Name,
        % mode rendered as zero-padded octal, e.g. <<"705">>
        <<"mode">> => list_to_binary(string:right(integer_to_list(Mode, 8), 3, $0)),
        <<"parent_id">> => case ParentGuid of
            undefined ->
                null;
            _ ->
                {ok, ParentObjectId} = file_id:guid_to_objectid(ParentGuid),
                ParentObjectId
        end,
        <<"storage_user_id">> => Uid,
        <<"storage_group_id">> => Gid,
        <<"atime">> => Atime,
        <<"mtime">> => Mtime,
        <<"ctime">> => Ctime,
        <<"type">> => str_utils:to_binary(Type),
        <<"size">> => utils:undefined_to_null(Size),
        <<"shares">> => Shares,
        <<"provider_id">> => ProviderId,
        <<"owner_id">> => OwnerId,
        <<"hardlinks_count">> => utils:undefined_to_null(HardlinksCount),
        <<"index">> => file_listing:encode_index(Index)
    },
    maps:fold(fun(XattrName, XattrValue, Acc) ->
        Acc#{<<"xattr.", XattrName/binary>> => utils:undefined_to_null(XattrValue)}
    end, BaseJson, Xattrs);
file_attrs_to_json(ShareId, #file_attr{
    guid = FileGuid,
    parent_guid = ParentGuid,
    name = Name,
    type = Type,
    mode = Mode,
    size = Size,
    mtime = Mtime,
    atime = Atime,
    ctime = Ctime,
    shares = Shares,
    index = Index,
    xattrs = Xattrs
}) ->
    {ok, ObjectId} = file_id:guid_to_objectid(file_id:guid_to_share_guid(FileGuid, ShareId)),
    IsShareRoot = lists:member(ShareId, Shares),
    BaseJson = #{
        <<"file_id">> => ObjectId,
        <<"name">> => Name,
        % share mode exposes only the "other" permission bits
        <<"mode">> => list_to_binary(string:right(integer_to_list(Mode band 2#111, 8), 3, $0)),
        % the share root has no visible parent
        <<"parent_id">> => case IsShareRoot of
            true -> null;
            false ->
                {ok, ParentObjectId} = file_id:guid_to_objectid(file_id:guid_to_share_guid(ParentGuid, ShareId)),
                ParentObjectId
        end,
        <<"atime">> => Atime,
        <<"mtime">> => Mtime,
        <<"ctime">> => Ctime,
        <<"type">> => str_utils:to_binary(Type),
        <<"size">> => utils:undefined_to_null(Size),
        <<"shares">> => case IsShareRoot of
            true -> [ShareId];
            false -> []
        end,
        <<"index">> => file_listing:encode_index(Index)
    },
    maps:fold(fun(XattrName, XattrValue, Acc) ->
        Acc#{<<"xattr.", XattrName/binary>> => utils:undefined_to_null(XattrValue)}
    end, BaseJson, Xattrs).
%%--------------------------------------------------------------------
%% @doc
%% Adds to data_spec() errors for invalid file id's (guid, path, cdmi_id) for
%% either normal and share mode (since operation is available in both modes
%% it is expected that it will have distinct tests for each mode).
%%
%% ATTENTION !!!
%%
%% Bad ids are available under 'bad_id' atom key - test implementation should
%% make sure to substitute them for fileId component in rest path or #gri.id
%% before making test call.
%% @end
%%--------------------------------------------------------------------
%% @doc See /4; defaults the id key to <<"id">>.
-spec add_file_id_errors_for_operations_available_in_share_mode(
    file_id:file_guid(),
    undefined | od_share:id(),
    undefined | onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_file_id_errors_for_operations_available_in_share_mode(FileGuid, ShareId, DataSpec) ->
    add_file_id_errors_for_operations_available_in_share_mode(<<"id">>, FileGuid, ShareId, DataSpec).

%% @doc Extends the data spec with bad_id entries (invalid ids, ids from a
%% non-existent space, ids of non-existent files) together with the errors
%% expected for operations that ARE available in share mode.
-spec add_file_id_errors_for_operations_available_in_share_mode(
    IdKey :: binary(),
    file_id:file_guid(),
    undefined | od_share:id(),
    undefined | onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_file_id_errors_for_operations_available_in_share_mode(IdKey, FileGuid, ShareId, DataSpec) ->
    InvalidFileIdErrors = get_invalid_file_id_errors(IdKey),
    NonExistentSpaceGuid = file_id:pack_share_guid(<<"InvalidUuid">>, ?NOT_SUPPORTED_SPACE_ID, ShareId),
    SpaceId = file_id:guid_to_space_id(FileGuid),
    {ok, NonExistentSpaceObjectId} = file_id:guid_to_objectid(NonExistentSpaceGuid),
    NonExistentSpaceExpError = case ShareId of
        undefined ->
            % For authenticated users it should fail on authorization step
            % (checks if user belongs to space)
            ?ERROR_FORBIDDEN;
        _ ->
            % For share request it should fail on validation step
            % (checks if space is supported by provider)
            {error_fun, fun(#api_test_ctx{node = Node}) ->
                ProvId = opw_test_rpc:get_provider_id(Node),
                ?ERROR_SPACE_NOT_SUPPORTED_BY(?NOT_SUPPORTED_SPACE_ID, ProvId)
            end}
    end,
    NonExistentFileGuid = file_id:pack_share_guid(<<"InvalidUuid">>, SpaceId, ShareId),
    {ok, NonExistentFileObjectId} = file_id:guid_to_objectid(NonExistentFileGuid),
    BadFileIdErrors = InvalidFileIdErrors ++ [
        {bad_id, NonExistentSpaceObjectId, {rest, NonExistentSpaceExpError}},
        {bad_id, NonExistentSpaceGuid, {gs, NonExistentSpaceExpError}},
        % Errors thrown by internal logic (all middleware checks were passed)
        {bad_id, NonExistentFileObjectId, {rest, ?ERROR_POSIX(?ENOENT)}},
        {bad_id, NonExistentFileGuid, {gs, ?ERROR_POSIX(?ENOENT)}}
    ],
    add_bad_values_to_data_spec(BadFileIdErrors, DataSpec).
%%--------------------------------------------------------------------
%% @doc
%% Adds to data_spec() errors for invalid file id's (guid, path, cdmi_id)
%% for both normal and share mode (since operation is not available in share
%% mode there is no need to write distinct test for share mode - access errors
%% when using share file id can be checked along with other bad_values errors).
%%
%% ATTENTION !!!
%%
%% Bad ids are available under 'bad_id' atom key - test implementation should
%% make sure to substitute them for fileId component in rest path or #gri.id
%% before making test call.
%% @end
%%--------------------------------------------------------------------
%% @doc See /4; defaults the id key to <<"id">>.
-spec add_file_id_errors_for_operations_not_available_in_share_mode(
    file_id:file_guid(),
    od_share:id(),
    undefined | onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_file_id_errors_for_operations_not_available_in_share_mode(FileGuid, ShareId, DataSpec) ->
    add_file_id_errors_for_operations_not_available_in_share_mode(<<"id">>, FileGuid, ShareId, DataSpec).

%% @doc Extends the data spec with bad_id entries and expected errors for
%% operations that are NOT available in share mode; share-guid variants of
%% every id are added as well (they must be rejected).
-spec add_file_id_errors_for_operations_not_available_in_share_mode(
    IdKey :: binary(),
    file_id:file_guid(),
    od_share:id(),
    undefined | onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_file_id_errors_for_operations_not_available_in_share_mode(IdKey, FileGuid, ShareId, DataSpec) ->
    InvalidFileIdErrors = get_invalid_file_id_errors(IdKey),
    NonExistentSpaceGuid = file_id:pack_guid(<<"InvalidUuid">>, ?NOT_SUPPORTED_SPACE_ID),
    {ok, NonExistentSpaceObjectId} = file_id:guid_to_objectid(NonExistentSpaceGuid),
    NonExistentSpaceErrors = add_share_file_id_errors_for_operations_not_available_in_share_mode(
        NonExistentSpaceGuid, ShareId, [
            % Errors in normal mode - thrown by middleware auth checks
            % (checks whether authenticated user belongs to space)
            {bad_id, NonExistentSpaceObjectId, {rest, ?ERROR_FORBIDDEN}},
            {bad_id, NonExistentSpaceGuid, {gs, ?ERROR_FORBIDDEN}}
        ]
    ),
    SpaceId = file_id:guid_to_space_id(FileGuid),
    NonExistentFileGuid = file_id:pack_guid(<<"InvalidUuid">>, SpaceId),
    {ok, NonExistentFileObjectId} = file_id:guid_to_objectid(NonExistentFileGuid),
    NonExistentFileErrors = add_share_file_id_errors_for_operations_not_available_in_share_mode(
        NonExistentFileGuid, ShareId, [
            % Errors in normal mode - thrown by internal logic
            % (all middleware checks were passed)
            {bad_id, NonExistentFileObjectId, {rest, ?ERROR_POSIX(?ENOENT)}},
            {bad_id, NonExistentFileGuid, {gs, ?ERROR_POSIX(?ENOENT)}}
        ]
    ),
    ShareFileErrors = add_share_file_id_errors_for_operations_not_available_in_share_mode(
        FileGuid, ShareId, []
    ),
    BadFileIdErrors = lists:flatten([
        InvalidFileIdErrors,
        NonExistentSpaceErrors,
        NonExistentFileErrors,
        ShareFileErrors
    ]),
    add_bad_values_to_data_spec(BadFileIdErrors, DataSpec).
%%--------------------------------------------------------------------
%% @doc
%% Extends data_spec() with file id bad values and errors for operations
%% not available in share mode that provide file id as parameter in data spec map.
%% All added bad values are in cdmi form and are stored under <<"fileId">> key.
%% @end
%%--------------------------------------------------------------------
%% @doc See /5; defaults the id key to <<"fileId">>.
-spec add_cdmi_id_errors_for_operations_not_available_in_share_mode(
    file_id:file_guid(),
    od_space:id(),
    od_share:id(),
    onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_cdmi_id_errors_for_operations_not_available_in_share_mode(FileGuid, SpaceId, ShareId, DataSpec) ->
    add_cdmi_id_errors_for_operations_not_available_in_share_mode(<<"fileId">>, FileGuid, SpaceId, ShareId, DataSpec).

%% @doc Extends the data spec with bad values (all in cdmi object-id form,
%% stored under IdKey) and expected errors for operations that take the
%% file id as a data-spec parameter and are NOT available in share mode.
-spec add_cdmi_id_errors_for_operations_not_available_in_share_mode(
    IdKey :: binary(),
    file_id:file_guid(),
    od_space:id(),
    od_share:id(),
    onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_cdmi_id_errors_for_operations_not_available_in_share_mode(IdKey, FileGuid, SpaceId, ShareId, DataSpec) ->
    {ok, DummyObjectId} = file_id:guid_to_objectid(<<"DummyGuid">>),
    NonExistentSpaceGuid = file_id:pack_guid(<<"InvalidUuid">>, ?NOT_SUPPORTED_SPACE_ID),
    {ok, NonExistentSpaceObjectId} = file_id:guid_to_objectid(NonExistentSpaceGuid),
    NonExistentSpaceShareGuid = file_id:guid_to_share_guid(NonExistentSpaceGuid, ShareId),
    {ok, NonExistentSpaceShareObjectId} = file_id:guid_to_objectid(NonExistentSpaceShareGuid),
    NonExistentFileGuid = file_id:pack_guid(<<"InvalidUuid">>, SpaceId),
    {ok, NonExistentFileObjectId} = file_id:guid_to_objectid(NonExistentFileGuid),
    NonExistentFileShareGuid = file_id:guid_to_share_guid(NonExistentFileGuid, ShareId),
    {ok, NonExistentFileShareObjectId} = file_id:guid_to_objectid(NonExistentFileShareGuid),
    ShareFileGuid = file_id:guid_to_share_guid(FileGuid, ShareId),
    {ok, ShareFileObjectId} = file_id:guid_to_objectid(ShareFileGuid),
    BadFileIdValues = [
        {IdKey, <<"InvalidObjectId">>, ?ERROR_BAD_VALUE_IDENTIFIER(IdKey)},
        {IdKey, DummyObjectId, ?ERROR_BAD_VALUE_IDENTIFIER(IdKey)},
        % user has no privileges in non existent space and so he should receive ?ERROR_FORBIDDEN
        {IdKey, NonExistentSpaceObjectId, ?ERROR_FORBIDDEN},
        {IdKey, NonExistentSpaceShareObjectId, ?ERROR_FORBIDDEN},
        {IdKey, NonExistentFileObjectId, ?ERROR_POSIX(?ENOENT)},
        % operation is not available in share mode - it should result in ?EPERM
        {IdKey, ShareFileObjectId, ?ERROR_POSIX(?EPERM)},
        {IdKey, NonExistentFileShareObjectId, ?ERROR_POSIX(?EPERM)}
    ],
    add_bad_values_to_data_spec(BadFileIdValues, DataSpec).
%% @doc Rewrites every ?ERROR_POSIX(?ENOENT) expectation in the data spec's
%% bad values (bare or interface-tagged) to ?ERROR_NOT_FOUND; all other
%% entries are kept untouched.
-spec replace_enoent_with_error_not_found_in_error_expectations(onenv_api_test_runner:data_spec()) ->
    onenv_api_test_runner:data_spec().
replace_enoent_with_error_not_found_in_error_expectations(DataSpec = #data_spec{bad_values = BadValues}) ->
    ReplacedBadValues = [case BadValue of
        {Key, Value, ?ERROR_POSIX(?ENOENT)} ->
            {Key, Value, ?ERROR_NOT_FOUND};
        {Key, Value, {Interface, ?ERROR_POSIX(?ENOENT)}} ->
            {Key, Value, {Interface, ?ERROR_NOT_FOUND}};
        _ ->
            BadValue
    end || BadValue <- BadValues],
    DataSpec#data_spec{bad_values = ReplacedBadValues}.
%% @doc Returns the id a test call should use: if Data carries a 'bad_id'
%% entry (added by the add_*_errors helpers above), that id replaces the
%% valid one and is removed from the data map; otherwise the valid id and
%% the data are returned unchanged.
%% (Spec added for consistency - this was the only exported function
%% without one.)
-spec maybe_substitute_bad_id(ValidId, Data) -> {IdToUse :: term(), LeftoverData} when
    ValidId :: term(),
    Data :: undefined | map(),
    LeftoverData :: undefined | map().
maybe_substitute_bad_id(ValidId, undefined) ->
    {ValidId, undefined};
maybe_substitute_bad_id(ValidId, Data) ->
    case maps:take(bad_id, Data) of
        {BadId, LeftoverData} -> {BadId, LeftoverData};
        error -> {ValidId, Data}
    end.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @private
%% Prepends the given bad values to the data spec's bad_values list,
%% creating a fresh #data_spec{} when none was supplied.
add_bad_values_to_data_spec(BadValuesToAdd, undefined) ->
    #data_spec{bad_values = BadValuesToAdd};
add_bad_values_to_data_spec(BadValuesToAdd, #data_spec{bad_values = ExistingBadValues} = DataSpec) ->
    DataSpec#data_spec{bad_values = lists:append(BadValuesToAdd, ExistingBadValues)}.
%% @private
%% Builds the bad_id entries for syntactically invalid ids (bad path, bad
%% object id, bad guid) together with the errors each interface is expected
%% to return. NOTE(review): 'provider_id_placeholder' is presumably replaced
%% by the test runner with the actual provider id - confirm.
get_invalid_file_id_errors(IdKey) ->
    InvalidGuid = <<"InvalidGuid">>,
    {ok, InvalidObjectId} = file_id:guid_to_objectid(InvalidGuid),
    [
        % Errors thrown by rest_handler, which failed to convert file path/cdmi_id to guid
        {bad_id, <<"/NonExistentPath">>, {rest_with_file_path, ?ERROR_POSIX(?ENOENT)}},
        {bad_id, <<"InvalidObjectId">>, {rest, ?ERROR_SPACE_NOT_SUPPORTED_BY(<<"InvalidObjectId">>, provider_id_placeholder)}},
        % Errors thrown by middleware and internal logic
        {bad_id, InvalidObjectId, {rest, ?ERROR_SPACE_NOT_SUPPORTED_BY(InvalidObjectId, provider_id_placeholder)}},
        {bad_id, InvalidGuid, {gs, ?ERROR_BAD_VALUE_IDENTIFIER(IdKey)}}
    ].
%% @private
%% Prepends share-guid variants of the given file id (guid and object id)
%% with the errors expected when a share id is used for an operation that
%% is not available in share mode.
add_share_file_id_errors_for_operations_not_available_in_share_mode(FileGuid, ShareId, Errors) ->
    ShareFileGuid = file_id:guid_to_share_guid(FileGuid, ShareId),
    {ok, ShareFileObjectId} = file_id:guid_to_objectid(ShareFileGuid),
    [
        % Errors in share mode:
        % - rest: thrown by middleware operation_supported check (rest_handler
        %   changes scope to public when using share object id)
        % - gs: scope is left intact (in contrast to rest) but client is changed
        %   to ?GUEST. Then it fails middleware auth checks (whether user belongs
        %   to space or has some space privileges)
        {bad_id, ShareFileObjectId, {rest, ?ERROR_NOT_SUPPORTED}},
        {bad_id, ShareFileGuid, {gs, ?ERROR_UNAUTHORIZED}}
        | Errors
    ].
%% @private
%% Translates an effective QoS/dataset membership macro into the binary
%% form used in the GS JSON representation.
translate_membership(?NONE_MEMBERSHIP) -> <<"none">>;
translate_membership(?DIRECT_MEMBERSHIP) -> <<"direct">>;
translate_membership(?ANCESTOR_MEMBERSHIP) -> <<"ancestor">>;
translate_membership(?DIRECT_AND_ANCESTOR_MEMBERSHIP) -> <<"directAndAncestor">>.
| null | https://raw.githubusercontent.com/onedata/op-worker/e0f8d666ff664a558050d1fc8f0e33f939a18030/test_distributed/utils/api/api_test_utils.erl | erlang | -------------------------------------------------------------------
@end
-------------------------------------------------------------------
@doc
Utility functions used in API tests.
@end
-------------------------------------------------------------------
<<"file">> | <<"dir">>
===================================================================
API
===================================================================
For space dir gs returns null as parentId instead of user root dir
(gui doesn't know about user root dir)
--------------------------------------------------------------------
@doc
either normal and share mode (since operation is available in both modes
it is expected that it will have distinct tests for each mode).
ATTENTION !!!
Bad ids are available under 'bad_id' atom key - test implementation should
make sure to substitute them for fileId component in rest path or #gri.id
before making test call.
@end
--------------------------------------------------------------------
For authenticated users it should fail on authorization step
(checks if user belongs to space)
For share request it should fail on validation step
(checks if space is supported by provider)
Errors thrown by internal logic (all middleware checks were passed)
--------------------------------------------------------------------
@doc
for both normal and share mode (since operation is not available in share
mode there is no need to write distinct test for share mode - access errors
when using share file id can be checked along with other bad_values errors).
ATTENTION !!!
Bad ids are available under 'bad_id' atom key - test implementation should
make sure to substitute them for fileId component in rest path or #gri.id
before making test call.
@end
--------------------------------------------------------------------
(checks whether authenticated user belongs to space)
Errors in normal mode - thrown by internal logic
(all middleware checks were passed)
--------------------------------------------------------------------
@doc
Extends data_spec() with file id bad values and errors for operations
not available in share mode that provide file id as parameter in data spec map.
All added bad values are in cdmi form and are stored under <<"fileId">> key.
@end
--------------------------------------------------------------------
operation is not available in share mode - it should result in ?EPERM
===================================================================
===================================================================
Errors thrown by rest_handler, which failed to convert file path/cdmi_id to guid
Errors thrown by middleware and internal logic
Errors in share mode:
- rest: thrown by middleware operation_supported check (rest_handler
changes scope to public when using share object id)
- gs: scope is left intact (in contrast to rest) but client is changed
to ?GUEST. Then it fails middleware auth checks (whether user belongs
to space or has some space privileges) | @author
( C ) 2020 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(api_test_utils).
-author("Bartosz Walkowicz").
-include("api_test_runner.hrl").
-include("api_file_test_utils.hrl").
-include("modules/dataset/dataset.hrl").
-include("modules/fslogic/file_details.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include("modules/logical_file_manager/lfm.hrl").
-include("proto/oneclient/common_messages.hrl").
-include("test_utils/initializer.hrl").
-export([
build_rest_url/2,
create_shared_file_in_space_krk/0,
create_and_sync_shared_file_in_space_krk_par/1,
create_and_sync_shared_file_in_space_krk_par/2,
create_and_sync_shared_file_in_space_krk_par/3,
create_file_in_space_krk_par_with_additional_metadata/3,
create_file_in_space_krk_par_with_additional_metadata/4,
randomly_choose_file_type_for_test/0,
randomly_choose_file_type_for_test/1,
share_file_and_sync_file_attrs/4,
set_and_sync_metadata/4,
set_metadata/4,
get_metadata/3,
set_xattrs/3,
get_xattrs/2,
randomly_add_qos/4,
randomly_set_metadata/2,
randomly_set_acl/2,
randomly_create_share/3,
guids_to_object_ids/1,
file_details_to_gs_json/2,
file_attrs_to_json/2
]).
-export([
add_file_id_errors_for_operations_available_in_share_mode/3,
add_file_id_errors_for_operations_available_in_share_mode/4,
add_file_id_errors_for_operations_not_available_in_share_mode/3,
add_file_id_errors_for_operations_not_available_in_share_mode/4,
add_cdmi_id_errors_for_operations_not_available_in_share_mode/4,
add_cdmi_id_errors_for_operations_not_available_in_share_mode/5,
replace_enoent_with_error_not_found_in_error_expectations/1,
maybe_substitute_bad_id/2
]).
< < " rdf " > > | < < " json " > > | < < " " > > .
-export_type([file_type/0, metadata_type/0]).
-define(ATTEMPTS, 30).
%% Builds a REST URL by delegating to the oneprovider module on the target node.
-spec build_rest_url(node(), [binary()]) -> binary().
build_rest_url(Worker, Tokens) ->
    rpc:call(Worker, oneprovider, build_rest_url, [Tokens]).
%% @doc Creates a file of random type in ?SPACE_KRK (as user3) and shares it
%% as the space owner (user1). Returns the chosen type, path, guid and share id.
-spec create_shared_file_in_space_krk() ->
    {file_type(), file_meta:path(), file_id:file_guid(), od_share:id()}.
create_shared_file_in_space_krk() ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    UserSessId = oct_background:get_user_session_id(user3, krakow),
    SpaceOwnerSessId = oct_background:get_user_session_id(user1, krakow),
    %% Randomly pick <<"file">> or <<"dir">> so both kinds get test coverage.
    FileType = randomly_choose_file_type_for_test(),
    FilePath = filename:join(["/", ?SPACE_KRK, ?RANDOM_FILE_NAME()]),
    {ok, FileGuid} = lfm_test_utils:create_file(FileType, P1Node, UserSessId, FilePath),
    %% Share is created by the space owner session, not the file creator.
    {ok, ShareId} = opt_shares:create(P1Node, SpaceOwnerSessId, ?FILE_REF(FileGuid), <<"share">>),
    {FileType, FilePath, FileGuid, ShareId}.
%% Convenience variant: random file type, given mode.
-spec create_and_sync_shared_file_in_space_krk_par(file_meta:mode()) ->
    {file_type(), file_meta:path(), file_id:file_guid(), od_share:id()}.
create_and_sync_shared_file_in_space_krk_par(Mode) ->
    create_and_sync_shared_file_in_space_krk_par(randomly_choose_file_type_for_test(), Mode).
%% @doc Convenience variant: given type and mode, random file name.
-spec create_and_sync_shared_file_in_space_krk_par(file_type(), file_meta:mode()) ->
    {file_type(), file_meta:path(), file_id:file_guid(), od_share:id()}.
create_and_sync_shared_file_in_space_krk_par(FileType, Mode) ->
    create_and_sync_shared_file_in_space_krk_par(FileType, ?RANDOM_FILE_NAME(), Mode).
%% @doc Creates a file in ?SPACE_KRK_PAR on the krakow provider (as user3),
%% shares it as the space owner (user2), and waits until the file metadata has
%% synced to the paris provider before returning.
-spec create_and_sync_shared_file_in_space_krk_par(
    file_type(),
    file_meta:name(),
    file_meta:mode()
) ->
    {file_type(), file_meta:path(), file_id:file_guid(), od_share:id()}.
create_and_sync_shared_file_in_space_krk_par(FileType, FileName, Mode) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    %% NOTE(review): sessions are looked up via kv_utils/node_cache here while
    %% sibling helpers use oct_background:get_user_session_id/2 - confirm whether
    %% this is intentional or should be unified.
    SpaceOwnerSessIdP1 = kv_utils:get([users, user2, sessions, krakow], node_cache:get(oct_mapping)),
    UserSessIdP1 = kv_utils:get([users, user3, sessions, krakow], node_cache:get(oct_mapping)),
    FilePath = filename:join(["/", ?SPACE_KRK_PAR, FileName]),
    {ok, FileGuid} = lfm_test_utils:create_file(FileType, P1Node, UserSessIdP1, FilePath, Mode),
    {ok, ShareId} = opt_shares:create(P1Node, SpaceOwnerSessIdP1, ?FILE_REF(FileGuid), <<"share">>),
    %% Block until the remote provider sees the new file.
    file_test_utils:await_sync(P2Node, FileGuid),
    {FileType, FilePath, FileGuid, ShareId}.
%% @doc Convenience variant: random file type (not logged), then delegates to
%% create_file_in_space_krk_par_with_additional_metadata/4.
-spec create_file_in_space_krk_par_with_additional_metadata(
    file_meta:path(),
    boolean(),
    file_meta:name()
) ->
    {file_type(), file_meta:path(), file_id:file_guid(), #file_details{}}.
create_file_in_space_krk_par_with_additional_metadata(ParentPath, HasParentQos, FileName) ->
    FileType = randomly_choose_file_type_for_test(false),
    create_file_in_space_krk_par_with_additional_metadata(ParentPath, HasParentQos, FileType, FileName).
%% @doc Creates a file/dir under ParentPath with a random mode and randomly
%% attached extras (share, QoS entry, rdf metadata, ACL), waits for it to sync
%% to the paris provider, and returns the expected #file_details{} record that
%% API tests can compare responses against.
-spec create_file_in_space_krk_par_with_additional_metadata(
    file_meta:path(),
    boolean(),
    file_type(),
    file_meta:name()
) ->
    {file_type(), file_meta:path(), file_id:file_guid(), #file_details{}}.
create_file_in_space_krk_par_with_additional_metadata(ParentPath, HasParentQos, FileType, FileName) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    Nodes = [P1Node, P2Node],
    UserSessIdP1 = oct_background:get_user_session_id(user3, krakow),
    SpaceOwnerSessIdP1 = oct_background:get_user_session_id(user2, krakow),
    FilePath = filename:join([ParentPath, FileName]),
    %% Random owner-centric modes (7 for owner; 0/5/7 for others).
    FileMode = lists_utils:random_element([8#707, 8#705, 8#700]),
    {ok, FileGuid} = lfm_test_utils:create_file(
        FileType, P1Node, UserSessIdP1, FilePath, FileMode
    ),
    %% Share may or may not exist - record what was actually created.
    FileShares = case randomly_create_share(P1Node, SpaceOwnerSessIdP1, FileGuid) of
        undefined -> [];
        ShareId -> [ShareId]
    end,
    %% Regular files get 1-20 bytes of random content; directories have size 0.
    Size = case FileType of
        <<"file">> ->
            RandSize = rand:uniform(20),
            lfm_test_utils:write_file(P1Node, SpaceOwnerSessIdP1, FileGuid, {rand_content, RandSize}),
            RandSize;
        <<"dir">> ->
            0
    end,
    %% Asserting on the remote provider doubles as waiting for dbsync.
    {ok, FileAttrs} = ?assertMatch(
        {ok, #file_attr{size = Size, shares = FileShares}},
        file_test_utils:get_attrs(P2Node, FileGuid),
        ?ATTEMPTS
    ),
    HasDirectQos = randomly_add_qos(Nodes, FileGuid, <<"key=value2">>, 2),
    HasMetadata = randomly_set_metadata(Nodes, FileGuid),
    HasAcl = randomly_set_acl(Nodes, FileGuid),
    FileDetails = #file_details{
        file_attr = FileAttrs,
        active_permissions_type = case HasAcl of
            true -> acl;
            false -> posix
        end,
        eff_protection_flags = ?no_flags_mask,
        eff_dataset_protection_flags = ?no_flags_mask,
        %% Effective QoS membership combines the directly added entry with the
        %% caller-declared ancestor QoS.
        eff_qos_membership = case {HasDirectQos, HasParentQos} of
            {true, true} -> ?DIRECT_AND_ANCESTOR_MEMBERSHIP;
            {true, _} -> ?DIRECT_MEMBERSHIP;
            {_, true} -> ?ANCESTOR_MEMBERSHIP;
            _ -> ?NONE_MEMBERSHIP
        end,
        eff_dataset_membership = ?NONE_MEMBERSHIP,
        has_metadata = HasMetadata
    },
    {FileType, FilePath, FileGuid, FileDetails}.
%% @doc Picks a random file type and logs the choice to the ct log.
-spec randomly_choose_file_type_for_test() -> file_type().
randomly_choose_file_type_for_test() ->
    randomly_choose_file_type_for_test(true).
%% Picks a random file type; optionally logs the choice so that failed
%% randomized test runs can be reproduced.
-spec randomly_choose_file_type_for_test(boolean()) -> file_type().
randomly_choose_file_type_for_test(LogChoice) ->
    ChosenType = ?RANDOM_FILE_TYPE(),
    LogChoice andalso ct:pal("Chosen file type for test: ~s", [ChosenType]),
    ChosenType.
%% @doc Shares a file on CreationNode and waits until every node in SyncNodes
%% reports the new share id in the file attrs. Returns the share id.
-spec share_file_and_sync_file_attrs(node(), session:id(), [node()], file_id:file_guid()) ->
    od_share:id().
share_file_and_sync_file_attrs(CreationNode, SessionId, SyncNodes, FileGuid) ->
    {ok, ShareId} = ?assertMatch(
        {ok, _},
        opt_shares:create(CreationNode, SessionId, ?FILE_REF(FileGuid), <<"share">>),
        ?ATTEMPTS
    ),
    %% The share id is expected to appear first in the shares list.
    lists:foreach(fun(Node) ->
        ?assertMatch(
            {ok, #file_attr{shares = [ShareId | _]}},
            file_test_utils:get_attrs(Node, FileGuid),
            ?ATTEMPTS
        )
    end, SyncNodes),
    ShareId.
%% @doc Sets metadata of the given type via a random node and waits until every
%% node in Nodes can read it back.
-spec set_and_sync_metadata([node()], file_id:file_guid(), metadata_type(), term()) -> ok.
set_and_sync_metadata(Nodes, FileGuid, MetadataType, Metadata) ->
    RandNode = lists_utils:random_element(Nodes),
    ?assertMatch(ok, set_metadata(RandNode, FileGuid, MetadataType, Metadata), ?ATTEMPTS),
    lists:foreach(fun(Node) ->
        ?assertMatch({ok, Metadata}, get_metadata(Node, FileGuid, MetadataType), ?ATTEMPTS)
    end, Nodes).
%% Sets file metadata of the given type ("rdf" | "json" | "xattrs") as root.
-spec set_metadata(node(), file_id:file_guid(), metadata_type(), term()) -> ok.
set_metadata(Node, FileGuid, <<"xattrs">>, Metadata) ->
    set_xattrs(Node, FileGuid, Metadata);
set_metadata(Node, FileGuid, <<"rdf">>, Metadata) ->
    opt_file_metadata:set_custom_metadata(Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), rdf, Metadata, []);
set_metadata(Node, FileGuid, <<"json">>, Metadata) ->
    opt_file_metadata:set_custom_metadata(Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), json, Metadata, []).
%% Reads back file metadata of the given type ("rdf" | "json" | "xattrs") as root.
-spec get_metadata(node(), file_id:file_guid(), metadata_type()) -> {ok, term()}.
get_metadata(Node, FileGuid, <<"xattrs">>) ->
    get_xattrs(Node, FileGuid);
get_metadata(Node, FileGuid, <<"rdf">>) ->
    opt_file_metadata:get_custom_metadata(Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), rdf, [], false);
get_metadata(Node, FileGuid, <<"json">>) ->
    opt_file_metadata:get_custom_metadata(Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), json, [], false).
%% @doc Sets every key/value pair from the map as an xattr on the file (as root),
%% asserting each individual set succeeds.
-spec set_xattrs(node(), file_id:file_guid(), map()) -> ok.
set_xattrs(Node, FileGuid, Xattrs) ->
    lists:foreach(fun({Key, Val}) ->
        ?assertMatch(ok, lfm_proxy:set_xattr(
            Node, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), #xattr{
                name = Key,
                value = Val
            }
        ), ?ATTEMPTS)
    end, maps:to_list(Xattrs)).
%% @doc Lists the file's xattrs (as root) and returns them as a name => value
%% map. Xattrs that disappear between listing and fetching are skipped.
-spec get_xattrs(node(), file_id:file_guid()) -> {ok, map()}.
get_xattrs(Node, FileGuid) ->
    FileKey = ?FILE_REF(FileGuid),
    {ok, Keys} = ?assertMatch(
        {ok, _}, lfm_proxy:list_xattr(Node, ?ROOT_SESS_ID, FileKey, false, true), ?ATTEMPTS
    ),
    {ok, lists:foldl(fun(Key, Acc) ->
        %% Check in case of race between listing and fetching xattr value
        case lfm_proxy:get_xattr(Node, ?ROOT_SESS_ID, FileKey, Key) of
            {ok, #xattr{name = Name, value = Value}} ->
                Acc#{Name => Value};
            {error, _} ->
                Acc
        end
    end, #{}, Keys)}.
%% @doc With 50% probability adds a QoS entry (via a random node) and waits for
%% it to be visible on all nodes. Returns whether the entry was added.
-spec randomly_add_qos([node()], file_id:file_guid(), qos_expression:expression(), qos_entry:replicas_num()) ->
    Added :: boolean().
randomly_add_qos(Nodes, FileGuid, Expression, ReplicasNum) ->
    case rand:uniform(2) of
        1 ->
            RandNode = lists_utils:random_element(Nodes),
            {ok, QosEntryId} = ?assertMatch({ok, _}, opt_qos:add_qos_entry(
                RandNode, ?ROOT_SESS_ID, ?FILE_REF(FileGuid), Expression, ReplicasNum
            ), ?ATTEMPTS),
            lists:foreach(fun(Node) ->
                ?assertMatch({ok, _}, opt_qos:get_qos_entry(Node, ?ROOT_SESS_ID, QosEntryId), ?ATTEMPTS)
            end, Nodes),
            true;
        2 ->
            false
    end.
%% @doc With 50% probability sets rdf metadata (via a random node) and waits for
%% it to be readable on all nodes. Returns whether metadata was set.
-spec randomly_set_metadata([node()], file_id:file_guid()) -> Set :: boolean().
randomly_set_metadata(Nodes, FileGuid) ->
    case rand:uniform(2) of
        1 ->
            FileKey = ?FILE_REF(FileGuid),
            RandNode = lists_utils:random_element(Nodes),
            ?assertMatch(ok, opt_file_metadata:set_custom_metadata(
                RandNode, ?ROOT_SESS_ID, FileKey, rdf, ?RDF_METADATA_1, []
            ), ?ATTEMPTS),
            lists:foreach(fun(Node) ->
                ?assertMatch(
                    {ok, _},
                    opt_file_metadata:get_custom_metadata(Node, ?ROOT_SESS_ID, FileKey, rdf, [], false),
                    ?ATTEMPTS
                )
            end, Nodes),
            true;
        2 ->
            false
    end.
%% @doc With 50% probability sets an owner-only-allow ACL (via a random node)
%% and waits for it to be visible on all nodes. Returns whether the ACL was set.
-spec randomly_set_acl([node()], file_id:file_guid()) -> Set :: boolean().
randomly_set_acl(Nodes, FileGuid) ->
    case rand:uniform(2) of
        1 ->
            FileKey = ?FILE_REF(FileGuid),
            RandNode = lists_utils:random_element(Nodes),
            %% Use the already-bound FileKey (same value as ?FILE_REF(FileGuid))
            %% for consistency with the get_acl call below.
            ?assertMatch(ok, lfm_proxy:set_acl(
                RandNode, ?ROOT_SESS_ID, FileKey, acl:from_json(?OWNER_ONLY_ALLOW_ACL, cdmi)
            ), ?ATTEMPTS),
            lists:foreach(fun(Node) ->
                ?assertMatch({ok, [_]}, lfm_proxy:get_acl(Node, ?ROOT_SESS_ID, FileKey), ?ATTEMPTS)
            end, Nodes),
            true;
        2 ->
            false
    end.
%% With 50% probability creates a share for the file and returns its id;
%% otherwise returns 'undefined'.
-spec randomly_create_share(node(), session:id(), file_id:file_guid()) ->
    ShareId :: undefined | od_share:id().
randomly_create_share(Node, SessionId, FileGuid) ->
    case rand:uniform(2) of
        2 ->
            undefined;
        1 ->
            {ok, ShareId} = ?assertMatch({ok, _}, opt_shares:create(
                Node, SessionId, ?FILE_REF(FileGuid), <<"share">>
            )),
            ShareId
    end.
%% Converts each guid to its cdmi object id; crashes (badmatch) on any failure.
-spec guids_to_object_ids([file_id:file_guid()]) -> [file_id:objectid()].
guids_to_object_ids(Guids) ->
    [begin
        {ok, ObjectId} = file_id:guid_to_objectid(Guid),
        ObjectId
    end || Guid <- Guids].
%% @doc Translates a #file_details{} record into the JSON map expected from the
%% GraphSync API. The first clause covers normal (private) mode; the second
%% covers share mode, where guids are share-guids and several private fields
%% (owner, provider, qos/dataset membership, hardlinks) are omitted.
-spec file_details_to_gs_json(undefined | od_share:id(), #file_details{}) -> map().
file_details_to_gs_json(undefined, #file_details{
    file_attr = #file_attr{
        guid = FileGuid,
        parent_guid = ParentGuid,
        name = FileName,
        type = Type,
        mode = Mode,
        size = Size,
        mtime = MTime,
        shares = Shares,
        owner_id = OwnerId,
        provider_id = ProviderId,
        nlink = LinksCount,
        index = Index
    },
    active_permissions_type = ActivePermissionsType,
    eff_protection_flags = EffProtectionFlags,
    eff_dataset_protection_flags = EffDatasetProtectionFlags,
    eff_qos_membership = EffQosMembership,
    eff_dataset_membership = EffDatasetMembership,
    has_metadata = HasMetadata,
    recall_root_id = RecallRootId
}) ->
    #{
        <<"hasMetadata">> => HasMetadata,
        <<"guid">> => FileGuid,
        <<"name">> => FileName,
        <<"index">> => file_listing:encode_index(Index),
        %% Mode rendered as a zero-padded 3-digit octal string, e.g. <<"707">>.
        <<"posixPermissions">> => list_to_binary(string:right(integer_to_list(Mode, 8), 3, $0)),
        <<"effProtectionFlags">> => file_meta:protection_flags_to_json(EffProtectionFlags),
        <<"effDatasetProtectionFlags">> => file_meta:protection_flags_to_json(EffDatasetProtectionFlags),
        %% For space dir gs returns null as parentId instead of user root dir
        %% (gui doesn't know about user root dir).
        <<"parentId">> => case fslogic_file_id:is_space_dir_guid(FileGuid) of
            true -> null;
            false -> ParentGuid
        end,
        <<"mtime">> => MTime,
        <<"type">> => str_utils:to_binary(Type),
        <<"size">> => utils:undefined_to_null(Size),
        <<"shares">> => Shares,
        <<"activePermissionsType">> => atom_to_binary(ActivePermissionsType, utf8),
        <<"providerId">> => ProviderId,
        <<"ownerId">> => OwnerId,
        <<"effQosMembership">> => translate_membership(EffQosMembership),
        <<"effDatasetMembership">> => translate_membership(EffDatasetMembership),
        <<"hardlinksCount">> => utils:undefined_to_null(LinksCount),
        <<"recallRootId">> => utils:undefined_to_null(RecallRootId)
    };
file_details_to_gs_json(ShareId, #file_details{
    file_attr = #file_attr{
        guid = FileGuid,
        parent_guid = ParentGuid,
        name = FileName,
        type = Type,
        mode = Mode,
        size = Size,
        mtime = MTime,
        shares = Shares,
        index = Index
    },
    active_permissions_type = ActivePermissionsType,
    has_metadata = HasMetadata
}) ->
    IsShareRoot = lists:member(ShareId, Shares),
    #{
        <<"hasMetadata">> => HasMetadata,
        <<"guid">> => file_id:guid_to_share_guid(FileGuid, ShareId),
        <<"name">> => FileName,
        <<"index">> => file_listing:encode_index(Index),
        %% In share mode only the "others" permission bits are exposed.
        <<"posixPermissions">> => list_to_binary(string:right(integer_to_list(Mode band 2#111, 8), 3, $0)),
        %% Share root has no visible parent; below the root, parents are
        %% addressed with share-guids.
        <<"parentId">> => case IsShareRoot of
            true -> null;
            false -> file_id:guid_to_share_guid(ParentGuid, ShareId)
        end,
        <<"mtime">> => MTime,
        <<"type">> => str_utils:to_binary(Type),
        <<"size">> => utils:undefined_to_null(Size),
        <<"shares">> => case IsShareRoot of
            true -> [ShareId];
            false -> []
        end,
        <<"activePermissionsType">> => atom_to_binary(ActivePermissionsType, utf8)
    }.
%% @doc Translates a #file_attr{} record into the REST JSON map. The first
%% clause covers normal (private) mode; the second covers share mode, where
%% object ids are derived from share-guids and private fields (uid/gid,
%% provider, owner, hardlinks) are omitted. Xattrs are flattened into the map
%% under <<"xattr.Name">> keys in both modes.
-spec file_attrs_to_json(undefined | od_share:id(), #file_attr{}) -> map().
file_attrs_to_json(undefined, #file_attr{
    guid = Guid,
    name = Name,
    mode = Mode,
    parent_guid = ParentGuid,
    uid = Uid,
    gid = Gid,
    atime = Atime,
    mtime = Mtime,
    ctime = Ctime,
    type = Type,
    size = Size,
    shares = Shares,
    provider_id = ProviderId,
    owner_id = OwnerId,
    nlink = HardlinksCount,
    index = Index,
    xattrs = Xattrs
}) ->
    {ok, ObjectId} = file_id:guid_to_objectid(Guid),
    BaseJson = #{
        <<"file_id">> => ObjectId,
        <<"name">> => Name,
        %% Mode rendered as a zero-padded 3-digit octal string, e.g. <<"707">>.
        <<"mode">> => list_to_binary(string:right(integer_to_list(Mode, 8), 3, $0)),
        <<"parent_id">> => case ParentGuid of
            undefined ->
                null;
            _ ->
                {ok, ParentObjectId} = file_id:guid_to_objectid(ParentGuid),
                ParentObjectId
        end,
        <<"storage_user_id">> => Uid,
        <<"storage_group_id">> => Gid,
        <<"atime">> => Atime,
        <<"mtime">> => Mtime,
        <<"ctime">> => Ctime,
        <<"type">> => str_utils:to_binary(Type),
        <<"size">> => utils:undefined_to_null(Size),
        <<"shares">> => Shares,
        <<"provider_id">> => ProviderId,
        <<"owner_id">> => OwnerId,
        <<"hardlinks_count">> => utils:undefined_to_null(HardlinksCount),
        <<"index">> => file_listing:encode_index(Index)
    },
    maps:fold(fun(XattrName, XattrValue, Acc) ->
        Acc#{<<"xattr.", XattrName/binary>> => utils:undefined_to_null(XattrValue)}
    end, BaseJson, Xattrs);
file_attrs_to_json(ShareId, #file_attr{
    guid = FileGuid,
    parent_guid = ParentGuid,
    name = Name,
    type = Type,
    mode = Mode,
    size = Size,
    mtime = Mtime,
    atime = Atime,
    ctime = Ctime,
    shares = Shares,
    index = Index,
    xattrs = Xattrs
}) ->
    {ok, ObjectId} = file_id:guid_to_objectid(file_id:guid_to_share_guid(FileGuid, ShareId)),
    IsShareRoot = lists:member(ShareId, Shares),
    BaseJson = #{
        <<"file_id">> => ObjectId,
        <<"name">> => Name,
        %% In share mode only the "others" permission bits are exposed.
        <<"mode">> => list_to_binary(string:right(integer_to_list(Mode band 2#111, 8), 3, $0)),
        %% Share root has no visible parent; below the root, parents are
        %% addressed with share-guid derived object ids.
        <<"parent_id">> => case IsShareRoot of
            true -> null;
            false ->
                {ok, ParentObjectId} = file_id:guid_to_objectid(file_id:guid_to_share_guid(ParentGuid, ShareId)),
                ParentObjectId
        end,
        <<"atime">> => Atime,
        <<"mtime">> => Mtime,
        <<"ctime">> => Ctime,
        <<"type">> => str_utils:to_binary(Type),
        <<"size">> => utils:undefined_to_null(Size),
        <<"shares">> => case IsShareRoot of
            true -> [ShareId];
            false -> []
        end,
        <<"index">> => file_listing:encode_index(Index)
    },
    maps:fold(fun(XattrName, XattrValue, Acc) ->
        Acc#{<<"xattr.", XattrName/binary>> => utils:undefined_to_null(XattrValue)}
    end, BaseJson, Xattrs).
%%--------------------------------------------------------------------
%% @doc
%% Adds to data_spec() errors for invalid file id's (guid, path, cdmi_id) for
%% operations available in both normal and share mode (since operation is
%% available in both modes it is expected that it will have distinct tests
%% for each mode).
%% ATTENTION !!!
%% Bad ids are available under 'bad_id' atom key - test implementation should
%% make sure to substitute them for fileId component in rest path or #gri.id
%% before making test call.
%% @end
%%--------------------------------------------------------------------
-spec add_file_id_errors_for_operations_available_in_share_mode(
    file_id:file_guid(),
    undefined | od_share:id(),
    undefined | onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_file_id_errors_for_operations_available_in_share_mode(FileGuid, ShareId, DataSpec) ->
    add_file_id_errors_for_operations_available_in_share_mode(<<"id">>, FileGuid, ShareId, DataSpec).
%% @doc Variant of add_file_id_errors_for_operations_available_in_share_mode/3
%% with an explicit gs id key used in ?ERROR_BAD_VALUE_IDENTIFIER expectations.
-spec add_file_id_errors_for_operations_available_in_share_mode(
    IdKey :: binary(),
    file_id:file_guid(),
    undefined | od_share:id(),
    undefined | onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_file_id_errors_for_operations_available_in_share_mode(IdKey, FileGuid, ShareId, DataSpec) ->
    InvalidFileIdErrors = get_invalid_file_id_errors(IdKey),
    NonExistentSpaceGuid = file_id:pack_share_guid(<<"InvalidUuid">>, ?NOT_SUPPORTED_SPACE_ID, ShareId),
    SpaceId = file_id:guid_to_space_id(FileGuid),
    {ok, NonExistentSpaceObjectId} = file_id:guid_to_objectid(NonExistentSpaceGuid),
    %% For authenticated users it should fail on authorization step (user does
    %% not belong to the space); for share requests it should fail on validation
    %% step (space not supported by the handling provider).
    NonExistentSpaceExpError = case ShareId of
        undefined ->
            ?ERROR_FORBIDDEN;
        _ ->
            {error_fun, fun(#api_test_ctx{node = Node}) ->
                ProvId = opw_test_rpc:get_provider_id(Node),
                ?ERROR_SPACE_NOT_SUPPORTED_BY(?NOT_SUPPORTED_SPACE_ID, ProvId)
            end}
    end,
    NonExistentFileGuid = file_id:pack_share_guid(<<"InvalidUuid">>, SpaceId, ShareId),
    {ok, NonExistentFileObjectId} = file_id:guid_to_objectid(NonExistentFileGuid),
    BadFileIdErrors = InvalidFileIdErrors ++ [
        {bad_id, NonExistentSpaceObjectId, {rest, NonExistentSpaceExpError}},
        {bad_id, NonExistentSpaceGuid, {gs, NonExistentSpaceExpError}},
        %% Errors thrown by internal logic (all middleware checks were passed).
        {bad_id, NonExistentFileObjectId, {rest, ?ERROR_POSIX(?ENOENT)}},
        {bad_id, NonExistentFileGuid, {gs, ?ERROR_POSIX(?ENOENT)}}
    ],
    add_bad_values_to_data_spec(BadFileIdErrors, DataSpec).
%%--------------------------------------------------------------------
%% @doc
%% Adds to data_spec() errors for invalid file id's (guid, path, cdmi_id)
%% for both normal and share mode (since operation is not available in share
%% mode there is no need to write distinct test for share mode - access errors
%% when using share file id can be checked along with other bad_values errors).
%% ATTENTION !!!
%% Bad ids are available under 'bad_id' atom key - test implementation should
%% make sure to substitute them for fileId component in rest path or #gri.id
%% before making test call.
%% @end
%%--------------------------------------------------------------------
-spec add_file_id_errors_for_operations_not_available_in_share_mode(
    file_id:file_guid(),
    od_share:id(),
    undefined | onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_file_id_errors_for_operations_not_available_in_share_mode(FileGuid, ShareId, DataSpec) ->
    add_file_id_errors_for_operations_not_available_in_share_mode(<<"id">>, FileGuid, ShareId, DataSpec).
%% @doc Variant of /3 with an explicit gs id key; builds bad-id expectations for
%% invalid ids, ids in a non-existent/unsupported space, non-existent files and
%% share-guids used on a non-share operation.
-spec add_file_id_errors_for_operations_not_available_in_share_mode(
    IdKey :: binary(),
    file_id:file_guid(),
    od_share:id(),
    undefined | onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_file_id_errors_for_operations_not_available_in_share_mode(IdKey, FileGuid, ShareId, DataSpec) ->
    InvalidFileIdErrors = get_invalid_file_id_errors(IdKey),
    NonExistentSpaceGuid = file_id:pack_guid(<<"InvalidUuid">>, ?NOT_SUPPORTED_SPACE_ID),
    {ok, NonExistentSpaceObjectId} = file_id:guid_to_objectid(NonExistentSpaceGuid),
    NonExistentSpaceErrors = add_share_file_id_errors_for_operations_not_available_in_share_mode(
        NonExistentSpaceGuid, ShareId, [
            %% Errors in normal mode - thrown by middleware auth checks
            %% (checks whether authenticated user belongs to space)
            {bad_id, NonExistentSpaceObjectId, {rest, ?ERROR_FORBIDDEN}},
            {bad_id, NonExistentSpaceGuid, {gs, ?ERROR_FORBIDDEN}}
        ]
    ),
    SpaceId = file_id:guid_to_space_id(FileGuid),
    NonExistentFileGuid = file_id:pack_guid(<<"InvalidUuid">>, SpaceId),
    {ok, NonExistentFileObjectId} = file_id:guid_to_objectid(NonExistentFileGuid),
    NonExistentFileErrors = add_share_file_id_errors_for_operations_not_available_in_share_mode(
        NonExistentFileGuid, ShareId, [
            %% Errors in normal mode - thrown by internal logic
            %% (all middleware checks were passed)
            {bad_id, NonExistentFileObjectId, {rest, ?ERROR_POSIX(?ENOENT)}},
            {bad_id, NonExistentFileGuid, {gs, ?ERROR_POSIX(?ENOENT)}}
        ]
    ),
    ShareFileErrors = add_share_file_id_errors_for_operations_not_available_in_share_mode(
        FileGuid, ShareId, []
    ),
    BadFileIdErrors = lists:flatten([
        InvalidFileIdErrors,
        NonExistentSpaceErrors,
        NonExistentFileErrors,
        ShareFileErrors
    ]),
    add_bad_values_to_data_spec(BadFileIdErrors, DataSpec).
%% @doc Extends data_spec() with cdmi file id bad values (stored under the
%% default <<"fileId">> key) for operations not available in share mode.
-spec add_cdmi_id_errors_for_operations_not_available_in_share_mode(
    file_id:file_guid(),
    od_space:id(),
    od_share:id(),
    onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_cdmi_id_errors_for_operations_not_available_in_share_mode(FileGuid, SpaceId, ShareId, DataSpec) ->
    add_cdmi_id_errors_for_operations_not_available_in_share_mode(<<"fileId">>, FileGuid, SpaceId, ShareId, DataSpec).
%% @doc Extends data_spec() with file id bad values and errors for operations
%% not available in share mode that provide file id as a data-spec parameter.
%% All added bad values are in cdmi form and stored under IdKey.
-spec add_cdmi_id_errors_for_operations_not_available_in_share_mode(
    IdKey :: binary(),
    file_id:file_guid(),
    od_space:id(),
    od_share:id(),
    onenv_api_test_runner:data_spec()
) ->
    onenv_api_test_runner:data_spec().
add_cdmi_id_errors_for_operations_not_available_in_share_mode(IdKey, FileGuid, SpaceId, ShareId, DataSpec) ->
    {ok, DummyObjectId} = file_id:guid_to_objectid(<<"DummyGuid">>),
    NonExistentSpaceGuid = file_id:pack_guid(<<"InvalidUuid">>, ?NOT_SUPPORTED_SPACE_ID),
    {ok, NonExistentSpaceObjectId} = file_id:guid_to_objectid(NonExistentSpaceGuid),
    NonExistentSpaceShareGuid = file_id:guid_to_share_guid(NonExistentSpaceGuid, ShareId),
    {ok, NonExistentSpaceShareObjectId} = file_id:guid_to_objectid(NonExistentSpaceShareGuid),
    NonExistentFileGuid = file_id:pack_guid(<<"InvalidUuid">>, SpaceId),
    {ok, NonExistentFileObjectId} = file_id:guid_to_objectid(NonExistentFileGuid),
    NonExistentFileShareGuid = file_id:guid_to_share_guid(NonExistentFileGuid, ShareId),
    {ok, NonExistentFileShareObjectId} = file_id:guid_to_objectid(NonExistentFileShareGuid),
    ShareFileGuid = file_id:guid_to_share_guid(FileGuid, ShareId),
    {ok, ShareFileObjectId} = file_id:guid_to_objectid(ShareFileGuid),
    BadFileIdValues = [
        {IdKey, <<"InvalidObjectId">>, ?ERROR_BAD_VALUE_IDENTIFIER(IdKey)},
        {IdKey, DummyObjectId, ?ERROR_BAD_VALUE_IDENTIFIER(IdKey)},
        %% user has no privileges in non existent space and so he should
        %% receive ?ERROR_FORBIDDEN
        {IdKey, NonExistentSpaceObjectId, ?ERROR_FORBIDDEN},
        {IdKey, NonExistentSpaceShareObjectId, ?ERROR_FORBIDDEN},
        {IdKey, NonExistentFileObjectId, ?ERROR_POSIX(?ENOENT)},
        %% operation is not available in share mode - it should result in ?EPERM
        {IdKey, ShareFileObjectId, ?ERROR_POSIX(?EPERM)},
        {IdKey, NonExistentFileShareObjectId, ?ERROR_POSIX(?EPERM)}
    ],
    add_bad_values_to_data_spec(BadFileIdValues, DataSpec).
%% Rewrites every ?ERROR_POSIX(?ENOENT) expectation (bare or wrapped in an
%% {Interface, Error} tuple) in the data spec's bad_values to ?ERROR_NOT_FOUND.
-spec replace_enoent_with_error_not_found_in_error_expectations(onenv_api_test_runner:data_spec()) ->
    onenv_api_test_runner:data_spec().
replace_enoent_with_error_not_found_in_error_expectations(DataSpec = #data_spec{bad_values = BadValues}) ->
    MapExpectation = fun
        ({Key, Value, ?ERROR_POSIX(?ENOENT)}) -> {Key, Value, ?ERROR_NOT_FOUND};
        ({Key, Value, {Interface, ?ERROR_POSIX(?ENOENT)}}) -> {Key, Value, {Interface, ?ERROR_NOT_FOUND}};
        (Spec) -> Spec
    end,
    DataSpec#data_spec{bad_values = [MapExpectation(BadValue) || BadValue <- BadValues]}.
%% @doc If the request data carries a 'bad_id' entry, returns that id (with the
%% entry removed from the data); otherwise returns the valid id unchanged.
-spec maybe_substitute_bad_id(Id, undefined | map()) ->
    {Id | term(), undefined | map()}.
maybe_substitute_bad_id(ValidId, undefined) ->
    {ValidId, undefined};
maybe_substitute_bad_id(ValidId, Data) ->
    case maps:take(bad_id, Data) of
        {BadId, LeftoverData} -> {BadId, LeftoverData};
        error -> {ValidId, Data}
    end.
%%%===================================================================
%%% Internal functions
%%%===================================================================

%% @private
%% Prepends the given bad values to the data spec, creating a fresh spec when
%% none was provided.
add_bad_values_to_data_spec(BadValuesToAdd, undefined) ->
    #data_spec{bad_values = BadValuesToAdd};
add_bad_values_to_data_spec(BadValuesToAdd, #data_spec{bad_values = BadValues} = DataSpec) ->
    DataSpec#data_spec{bad_values = BadValuesToAdd ++ BadValues}.
%% @private
%% Bad-id expectations for syntactically invalid ids (path, object id, guid).
get_invalid_file_id_errors(IdKey) ->
    InvalidGuid = <<"InvalidGuid">>,
    {ok, InvalidObjectId} = file_id:guid_to_objectid(InvalidGuid),
    [
        %% Errors thrown by rest_handler, which failed to convert
        %% file path/cdmi_id to guid.
        {bad_id, <<"/NonExistentPath">>, {rest_with_file_path, ?ERROR_POSIX(?ENOENT)}},
        {bad_id, <<"InvalidObjectId">>, {rest, ?ERROR_SPACE_NOT_SUPPORTED_BY(<<"InvalidObjectId">>, provider_id_placeholder)}},
        {bad_id, InvalidObjectId, {rest, ?ERROR_SPACE_NOT_SUPPORTED_BY(InvalidObjectId, provider_id_placeholder)}},
        {bad_id, InvalidGuid, {gs, ?ERROR_BAD_VALUE_IDENTIFIER(IdKey)}}
    ].
%% @private
%% Prepends bad-id expectations for using a share-guid on an operation that is
%% not available in share mode:
%% - rest: thrown by middleware operation_supported check (rest_handler
%%   changes scope to public when using share object id)
%% - gs: scope is left intact (in contrast to rest) but client is changed
%%   to ?GUEST; it then fails middleware auth checks.
add_share_file_id_errors_for_operations_not_available_in_share_mode(FileGuid, ShareId, Errors) ->
    ShareFileGuid = file_id:guid_to_share_guid(FileGuid, ShareId),
    {ok, ShareFileObjectId} = file_id:guid_to_objectid(ShareFileGuid),
    [
        {bad_id, ShareFileObjectId, {rest, ?ERROR_NOT_SUPPORTED}},
        {bad_id, ShareFileGuid, {gs, ?ERROR_UNAUTHORIZED}}
        | Errors
    ].
%% @private
%% Maps an effective QoS/dataset membership macro to its JSON representation.
translate_membership(?NONE_MEMBERSHIP) -> <<"none">>;
translate_membership(?DIRECT_MEMBERSHIP) -> <<"direct">>;
translate_membership(?ANCESTOR_MEMBERSHIP) -> <<"ancestor">>;
translate_membership(?DIRECT_AND_ANCESTOR_MEMBERSHIP) -> <<"directAndAncestor">>.
|
7cf570a1b1f14e551f8bfcfea2f634ee31067965eab83589774eaf5c25a6366b | jafingerhut/cljol | jdk8_and_earlier.clj | (ns cljol.jdk8-and-earlier
(:import (java.lang.reflect Field)))
(set! *warn-on-reflection* true)
(defn obj-field-value
  "Returns the value of reflective field `fld` on `obj`, forcing accessibility
  first via setAccessible. The third argument is ignored here; it presumably
  exists for signature compatibility with a JDK9+ variant that can fail the
  access check - TODO confirm against the sibling namespace."
  [obj ^Field fld _inaccessible-field-val-sentinel]
  (. fld setAccessible true)
  (.get fld obj))
| null | https://raw.githubusercontent.com/jafingerhut/cljol/2b0eb2b6ec3197434ede3adbd0a13d4b53c06dd0/src/clj/cljol/jdk8_and_earlier.clj | clojure | (ns cljol.jdk8-and-earlier
(:import (java.lang.reflect Field)))
(set! *warn-on-reflection* true)
(defn obj-field-value
  "Returns the value of reflective field `fld` on `obj`, forcing accessibility
  first via setAccessible. The third argument is ignored here; it presumably
  exists for signature compatibility with a JDK9+ variant that can fail the
  access check - TODO confirm against the sibling namespace."
  [obj ^Field fld _inaccessible-field-val-sentinel]
  (. fld setAccessible true)
  (.get fld obj))
|
|
19e8e59168055fb7065c3dc36d18f06c4ba22185fb43e5b700881a85502bd140 | bjpop/blip | Scope.hs | {-# LANGUAGE TypeSynonymInstances, FlexibleInstances, RecordWildCards, PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Blip.Compiler.Scope
Copyright : ( c ) 2012 , 2013 , 2014
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
Portability : ghc
--
-- A variable can be:
-- explicit global
-- implicit global
-- local
-- free
--
-- Global variables are either:
-- - defined (assigned) at the top level of a module
-- OR
-- - declared global in a nested scope
--
-- Local variables are (with respect to the current scope) either:
-- - Assigned in the current local scope AND not declared global or non-local.
-- OR
-- - Parameters to a function definition.
--
-- Free variables are (with respect to the current scope):
-- - Local to an enclosing scope AND either:
-- - Declared non-local in the current scope.
-- OR
-- - Read from but not assigned-to in the current local scope.
--
Cellvars are :
-- - Local to the current scope.
-- AND
-- - Free variables of a scope which is nested from the current scope.
--
Cellvars are used to implement closures such that modifications to the
-- variable binding itself are visible in the closure. They are implemented
-- as a pointer to a heap allocated cell, which itself points to a Python
-- object. The extra level of indirection allows the cell to be updated to
-- point to something else.
--
-----------------------------------------------------------------------------
module Blip.Compiler.Scope
(topScope, renderScope)
where
import Blip.Compiler.Types
( Identifier, VarSet, LocalScope (..)
, NestedScope (..), ScopeIdentifier, ParameterTypes (..) )
import Data.Set as Set
( empty, singleton, fromList, union, difference
, intersection, toList, size )
import Data.Map as Map (empty, insert, toList, union)
import Data.List (foldl', intersperse)
import Language.Python.Common.AST as AST
( Statement (..), StatementSpan, Ident (..), Expr (..), ExprSpan
, Argument (..), ArgumentSpan, RaiseExpr (..), RaiseExprSpan
, Slice (..), SliceSpan, ModuleSpan, Module (..), ParameterSpan
, YieldArg (..), YieldArgSpan
, Parameter (..), Op (..), Comprehension (..), ComprehensionSpan
, ComprehensionExpr (..), ComprehensionExprSpan
, DictKeyDatumList(..), DictKeyDatumListSpan
, CompIter (..), CompIterSpan, CompFor (..), CompForSpan, CompIf (..)
, CompIfSpan, Handler (..), HandlerSpan, ExceptClause (..), ExceptClauseSpan )
import Data.Monoid (Monoid (..))
import Control.Monad (mapAndUnzipM)
import Control.Monad.Reader (ReaderT, local, ask, runReaderT)
import Text.PrettyPrint.HughesPJ as Pretty
( Doc, ($$), nest, text, vcat, hsep, ($+$), (<+>), empty
, render, parens, comma, int, hcat )
import Blip.Pretty (Pretty (..))
import Blip.Compiler.State (emptyVarSet, emptyParameterTypes)
import Blip.Compiler.Utils ( identsFromParameters, spanToScopeIdentifier
, fromIdentString, maybeToList )
-- Scope analysis monad: a ReaderT over IO. NOTE(review): the VarSet
-- environment appears to carry variables from enclosing scopes (topScope
-- starts it at emptyVarSet) - confirm against buildNestedScope's uses of ask.
type ScopeM a = ReaderT VarSet IO a
-- Renders a scope identifier (start row/col, end row/col) as "(r1,c1,r2,c2)".
instance Pretty ScopeIdentifier where
   pretty (startRow, startCol, endRow, endCol) =
      parens $ hcat $ intersperse comma coords
      where
      coords = map int [startRow, startCol, endRow, endCol]
-- Renders every (scope identifier -> named local scope) entry, one per line,
-- with the scope's contents indented beneath its header.
instance Pretty NestedScope where
   pretty (NestedScope scope) =
      vcat $ map prettyLocalScope identsScopes
      where
      identsScopes = Map.toList scope
      prettyLocalScope :: (ScopeIdentifier, (String, LocalScope)) -> Doc
      prettyLocalScope (span, (identifier, defScope)) =
         text identifier <+> pretty span <+> text "->" $$
         nest 5 (pretty defScope)
-- Renders the variable classification of one scope; empty categories produce
-- no output (see prettyVarSet/prettyVarList).
instance Pretty LocalScope where
   pretty (LocalScope {..}) =
      text "params:" <+> (nest 5 $ pretty localScope_params) $$
      prettyVarSet "locals:" localScope_locals $$
      prettyVarSet "freevars:" localScope_freeVars $$
      prettyVarSet "cellvars:" localScope_cellVars $$
      prettyVarSet "globals:" localScope_explicitGlobals
-- Renders positional parameters plus the optional *args / **kwargs names
-- (the Maybe fields are flattened to zero-or-one element lists).
instance Pretty ParameterTypes where
   pretty (ParameterTypes {..}) =
      prettyVarList "positional:" parameterTypes_pos $$
      prettyVarList "varArgPos:" (maybeToList parameterTypes_varPos) $$
      prettyVarList "varArgKeyword:" (maybeToList parameterTypes_varKeyword)
-- | Render a labelled, space-separated list of identifiers; renders nothing
-- at all when the list is empty. Uses the idiomatic (and O(1)) 'null' test
-- instead of comparing 'length' with zero.
prettyVarList :: String -> [Identifier] -> Doc
prettyVarList label list
   | null list = Pretty.empty
   | otherwise =
        text label <+> (hsep $ map text list)
-- | Render a labelled, space-separated variable set; renders nothing at all
-- when the set is empty.
prettyVarSet :: String -> VarSet -> Doc
prettyVarSet label varSet
   | Set.size varSet == 0 = Pretty.empty
   | otherwise = text label <+> hsep (map text (Set.toList varSet))
-- | Render a nested scope to a human-readable String (for debug output).
renderScope :: NestedScope -> String
renderScope = render . prettyScope
-- Pretty-print a nested scope under a "nested scope:" banner, indented.
prettyScope :: NestedScope -> Doc
prettyScope scope = text "nested scope:" $+$ nest 5 (pretty scope)
-- A locally-defined binding site: class, function, lambda, or comprehension.
data Definition
   = DefStmt StatementSpan -- class, or def
   | DefLambda ExprSpan -- lambda
   -- | forall e . VarUsage e => DefComprehension (ComprehensionSpan e) -- comprehension
   | DefComprehension ComprehensionSpan -- comprehension
-- Aggregated variable usage for one scope; combined across statements via the
-- Monoid instance (not shown in this chunk).
data Usage =
   Usage
   { usage_assigned :: !VarSet -- variables assigned to (written to) in this scope
   , usage_nonlocals :: !VarSet -- variables declared nonlocal in this scope
   , usage_globals :: !VarSet -- variables declared global in this scope
   , usage_referenced :: !VarSet -- variables referred to (read from) in this scope
   , usage_definitions :: ![Definition] -- locally defined lambdas, classes, functions, comprehensions
   }
-- | A nested scope containing no entries.
emptyNestedScope :: NestedScope
emptyNestedScope = NestedScope Map.empty
-- returns the 'local' scope of the top-level of the module and
-- the nested scope of the module (anything not at the top level)
topScope :: ModuleSpan -> IO (LocalScope, NestedScope)
topScope (Module suite) = do
   -- XXX should check that nothing was declared global at the top level
   let Usage {..} = varUsage suite
       -- Module level has no parameters; everything assigned here is a local
       -- of the module and nothing is free or a cell var.
       moduleLocals =
          LocalScope
          { localScope_params = emptyParameterTypes
          , localScope_locals = usage_assigned
          , localScope_freeVars = Set.empty
          , localScope_cellVars = Set.empty
          , localScope_explicitGlobals = Set.empty }
   -- Build scopes for all nested definitions with an empty enclosing-locals
   -- environment; the returned free vars are ignored at the top level.
   (nested, _freeVars) <- runReaderT (foldNestedScopes usage_definitions) emptyVarSet
   return (moduleLocals, nested)
-- | Record one (name, scope) entry in a nested scope, keyed by the
-- source span of its definition.
insertNestedScope :: ScopeIdentifier -> (String, LocalScope) -> NestedScope -> NestedScope
insertNestedScope key value (NestedScope scopeMap) =
   NestedScope (Map.insert key value scopeMap)
-- | Merge two nested scopes (Map.union is left-biased: on a duplicate
-- span key the entry from the first argument is kept).
joinNestedScopes :: NestedScope -> NestedScope -> NestedScope
joinNestedScopes (NestedScope left) (NestedScope right) =
   NestedScope (left `Map.union` right)
-- | Union of two variable sets.
joinVarSets :: VarSet -> VarSet -> VarSet
joinVarSets = Set.union

-- | Build nested scopes for a list of definitions and combine them,
-- forcing both accumulators with seq to avoid building up thunks.
foldNestedScopes :: [Definition] -> ScopeM (NestedScope, VarSet)
foldNestedScopes defs = do
   (scopes, vars) <- mapAndUnzipM buildNestedScope defs
   let joinedScopes = foldl' joinNestedScopes emptyNestedScope scopes
       joinedVars = foldl' joinVarSets emptyVarSet vars
   seq joinedScopes $ seq joinedVars $ return (joinedScopes, joinedVars)
-- | Build the scope entry (and any transitively nested scopes) for one
-- definition, returning the resulting nested scope and the variables
-- that are free in it.
buildNestedScope :: Definition -> ScopeM (NestedScope, VarSet)
buildNestedScope (DefStmt (Fun {..})) = do
   -- a function's usage covers its body and its result annotation
   let usage = varUsage fun_body `mappend`
               varUsage fun_result_annotation
       parameterTypes = parseParameterTypes fun_args
   functionNestedScope usage parameterTypes
      (spanToScopeIdentifier stmt_annot) $ fromIdentString fun_name
buildNestedScope (DefLambda (Lambda {..})) = do
   let usage = varUsage lambda_body
       parameterTypes = parseParameterTypes lambda_args
   functionNestedScope usage parameterTypes
      (spanToScopeIdentifier expr_annot) "<lambda>"
buildNestedScope (DefComprehension (Comprehension {..})) = do
   -- we introduce a new local variable called $result when compiling
   -- comprehensions, when they are desugared into functions
   let resultVarSet = Set.singleton "$result"
       usage = mempty { usage_assigned = resultVarSet
                      , usage_referenced = resultVarSet } `mappend`
               varUsage comprehension_expr `mappend`
               varUsage comprehension_for
       -- comprehensions are turned into functions whose parameters are the
       -- variables which are free in the comprehension. This is equal
       -- to the variables which are referenced but not assigned.
       parameters = usage_referenced usage `Set.difference` usage_assigned usage
       parameterTypes = emptyParameterTypes { parameterTypes_pos = Set.toList parameters }
   functionNestedScope usage parameterTypes
      (spanToScopeIdentifier comprehension_annot) "<comprehension>"
{-
   Classes can have freeVars, but they don't have cellVars.

   We have a problem where a class can have a free variable with the same
   name as a "locally" defined variable.

   def f():
      y = 3
      class C():
         y = 5
         def g():
            nonlocal y
            print(y)

   The g() method of the C() class prints the value 3, because its free
   variable y is bound in the body of f, not in the class definition.

   The bases of a class are actually in the enclosing scope of the class
   definition.

   We record both instances of the variable, and are careful to disambiguate
   when the variables are looked-up in the scope during compilation.
-}
buildNestedScope (DefStmt (Class {..})) = do
   let Usage {..} = varUsage class_body
       locals = usage_assigned
   (thisNestedScope, nestedFreeVars) <- foldNestedScopes usage_definitions
   enclosingScope <- ask
   -- classes keep their free variables but, unlike functions, are
   -- given no cell variables (localScope_cellVars is always empty here)
   let directFreeVars
          = ((usage_referenced `Set.difference` locals) `Set.union`
             usage_nonlocals) `Set.intersection` enclosingScope
       freeVars = directFreeVars `Set.union` nestedFreeVars
   let thisLocalScope =
          LocalScope
          { localScope_params = emptyParameterTypes
          , localScope_locals = locals
          , localScope_freeVars = freeVars
          , localScope_cellVars = Set.empty
          , localScope_explicitGlobals = usage_globals }
   let newScope =
          insertNestedScope (spanToScopeIdentifier stmt_annot)
             (fromIdentString class_name, thisLocalScope)
             thisNestedScope
   return (newScope, freeVars)
-- a Definition is only ever a Fun/Class statement, a lambda, or a
-- comprehension; anything else indicates a compiler bug
buildNestedScope _def =
   error $ "buildNestedScope called on unexpected definition"
-- | Classify the variables of a function-like scope (def, lambda, or a
-- desugared comprehension) into locals, cell variables and free
-- variables, and record the scope under its source span. Returns the
-- new nested scope together with this scope's free variables (which
-- the enclosing scope needs to compute its own cellVars).
functionNestedScope :: Usage
                    -> ParameterTypes
                    -> ScopeIdentifier
                    -> String
                    -> ScopeM (NestedScope, VarSet)
functionNestedScope (Usage {..}) parameters scopeIdentifier name = do
   -- locals: assigned here, minus explicit global/nonlocal
   -- declarations, plus all parameter names
   let locals = (usage_assigned `Set.difference`
                 usage_globals `Set.difference`
                 usage_nonlocals) `Set.union`
                (Set.fromList $ identsFromParameters parameters)
   -- process nested definitions with our locals added to the reader
   -- environment, so they can recognise their own free variables
   (thisNestedScope, nestedFreeVars) <-
      local (Set.union locals) $ foldNestedScopes usage_definitions
   enclosingScope <- ask
   let -- get all the variables which are free in the top level of
       -- this current nested scope
       -- variables which are free in nested scopes and bound in the current scope
       cellVars = locals `Set.intersection` nestedFreeVars
       -- variables which are referenced in the current scope but not local,
       -- or declared nonlocal and are bound in an enclosing scope
       -- (hence free in the current scope).
       directFreeVars
          = ((usage_referenced `Set.difference` locals) `Set.union`
             usage_nonlocals) `Set.intersection` enclosingScope
       -- free variables from nested scopes which are not bound in the
       -- current scope, and thus are free in the current scope
       indirectFreeVars = nestedFreeVars `Set.difference` cellVars
       freeVars = directFreeVars `Set.union` indirectFreeVars
       thisLocalScope =
          LocalScope
          { localScope_params = parameters
          , localScope_locals = locals
          , localScope_freeVars = freeVars
          , localScope_cellVars = cellVars
          , localScope_explicitGlobals = usage_globals }
   let newScope =
          insertNestedScope scopeIdentifier (name, thisLocalScope) thisNestedScope
   return (newScope, freeVars)
-- separate the positional parameters from the positional varargs and the
-- keyword varargs
-- | Classify a parameter list into positional names, the optional
-- *args name, and the optional **kwargs name.
parseParameterTypes :: [ParameterSpan] -> ParameterTypes
parseParameterTypes = parseAcc [] Nothing Nothing
   where
   -- accumulator walk: positional names are collected in reverse
   -- (and reversed at the end); *args / **kwargs are remembered when seen
   parseAcc :: [Identifier] -> Maybe Identifier -> Maybe Identifier -> [ParameterSpan] -> ParameterTypes
   parseAcc pos varPos varKeyword [] =
      ParameterTypes { parameterTypes_pos = reverse pos
                     , parameterTypes_varPos = varPos
                     , parameterTypes_varKeyword = varKeyword }
   parseAcc pos varPos varKeyword (param:rest) =
      case param of
         Param {..} -> parseAcc (fromIdentString param_name : pos) varPos varKeyword rest
         VarArgsPos {..} -> parseAcc pos (Just $ fromIdentString param_name) varKeyword rest
         VarArgsKeyword {..} -> parseAcc pos varPos (Just $ fromIdentString param_name) rest
         -- other parameter forms carry no binding name for our purposes
         _other -> parseAcc pos varPos varKeyword rest
-- | Usages combine field-wise: sets by union, definition lists by
-- concatenation.
instance Semigroup Usage where
   u1 <> u2
      = Usage
        { usage_assigned = usage_assigned u1 <> usage_assigned u2
        , usage_nonlocals = usage_nonlocals u1 <> usage_nonlocals u2
        , usage_referenced = usage_referenced u1 <> usage_referenced u2
        , usage_globals = usage_globals u1 <> usage_globals u2
        , usage_definitions = usage_definitions u1 <> usage_definitions u2 }
instance Monoid Usage where
   -- the empty usage: nothing assigned, declared, referenced or defined
   mempty = Usage
      { usage_assigned = Set.empty
      , usage_nonlocals = Set.empty
      , usage_globals = Set.empty
      , usage_referenced = Set.empty
      , usage_definitions = [] }

instance Semigroup ParameterTypes where
   -- field-wise mappend of the two parameter classifications
   (ParameterTypes pos1 varPos1 varKeyword1) <> (ParameterTypes pos2 varPos2 varKeyword2)
      = ParameterTypes (pos1 `mappend` pos2)
                       (varPos1 `mappend` varPos2)
                       (varKeyword1 `mappend` varKeyword2)

instance Monoid ParameterTypes where
   mempty =
      ParameterTypes
      { parameterTypes_pos = []
      , parameterTypes_varPos = Nothing
      , parameterTypes_varKeyword = Nothing
      }
-- determine the set of variables which are either assigned to or explicitly
-- declared global or nonlocal in the current scope.
class VarUsage t where
   varUsage :: t -> Usage

instance VarUsage t => VarUsage [t] where
   -- a list's usage is the combined usage of all its elements
   varUsage = mconcat . Prelude.map varUsage

instance (VarUsage t1, VarUsage t2) => VarUsage (t1, t2) where
   varUsage (x, y) = varUsage x `mappend` varUsage y
instance VarUsage a => VarUsage (Maybe a) where
   -- Nothing contributes no usage; Just defers to the wrapped value
   varUsage = maybe mempty varUsage
instance VarUsage StatementSpan where
   varUsage (While {..})
      = varUsage while_cond `mappend`
        varUsage while_body `mappend`
        varUsage while_else
   varUsage (For {..})
      = varUsage (AssignTargets $ for_targets) `mappend`
        varUsage for_generator `mappend`
        varUsage for_body `mappend`
        varUsage for_else
   -- Any varUsage made inside a function body are not collected.
   -- The function name _is_ collected, because it is assigned in the current scope,
   -- likewise for the class name.
   varUsage stmt@(Fun {..})
      = mempty { usage_assigned = singleVarSet fun_name
               , usage_definitions = [DefStmt stmt] }
   -- the bases of the Class are referenced within the scope that defines the class
   -- as opposed to being referenced in the body of the class
   varUsage stmt@(Class {..})
      = mempty { usage_assigned = singleVarSet class_name
               , usage_definitions = [DefStmt stmt] } `mappend`
        varUsage class_args
   varUsage (Conditional {..})
      = varUsage cond_guards `mappend` varUsage cond_else
   varUsage (Assign {..})
      = varUsage (AssignTargets assign_to) `mappend` varUsage assign_expr
   varUsage (AugmentedAssign {..})
      = varUsage [aug_assign_to] `mappend` varUsage aug_assign_expr
   -- NOTE(review): only the decorated definition is visited here; the
   -- decorator expressions themselves are not collected — confirm intended
   varUsage (Decorated {..})
      = varUsage decorated_def
   varUsage (Try {..})
      = varUsage try_body `mappend` varUsage try_excepts `mappend`
        varUsage try_else `mappend` varUsage try_finally
   varUsage (With {..})
      = varUsage with_context `mappend`
        varUsage with_body
   varUsage (Global {..})
      = mempty { usage_globals = Set.fromList $ Prelude.map fromIdentString global_vars }
   varUsage (NonLocal {..})
      = mempty { usage_nonlocals = Set.fromList $ Prelude.map fromIdentString nonLocal_vars }
   varUsage (StmtExpr {..}) = varUsage stmt_expr
   varUsage (Assert {..}) = varUsage assert_exprs
   varUsage (Return {..}) = varUsage return_expr
   varUsage (Raise {..}) = varUsage raise_expr
   varUsage (Delete {..}) = varUsage del_exprs
   -- statements with no variable content (pass, break, continue, import, ...)
   varUsage _other = mempty
instance VarUsage HandlerSpan where
   -- an except handler contributes its clause (exception expr / as-name)
   -- and its body
   varUsage (Handler {..}) = varUsage handler_clause `mappend` varUsage handler_suite
instance VarUsage ExceptClauseSpan where
   -- "except E as name:": E is read, name is assigned;
   -- a bare "except:" contributes nothing
   varUsage (ExceptClause {..}) =
      case except_clause of
         Nothing -> mempty
         Just (except, Nothing) -> varUsage except
         Just (except, Just asName) ->
            varUsage except `mappend` varUsage (AssignTargets [asName])
-- | Only the Python 3 form of raise is supported by this compiler.
instance VarUsage RaiseExprSpan where
   varUsage (RaiseV3 maybeExpr) = varUsage maybeExpr
   -- the parser should never generate the following, but we need
   -- code to make non-exhaustive pattern warnings go away.
   varUsage _other = error $ "varUsage on Python version 2 style raise statement"
instance VarUsage ExprSpan where
   -- a bare variable in expression position is a read
   varUsage (Var {..}) =
      mempty { usage_referenced = singleVarSet var_ident }
   varUsage (Call {..}) =
      varUsage call_fun `mappend` varUsage call_args
   varUsage (Subscript {..}) =
      varUsage subscriptee `mappend`
      varUsage subscript_expr
   varUsage (SlicedExpr {..}) =
      varUsage slicee `mappend` varUsage slices
   varUsage (CondExpr {..}) =
      varUsage ce_true_branch `mappend`
      varUsage ce_condition `mappend`
      varUsage ce_false_branch
   -- if it is a dot operator then the right argument must be a global name
   -- but it is not defined in this module so we can ignore it
   varUsage (BinaryOp {..}) =
      varUsage left_op_arg `mappend` varUsage right_op_arg
   -- retired guarded variant, kept for reference (had lost its comment markers):
   --    | Dot {} <- operator = varUsage left_op_arg
   --    | otherwise = varUsage left_op_arg `mappend` varUsage right_op_arg
   varUsage (Dot { dot_expr = e }) = varUsage e
   varUsage (UnaryOp {..}) = varUsage op_arg
   -- a lambda body is analysed in its own nested scope, not here
   varUsage expr@(Lambda {..}) = mempty { usage_definitions = [DefLambda expr] }
   varUsage (Tuple {..}) = varUsage tuple_exprs
   -- pre-YieldArg form, kept for reference (had lost its comment markers):
   --    varUsage (Yield {..}) = varUsage yield_expr
   varUsage (Yield {..}) = varUsage yield_arg
   varUsage (Generator {..}) =
      mempty { usage_definitions = [DefComprehension gen_comprehension] }
   varUsage (ListComp {..}) =
      mempty { usage_definitions = [DefComprehension list_comprehension] }
   varUsage (List {..}) = varUsage list_exprs
   varUsage (Dictionary {..}) = varUsage dict_mappings
   varUsage (DictComp {..}) =
      mempty { usage_definitions = [DefComprehension dict_comprehension] }
   varUsage (Set {..}) = varUsage set_exprs
   varUsage (SetComp {..}) =
      mempty { usage_definitions = [DefComprehension set_comprehension] }
   varUsage (Starred {..}) = varUsage starred_expr
   varUsage (Paren {..}) = varUsage paren_expr
   -- literals and other leaf expressions contribute nothing
   varUsage _other = mempty
instance VarUsage YieldArgSpan where
   -- both "yield from e" and "yield e" read from e
   varUsage (YieldFrom e _) = varUsage e
   varUsage (YieldExpr e) = varUsage e
instance VarUsage ArgumentSpan where
   -- all four argument constructors (ArgExpr, ArgVarArgsPos,
   -- ArgVarArgsKeyword, ArgKeyword) carry their expression in the
   -- arg_expr field, so a single clause covers every case
   varUsage argument = varUsage (arg_expr argument)
instance VarUsage SliceSpan where
   -- a proper slice reads its lower/upper/stride expressions
   varUsage (SliceProper {..}) =
      varUsage slice_lower `mappend`
      varUsage slice_upper `mappend`
      varUsage slice_stride
   varUsage (SliceExpr {..}) = varUsage slice_expr
   varUsage (SliceEllipsis {}) = mempty

instance VarUsage ComprehensionSpan where
   -- the comprehension's result expression plus its for-clauses
   varUsage (Comprehension {..}) =
      varUsage comprehension_expr `mappend`
      varUsage comprehension_for

instance VarUsage ComprehensionExprSpan where
   varUsage (ComprehensionExpr e) = varUsage e
   varUsage (ComprehensionDict mapping) = varUsage mapping

instance VarUsage CompForSpan where
   -- "for targets in expr": the targets are assigned, the iterable is read
   varUsage (CompFor {..}) =
      varUsage (AssignTargets comp_for_exprs) `mappend`
      varUsage comp_in_expr `mappend`
      varUsage comp_for_iter

instance VarUsage CompIterSpan where
   varUsage (IterFor {..}) = varUsage comp_iter_for
   varUsage (IterIf {..}) = varUsage comp_iter_if

instance VarUsage CompIfSpan where
   varUsage (CompIf {..}) =
      varUsage comp_if `mappend`
      varUsage comp_if_iter

instance VarUsage DictKeyDatumListSpan where
   -- a dict entry reads both key and value; "**e" unpacking reads e
   varUsage (DictMappingPair e1 e2) =
      varUsage e1 `mappend` varUsage e2
   varUsage (DictUnpacking e) = varUsage e
-- | A list of expressions appearing in assignment-target (pattern) position.
newtype AssignTargets = AssignTargets [ExprSpan]

-- Collect all the variables which are assigned to in a list of expressions (patterns).
-- XXX we should support starred assign targets.
instance VarUsage AssignTargets where
   varUsage (AssignTargets exprs) = foldl' addUsage mempty exprs
      where
      addUsage :: Usage -> ExprSpan -> Usage
      addUsage usage expr = targetUsage expr `mappend` usage
      targetUsage :: ExprSpan -> Usage
      -- a bare variable in target position is an assignment to it
      targetUsage (Var {..}) = mempty { usage_assigned = singleVarSet var_ident }
      -- list/tuple/paren patterns recurse into their components
      targetUsage (List {..}) = varUsage $ AssignTargets list_exprs
      targetUsage (Tuple {..}) = varUsage $ AssignTargets tuple_exprs
      targetUsage (Paren {..}) = targetUsage paren_expr
      -- all variables mentioned in a subscript, attribute lookup
      -- and sliced expr are read from, not written to
      targetUsage expr@(Subscript {..}) = varUsage expr
      targetUsage expr@(BinaryOp{..}) = varUsage expr
      targetUsage expr@(SlicedExpr{..}) = varUsage expr
      targetUsage expr@(Dot {..}) = varUsage expr
      targetUsage other = error $ "Unsupported assignTarget: " ++ show other
-- | Build a one-element variable set from an AST identifier.
singleVarSet :: AST.Ident a -> VarSet
singleVarSet ident = Set.singleton (fromIdentString ident)
| null | https://raw.githubusercontent.com/bjpop/blip/3d9105a44d1afb7bd007da3742fb19dc69372e10/blipcompiler/src/Blip/Compiler/Scope.hs | haskell | # LANGUAGE TypeSynonymInstances, FlexibleInstances, RecordWildCards, PatternGuards #
---------------------------------------------------------------------------
|
Module : Blip.Compiler.Scope
License : BSD-style
Maintainer :
Stability : experimental
A variable can be:
explicit global
implicit global
local
free
Global variables are either:
- defined (assigned) at the top level of a module
OR
- declared global in a nested scope
Local variables are (with respect to the current scope) either:
- Assigned in the current local scope AND not declared global or non-local.
OR
- Parameters to a function definition.
Free variables are (with respect to the current scope):
- Local to an enclosing scope AND either:
- Declared non-local in the current scope.
OR
- Read from but not assigned-to in the current local scope.
- Local to the current scope.
AND
- Free variables of a scope which is nested from the current scope.
variable binding itself are visible in the closure. They are implemented
as a pointer to a heap allocated cell, which itself points to a Python
object. The extra level of indirection allows the cell to be updated to
point to something else.
---------------------------------------------------------------------------
class, function, lambda, or comprehension
class, or def
lambda
comprehension
comprehension
variables assigned to (written to) in this scope
variables declared nonlocal in this scope
variables declared global in this scope
variables referred to (read from) in this scope
locally defined lambdas, classes, functions, comprehensions
returns the 'local' scope of the top-level of the module and
the nested scope of the module (anything not at the top level)
XXX should check that nothing was declared global at the top level
we introduce a new local variable called $result when compiling
comprehensions, when they are desugared into functions
variables which are free in the comprehension. This is equal
to the variables which are referenced but not assigned.
get all the variables which are free in the top level of
this current nested scope
variables which are free in nested scopes and bound in the current scope
variables which are referenced in the current scope but not local,
or declared nonlocal and are bound in an enclosing scope
(hence free in the current scope).
free variables from nested scopes which are not bound in the
current scope, and thus are free in the current scope
separate the positional parameters from the positional varargs and the
determine the set of variables which are either assigned to or explicitly
declared global or nonlocal in the current scope.
The function name _is_ collected, because it is assigned in the current scope,
likewise for the class name.
the bases of the Class are referenced within the scope that defines the class
as opposed to being referenced in the body of the class
the parser should never generate the following, but we need
code to make non-exhaustive pattern warnings go away.
if it is a dot operator then the right argument must be a global name
but it is not defined in this module so we can ignore it
| otherwise = varUsage left_op_arg `mappend` varUsage right_op_arg
Collect all the variables which are assigned to in a list of expressions (patterns).
XXX we should support starred assign targets.
all variables mentioned in a subscript, attribute lookup
and sliced expr are read from, not written to |
Copyright : ( c ) 2012 , 2013 , 2014
Portability : ghc
Cellvars are :
Cellvars are used to implement closures such that modifications to the
module Blip.Compiler.Scope
(topScope, renderScope)
where
import Blip.Compiler.Types
( Identifier, VarSet, LocalScope (..)
, NestedScope (..), ScopeIdentifier, ParameterTypes (..) )
import Data.Set as Set
( empty, singleton, fromList, union, difference
, intersection, toList, size )
import Data.Map as Map (empty, insert, toList, union)
import Data.List (foldl', intersperse)
import Language.Python.Common.AST as AST
( Statement (..), StatementSpan, Ident (..), Expr (..), ExprSpan
, Argument (..), ArgumentSpan, RaiseExpr (..), RaiseExprSpan
, Slice (..), SliceSpan, ModuleSpan, Module (..), ParameterSpan
, YieldArg (..), YieldArgSpan
, Parameter (..), Op (..), Comprehension (..), ComprehensionSpan
, ComprehensionExpr (..), ComprehensionExprSpan
, DictKeyDatumList(..), DictKeyDatumListSpan
, CompIter (..), CompIterSpan, CompFor (..), CompForSpan, CompIf (..)
, CompIfSpan, Handler (..), HandlerSpan, ExceptClause (..), ExceptClauseSpan )
import Data.Monoid (Monoid (..))
import Control.Monad (mapAndUnzipM)
import Control.Monad.Reader (ReaderT, local, ask, runReaderT)
import Text.PrettyPrint.HughesPJ as Pretty
( Doc, ($$), nest, text, vcat, hsep, ($+$), (<+>), empty
, render, parens, comma, int, hcat )
import Blip.Pretty (Pretty (..))
import Blip.Compiler.State (emptyVarSet, emptyParameterTypes)
import Blip.Compiler.Utils ( identsFromParameters, spanToScopeIdentifier
, fromIdentString, maybeToList )
type ScopeM a = ReaderT VarSet IO a
instance Pretty ScopeIdentifier where
pretty (row1, col1, row2, col2) =
parens $ hcat $ intersperse comma $ map int [row1, col1, row2, col2]
instance Pretty NestedScope where
pretty (NestedScope scope) =
vcat $ map prettyLocalScope identsScopes
where
identsScopes = Map.toList scope
prettyLocalScope :: (ScopeIdentifier, (String, LocalScope)) -> Doc
prettyLocalScope (span, (identifier, defScope)) =
text identifier <+> pretty span <+> text "->" $$
nest 5 (pretty defScope)
instance Pretty LocalScope where
pretty (LocalScope {..}) =
text "params:" <+> (nest 5 $ pretty localScope_params) $$
prettyVarSet "locals:" localScope_locals $$
prettyVarSet "freevars:" localScope_freeVars $$
prettyVarSet "cellvars:" localScope_cellVars $$
prettyVarSet "globals:" localScope_explicitGlobals
instance Pretty ParameterTypes where
pretty (ParameterTypes {..}) =
prettyVarList "positional:" parameterTypes_pos $$
prettyVarList "varArgPos:" (maybeToList parameterTypes_varPos) $$
prettyVarList "varArgKeyword:" (maybeToList parameterTypes_varKeyword)
prettyVarList :: String -> [Identifier] -> Doc
prettyVarList label list
| length list == 0 = Pretty.empty
| otherwise =
text label <+> (hsep $ map text list)
prettyVarSet :: String -> VarSet -> Doc
prettyVarSet label varSet
| Set.size varSet == 0 = Pretty.empty
| otherwise =
text label <+>
(hsep $ map text $ Set.toList varSet)
renderScope :: NestedScope -> String
renderScope = render . prettyScope
prettyScope :: NestedScope -> Doc
prettyScope nestedScope =
text "nested scope:" $+$
(nest 5 $ pretty nestedScope)
data Definition
data Usage =
Usage
}
emptyNestedScope :: NestedScope
emptyNestedScope = NestedScope Map.empty
topScope :: ModuleSpan -> IO (LocalScope, NestedScope)
topScope (Module suite) = do
let Usage {..} = varUsage suite
moduleLocals =
LocalScope
{ localScope_params = emptyParameterTypes
, localScope_locals = usage_assigned
, localScope_freeVars = Set.empty
, localScope_cellVars = Set.empty
, localScope_explicitGlobals = Set.empty }
(nested, _freeVars) <- runReaderT (foldNestedScopes usage_definitions) emptyVarSet
return (moduleLocals, nested)
insertNestedScope :: ScopeIdentifier -> (String, LocalScope) -> NestedScope -> NestedScope
insertNestedScope key value (NestedScope scope) =
NestedScope $ Map.insert key value scope
joinNestedScopes :: NestedScope -> NestedScope -> NestedScope
joinNestedScopes (NestedScope scope1) (NestedScope scope2)
= NestedScope $ Map.union scope1 scope2
joinVarSets :: VarSet -> VarSet -> VarSet
joinVarSets = Set.union
foldNestedScopes :: [Definition] -> ScopeM (NestedScope, VarSet)
foldNestedScopes defs = do
(scopes, vars) <- mapAndUnzipM buildNestedScope defs
let joinedScopes = foldl' joinNestedScopes emptyNestedScope scopes
joinedVars = foldl' joinVarSets emptyVarSet vars
seq joinedScopes $ seq joinedVars $ return (joinedScopes, joinedVars)
buildNestedScope :: Definition -> ScopeM (NestedScope, VarSet)
buildNestedScope (DefStmt (Fun {..})) = do
let usage = varUsage fun_body `mappend`
varUsage fun_result_annotation
parameterTypes = parseParameterTypes fun_args
functionNestedScope usage parameterTypes
(spanToScopeIdentifier stmt_annot) $ fromIdentString fun_name
buildNestedScope (DefLambda (Lambda {..})) = do
let usage = varUsage lambda_body
parameterTypes = parseParameterTypes lambda_args
functionNestedScope usage parameterTypes
(spanToScopeIdentifier expr_annot) "<lambda>"
buildNestedScope (DefComprehension (Comprehension {..})) = do
let resultVarSet = Set.singleton "$result"
usage = mempty { usage_assigned = resultVarSet
, usage_referenced = resultVarSet } `mappend`
varUsage comprehension_expr `mappend`
varUsage comprehension_for
are turned into functions whose parameters are the
parameters = usage_referenced usage `Set.difference` usage_assigned usage
parameterTypes = emptyParameterTypes { parameterTypes_pos = Set.toList parameters }
functionNestedScope usage parameterTypes
(spanToScopeIdentifier comprehension_annot) "<comprehension>"
Classes can have freeVars , but they do n't have cellVars .
We have a problem where a class can have a free variable with the same
name as a " locally " defined variable .
def f ( ):
y = 3
class C ( ):
y = 5
def g ( ):
nonlocal y
print(y )
The g ( ) method of the C ( ) class prints the value 3 , because its free
variable y is bound in the body of f , not in the class definition .
The bases of a class are actually in the enclosing scope of the class
definition .
We record both instances of the variable , and are careful to disambiguate
when the variables are looked - up in the scope during compilation .
Classes can have freeVars, but they don't have cellVars.
We have a problem where a class can have a free variable with the same
name as a "locally" defined variable.
def f():
y = 3
class C():
y = 5
def g():
nonlocal y
print(y)
The g() method of the C() class prints the value 3, because its free
variable y is bound in the body of f, not in the class definition.
The bases of a class are actually in the enclosing scope of the class
definition.
We record both instances of the variable, and are careful to disambiguate
when the variables are looked-up in the scope during compilation.
-}
buildNestedScope (DefStmt (Class {..})) = do
let Usage {..} = varUsage class_body
locals = usage_assigned
(thisNestedScope, nestedFreeVars) <- foldNestedScopes usage_definitions
enclosingScope <- ask
let directFreeVars
= ((usage_referenced `Set.difference` locals) `Set.union`
usage_nonlocals) `Set.intersection` enclosingScope
freeVars = directFreeVars `Set.union` nestedFreeVars
let thisLocalScope =
LocalScope
{ localScope_params = emptyParameterTypes
, localScope_locals = locals
, localScope_freeVars = freeVars
, localScope_cellVars = Set.empty
, localScope_explicitGlobals = usage_globals }
let newScope =
insertNestedScope (spanToScopeIdentifier stmt_annot)
(fromIdentString class_name, thisLocalScope)
thisNestedScope
return (newScope, freeVars)
buildNestedScope _def =
error $ "buildNestedScope called on unexpected definition"
functionNestedScope :: Usage
-> ParameterTypes
-> ScopeIdentifier
-> String
-> ScopeM (NestedScope, VarSet)
functionNestedScope (Usage {..}) parameters scopeIdentifier name = do
let locals = (usage_assigned `Set.difference`
usage_globals `Set.difference`
usage_nonlocals) `Set.union`
(Set.fromList $ identsFromParameters parameters)
(thisNestedScope, nestedFreeVars) <-
local (Set.union locals) $ foldNestedScopes usage_definitions
enclosingScope <- ask
cellVars = locals `Set.intersection` nestedFreeVars
directFreeVars
= ((usage_referenced `Set.difference` locals) `Set.union`
usage_nonlocals) `Set.intersection` enclosingScope
indirectFreeVars = nestedFreeVars `Set.difference` cellVars
freeVars = directFreeVars `Set.union` indirectFreeVars
thisLocalScope =
LocalScope
{ localScope_params = parameters
, localScope_locals = locals
, localScope_freeVars = freeVars
, localScope_cellVars = cellVars
, localScope_explicitGlobals = usage_globals }
let newScope =
insertNestedScope scopeIdentifier (name, thisLocalScope) thisNestedScope
return (newScope, freeVars)
keyword
parseParameterTypes :: [ParameterSpan] -> ParameterTypes
parseParameterTypes = parseAcc [] Nothing Nothing
where
parseAcc :: [Identifier] -> Maybe Identifier -> Maybe Identifier -> [ParameterSpan] -> ParameterTypes
parseAcc pos varPos varKeyword [] =
ParameterTypes { parameterTypes_pos = reverse pos
, parameterTypes_varPos = varPos
, parameterTypes_varKeyword = varKeyword }
parseAcc pos varPos varKeyword (param:rest) =
case param of
Param {..} -> parseAcc (fromIdentString param_name : pos) varPos varKeyword rest
VarArgsPos {..} -> parseAcc pos (Just $ fromIdentString param_name) varKeyword rest
VarArgsKeyword {..} -> parseAcc pos varPos (Just $ fromIdentString param_name) rest
_other -> parseAcc pos varPos varKeyword rest
instance Semigroup Usage where
x <> y
= Usage
{ usage_assigned = usage_assigned x `mappend` usage_assigned y
, usage_nonlocals = usage_nonlocals x `mappend` usage_nonlocals y
, usage_referenced = usage_referenced x `mappend` usage_referenced y
, usage_globals = usage_globals x `mappend` usage_globals y
, usage_definitions = usage_definitions x `mappend` usage_definitions y }
instance Monoid Usage where
mempty = Usage
{ usage_assigned = Set.empty
, usage_nonlocals = Set.empty
, usage_globals = Set.empty
, usage_referenced = Set.empty
, usage_definitions = [] }
instance Semigroup ParameterTypes where
(ParameterTypes pos1 varPos1 varKeyword1) <> (ParameterTypes pos2 varPos2 varKeyword2)
= ParameterTypes (pos1 `mappend` pos2)
(varPos1 `mappend` varPos2)
(varKeyword1 `mappend` varKeyword2)
instance Monoid ParameterTypes where
mempty =
ParameterTypes
{ parameterTypes_pos = []
, parameterTypes_varPos = Nothing
, parameterTypes_varKeyword = Nothing
}
-- | Things from which a variable 'Usage' can be collected.
class VarUsage t where
   varUsage :: t -> Usage

-- containers simply combine the usage of their elements

instance VarUsage t => VarUsage [t] where
   varUsage = foldMap varUsage

instance (VarUsage t1, VarUsage t2) => VarUsage (t1, t2) where
   varUsage (x, y) = varUsage x <> varUsage y

instance VarUsage a => VarUsage (Maybe a) where
   varUsage = maybe mempty varUsage
-- | Collect variable usage from one statement. Nested 'Fun' and 'Class'
-- bodies are not descended into here; they are recorded as definitions and
-- analysed later in their own scope.
instance VarUsage StatementSpan where
   varUsage (While {..})
      = varUsage while_cond `mappend`
        varUsage while_body `mappend`
        varUsage while_else
   varUsage (For {..})
      = varUsage (AssignTargets $ for_targets) `mappend`
        varUsage for_generator `mappend`
        varUsage for_body `mappend`
        varUsage for_else
   -- Any varUsage made inside a function body are not collected.
   -- (comment restored: this line had lost its comment marker)
   varUsage stmt@(Fun {..})
      = mempty { usage_assigned = singleVarSet fun_name
               , usage_definitions = [DefStmt stmt] }
   varUsage stmt@(Class {..})
      = mempty { usage_assigned = singleVarSet class_name
               , usage_definitions = [DefStmt stmt] } `mappend`
        varUsage class_args
   varUsage (Conditional {..})
      = varUsage cond_guards `mappend` varUsage cond_else
   varUsage (Assign {..})
      = varUsage (AssignTargets assign_to) `mappend` varUsage assign_expr
   varUsage (AugmentedAssign {..})
      = varUsage [aug_assign_to] `mappend` varUsage aug_assign_expr
   varUsage (Decorated {..})
      = varUsage decorated_def
   varUsage (Try {..})
      = varUsage try_body `mappend` varUsage try_excepts `mappend`
        varUsage try_else `mappend` varUsage try_finally
   varUsage (With {..})
      = varUsage with_context `mappend`
        varUsage with_body
   -- global/nonlocal declarations only record the declared names
   varUsage (Global {..})
      = mempty { usage_globals = Set.fromList $ Prelude.map fromIdentString global_vars }
   varUsage (NonLocal {..})
      = mempty { usage_nonlocals = Set.fromList $ Prelude.map fromIdentString nonLocal_vars }
   varUsage (StmtExpr {..}) = varUsage stmt_expr
   varUsage (Assert {..}) = varUsage assert_exprs
   varUsage (Return {..}) = varUsage return_expr
   varUsage (Raise {..}) = varUsage raise_expr
   varUsage (Delete {..}) = varUsage del_exprs
   -- statements with no variable content (pass, break, ...)
   varUsage _other = mempty
-- | An exception handler contributes the usage of its clause and its body.
instance VarUsage HandlerSpan where
   varUsage (Handler {..}) = varUsage handler_clause `mappend` varUsage handler_suite

-- | @except E as name:@ also assigns @name@, hence the 'AssignTargets'
-- wrapping below.
instance VarUsage ExceptClauseSpan where
   varUsage (ExceptClause {..}) =
      case except_clause of
         Nothing -> mempty
         Just (except, maybeAs) ->
            case maybeAs of
               Nothing -> varUsage except
               Just asName -> varUsage except `mappend` (varUsage $ AssignTargets [asName])

-- Python 2 style raise statements are rejected outright.
instance VarUsage RaiseExprSpan where
   varUsage (RaiseV3 maybeExpr) = varUsage maybeExpr
   varUsage _other = error $ "varUsage on Python version 2 style raise statement"
-- | Collect variable usage from an expression. Plain 'Var' occurrences are
-- references; lambdas and comprehensions are recorded as definitions to be
-- analysed in their own scope.
instance VarUsage ExprSpan where
   varUsage (Var {..}) =
      mempty { usage_referenced = singleVarSet var_ident }
   varUsage (Call {..}) =
      varUsage call_fun `mappend` varUsage call_args
   varUsage (Subscript {..}) =
      varUsage subscriptee `mappend`
      varUsage subscript_expr
   varUsage (SlicedExpr {..}) =
      varUsage slicee `mappend` varUsage slices
   varUsage (CondExpr {..}) =
      varUsage ce_true_branch `mappend`
      varUsage ce_condition `mappend`
      varUsage ce_false_branch
   varUsage (BinaryOp {..}) =
      varUsage left_op_arg `mappend` varUsage right_op_arg
   -- disabled special case restored as a comment (it had lost its marker):
   --    | Dot {} <- operator = varUsage left_op_arg
   varUsage (Dot { dot_expr = e }) = varUsage e
   varUsage (UnaryOp {..}) = varUsage op_arg
   varUsage expr@(Lambda {..}) = mempty { usage_definitions = [DefLambda expr] }
   varUsage (Tuple {..}) = varUsage tuple_exprs
   -- old AST variant restored as a comment (it had lost its marker):
   --    varUsage (Yield {..}) = varUsage yield_expr
   varUsage (Yield {..}) = varUsage yield_arg
   varUsage (Generator {..}) =
      mempty { usage_definitions = [DefComprehension gen_comprehension] }
   varUsage (ListComp {..}) =
      mempty { usage_definitions = [DefComprehension list_comprehension] }
   varUsage (List {..}) = varUsage list_exprs
   varUsage (Dictionary {..}) = varUsage dict_mappings
   varUsage (DictComp {..}) =
      mempty { usage_definitions = [DefComprehension dict_comprehension] }
   varUsage (Set {..}) = varUsage set_exprs
   varUsage (SetComp {..}) =
      mempty { usage_definitions = [DefComprehension set_comprehension] }
   varUsage (Starred {..}) = varUsage starred_expr
   varUsage (Paren {..}) = varUsage paren_expr
   -- literals and other leaf expressions use no variables
   varUsage _other = mempty
-- the remaining instances simply walk structurally into sub-expressions

instance VarUsage YieldArgSpan where
   varUsage (YieldFrom e _) = varUsage e
   varUsage (YieldExpr e) = varUsage e

instance VarUsage ArgumentSpan where
   varUsage (ArgExpr {..}) = varUsage arg_expr
   varUsage (ArgVarArgsPos {..}) = varUsage arg_expr
   varUsage (ArgVarArgsKeyword {..}) = varUsage arg_expr
   varUsage (ArgKeyword {..}) = varUsage arg_expr

instance VarUsage SliceSpan where
   varUsage (SliceProper {..}) =
      varUsage slice_lower `mappend`
      varUsage slice_upper `mappend`
      varUsage slice_stride
   varUsage (SliceExpr {..}) = varUsage slice_expr
   varUsage (SliceEllipsis {}) = mempty

instance VarUsage ComprehensionSpan where
   varUsage (Comprehension {..}) =
      varUsage comprehension_expr `mappend`
      varUsage comprehension_for

instance VarUsage ComprehensionExprSpan where
   varUsage (ComprehensionExpr e) = varUsage e
   varUsage (ComprehensionDict mapping) = varUsage mapping

-- the @for@ part of a comprehension binds its targets, hence 'AssignTargets'
instance VarUsage CompForSpan where
   varUsage (CompFor {..}) =
      varUsage (AssignTargets comp_for_exprs) `mappend`
      varUsage comp_in_expr `mappend`
      varUsage comp_for_iter

instance VarUsage CompIterSpan where
   varUsage (IterFor {..}) = varUsage comp_iter_for
   varUsage (IterIf {..}) = varUsage comp_iter_if

instance VarUsage CompIfSpan where
   varUsage (CompIf {..}) =
      varUsage comp_if `mappend`
      varUsage comp_if_iter

instance VarUsage DictKeyDatumListSpan where
   varUsage (DictMappingPair e1 e2) =
      varUsage e1 `mappend` varUsage e2
   varUsage (DictUnpacking e) = varUsage e
-- | Wrapper marking a list of expressions as assignment targets, so that a
-- plain variable inside them counts as an assignment rather than a reference.
newtype AssignTargets = AssignTargets [ExprSpan]

instance VarUsage AssignTargets where
   varUsage (AssignTargets exprs) = foldl' addUsage mempty exprs
      where
      addUsage :: Usage -> ExprSpan -> Usage
      addUsage usage expr = targetUsage expr `mappend` usage
      targetUsage :: ExprSpan -> Usage
      -- a bare variable target is an assignment of that variable
      targetUsage (Var {..}) = mempty { usage_assigned = singleVarSet var_ident }
      -- list/tuple targets destructure: each element is itself a target
      targetUsage (List {..}) = varUsage $ AssignTargets list_exprs
      targetUsage (Tuple {..}) = varUsage $ AssignTargets tuple_exprs
      targetUsage (Paren {..}) = targetUsage paren_expr
      -- subscript/attribute/slice targets only *reference* their base object
      targetUsage expr@(Subscript {..}) = varUsage expr
      targetUsage expr@(BinaryOp{..}) = varUsage expr
      targetUsage expr@(SlicedExpr{..}) = varUsage expr
      targetUsage expr@(Dot {..}) = varUsage expr
      targetUsage other = error $ "Unsupported assignTarget: " ++ show other
-- | The singleton variable set for one identifier.
singleVarSet :: AST.Ident a -> VarSet
singleVarSet ident = Set.singleton (fromIdentString ident)
|
ce89dd4f175b49eafa5224a91993dfe927e5ef71808981cef2160521d9f54897 | apibot-org/apibot | projects.clj | (ns apibot.routes.projects
(:require
[apibot.db.projects :as db.projects]
[apibot.schemas :refer [Project]]
[cats.monad.exception :as exception]
[compojure.api.sweet :refer [defapi context GET PUT DELETE]]
[ring.util.http-response :as response :refer [ok]]
[schema.core :as s]))
;; REST API for projects. Fix: the DELETE route's :summary was a copy-paste
;; of the GET summary ("Returns all the graphs...").
(defapi api-projects
  {:swagger {:ui "/swagger/projects"
             :spec "/swagger/projects.json"
             :data {:info {:version "1.0.0"
                           :title "Projects API"
                           :description "API for managing projects"}}}}
  (context "/api/1/projects" []
    :tags ["Projects"]
    (GET "/" []
      :return [Project]
      :query-params [user-id :- s/Str]
      :summary "Returns all the projects that belong to the current user."
      (ok (db.projects/find-by-user-id user-id)))
    (DELETE "/:project-id" []
      :path-params [project-id :- s/Str]
      :query-params [user-id :- s/Str]
      :summary "Deletes the given project of the current user."
      (ok {:removed (db.projects/remove-by-id user-id project-id)}))
    (PUT "/" []
      :return Project
      :query-params [user-id :- s/Str]
      :body-params [project :- Project]
      :summary "Upserts a project"
      (cond
        ;; the client-side placeholder project must never be persisted
        (= "default" (:id project))
        (response/bad-request
          {:title "Default project not allowed"
           :message "The default project cannot be stored."})
        :else
        ;; save returns a cats exception monad value; extract unwraps it
        (-> (db.projects/save (assoc project :user-id user-id))
            (exception/extract)
            (ok))))))
| null | https://raw.githubusercontent.com/apibot-org/apibot/26c77c688980549a8deceeeb39f01108be016435/src/clj/apibot/routes/projects.clj | clojure | (ns apibot.routes.projects
(:require
[apibot.db.projects :as db.projects]
[apibot.schemas :refer [Project]]
[cats.monad.exception :as exception]
[compojure.api.sweet :refer [defapi context GET PUT DELETE]]
[ring.util.http-response :as response :refer [ok]]
[schema.core :as s]))
(defapi api-projects
{:swagger {:ui "/swagger/projects"
:spec "/swagger/projects.json"
:data {:info {:version "1.0.0"
:title "Projects API"
:description "API for managing projects"}}}}
(context "/api/1/projects" []
:tags ["Projects"]
(GET "/" []
:return [Project]
:query-params [user-id :- s/Str]
:summary "Returns all the projects that belong to the current user."
(ok (db.projects/find-by-user-id user-id)))
(DELETE "/:project-id" []
:path-params [project-id :- s/Str]
:query-params [user-id :- s/Str]
:summary "Returns all the graphs that belong to the current user."
(ok {:removed (db.projects/remove-by-id user-id project-id)}))
(PUT "/" []
:return Project
:query-params [user-id :- s/Str]
:body-params [project :- Project]
:summary "Upserts a project"
(cond
(= "default" (:id project))
(response/bad-request
{:title "Default project not allowed"
:message "The default project cannot be stored."})
:else
(-> (db.projects/save (assoc project :user-id user-id))
(exception/extract)
(ok))))))
|
|
ab3976220fa9720a3156ca8baa3ee1fb49227cdec6365ed51596493566e2b905 | igorhvr/bedlam | test.scm |
(with-output-to-string
(lambda ()
(for-each (lambda (n)
(display n))
ls))))
| null | https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/sisc/sisc-cvs/test.scm | scheme |
(with-output-to-string
(lambda ()
(for-each (lambda (n)
(display n))
ls))))
|
|
73f2f9b1b107099a0417362b4d9edd48901c39d8739b238af1bd460cab396c6c | inconvergent/cl-grph | run.lisp |
(defpackage #:grph-tests (:use #:cl #:prove) (:export #:run-tests))
(setf prove:*enable-colors* nil)
(in-package #:grph-tests)
#+:grph-parallel (setf lparallel:*kernel* (lparallel:make-kernel 4))
; (defun compile-or-fail (f)
( format t " ~%compiling : ~a~% " ( grph::mkstr f ) )
; (with-open-stream (*standard-output* (make-broadcast-stream))
; (compile-file f)))
(defun -run-tests (files)
  "Run the PROVE tests in each file of FILES, printing progress markers.
If any file reports failures, exit the image with unix status 7 (so a CI
driver can detect the failure); otherwise return NIL."
  (loop with fails = 0
        for f in files
        do ;(compile-or-fail f)
           (format t "~&~%starting tests in: ~a~%" (grph::mkstr f))
           (unless (prove:run f :reporter :fiveam)
             (incf fails))
           (format t "~&done: ~a~%" (grph::mkstr f))
        finally (return (unless (< fails 1)
                          (sb-ext:quit :unix-status 7)))))
(defun run-tests ()
  "Run the full test suite, including the walk tests."
  (-run-tests '(#P"test/grph.lisp" #P"test/qry.lisp"
                #P"test/qry-2.lisp" #P"test/qry-3.lisp"
                #P"test/xgrph.lisp"
                #P"test/grph-walk.lisp"
                )))

(defun p/run-tests ()
  "Run the test suite without the walk tests (used for the parallel build)."
  (-run-tests '(#P"test/grph.lisp" #P"test/qry.lisp"
                #P"test/qry-2.lisp" #P"test/qry-3.lisp"
                #P"test/xgrph.lisp")))
(defun lsort* (l)
  "radix sort list of lists (of numbers or symbols).
inefficient. use for tests only."
  ;; stable-sort by each column, last column first: after the final pass the
  ;; rows are ordered lexicographically over all columns.  The input list is
  ;; copied because STABLE-SORT is destructive.
  (let ((rows (copy-list l)))
    (loop for col from (1- (length (first rows))) downto 0
          do (setf rows
                   (stable-sort rows
                                (lambda (a b)
                                  (let ((x (nth col a))
                                        (y (nth col b)))
                                    (etypecase x
                                      (symbol (string< x y))
                                      (number (< x y))))))))
    rows))
(defun ls (l)
  "Shorthand for LSORT*."
  (lsort* l))
(defun mapls (&rest rest)
  "Apply LSORT* to every list in REST, returning the sorted lists."
  (mapcar #'lsort* rest))
(defun make-edge-set
    ;; fixture: a fresh graph G populated from the literal edge list F,
    ;; where each entry is (from-vertex property to-vertex)
    (&aux (g (grph:grph))
          (f `((0 :A 1) (0 :C 1) (1 :A 3) (1 :A 2) (1 :A 0) (1 :C 0)
               (2 :A 1) (3 :C 7) (3 :B 5) (3 :C 5) (3 :B 4) (3 :A 1)
               (4 :B 3) (4 :B 5) (4 :E 5) (5 :B 3) (5 :C 3) (5 :B 4)
               (5 :E 4) (7 :C 3) (99 :X 77))))
  "Build the labelled test graph used by the query tests."
  (grph:ingest-edges g f))
(defun mk-grph-main ()
  "Build the main test graph: a mix of bare edges, edges with plain
property keywords and edges with (key value) property pairs."
  (let ((g (grph:grph))
        (bprop '((:b "90"))))
    (grph:add! g 0 1)
    (grph:add! g 2 3)
    (grph:add! g 3 4 '(:a))
    (grph:add! g 4 3 '((:a "43")))
    (grph:add! g 5 6)
    (grph:add! g 6 0 '((:a "60")))
    (grph:add! g 7 8 '(:c))
    (grph:add! g 8 9 '(:b))
    ;; property list passed via a variable rather than a literal on purpose
    (grph:add! g 9 0 bprop)
    (grph:add! g 7 8 '(:b))
    (grph:add! g 0 3)
    g))
(defun mk-grph-match ()
  "Build the graph used by the property-match tests; several edges carry
string property values, and edge 0->1 is added twice with different props."
  (let ((g (grph:grph)))
    (grph:add! g 0 1 '(:a))
    (grph:add! g 0 3 '(:a))
    (grph:add! g 2 3 '((:a "bbbbb")))
    (grph:add! g 2 3 '((:b "ccccc")))
    (grph:add! g 3 4 '(:a))
    (grph:add! g 4 3 '(:a))
    (grph:add! g 7 8 '((:a "7778888")))
    (grph:add! g 5 6)
    (grph:add! g 6 0 '(:b))
    (grph:add! g 33 0 '(:b))
    (grph:add! g 8 9 '(:b))
    (grph:add! g 9 0 '(:a))
    ;; second add on 0->1 attaches a valued :a property
    (grph:add! g 0 1 '((:a "aaa")))
    g))
(defun make-rules-edge-set-1 ()
  "Fixture graph for the rules tests: :a edges only."
  (let ((g (grph:grph))
        (f `((0 :a 1) (0 :a 2) (1 :a 3)
             (3 :a 2) (3 :a 4) (3 :a 0))))
    (grph:ingest-edges g f)))
(defun make-rules-edge-set-2 ()
  "Fixture graph for the rules tests: a :b cycle plus an :e chain."
  (let ((g (grph:grph))
        (f `((0 :b 1) (1 :b 3) (3 :b 0) (1 :e 4) (4 :e 6))))
    (grph:ingest-edges g f)))
(defun make-rules-edge-set-3 ()
  "Fixture graph for the rules tests: overlapping :a, :b, :c and :e edges."
  (let ((g (grph:grph))
        (f `((0 :a 3) (3 :a 2) (2 :a 0)
             (0 :b 1) (1 :b 3) (3 :b 0)
             (3 :c 5) (5 :c 2)
             (1 :e 4) (4 :e 6))))
    (grph:ingest-edges g f)))
| null | https://raw.githubusercontent.com/inconvergent/cl-grph/d46e921b32bcff0545ab2932238bb7c81594bd89/test/run.lisp | lisp | (defun compile-or-fail (f)
(with-open-stream (*standard-output* (make-broadcast-stream))
(compile-file f)))
(compile-or-fail f) |
(defpackage #:grph-tests (:use #:cl #:prove) (:export #:run-tests))
(setf prove:*enable-colors* nil)
(in-package #:grph-tests)
#+:grph-parallel (setf lparallel:*kernel* (lparallel:make-kernel 4))
( format t " ~%compiling : ~a~% " ( grph::mkstr f ) )
(defun -run-tests (files)
(loop with fails = 0
for f in files
(format t "~&~%starting tests in: ~a~%" (grph::mkstr f))
(unless (prove:run f :reporter :fiveam)
(incf fails))
(format t "~&done: ~a~%" (grph::mkstr f))
finally (return (unless (< fails 1)
(sb-ext:quit :unix-status 7)))))
(defun run-tests ()
(-run-tests '(#P"test/grph.lisp" #P"test/qry.lisp"
#P"test/qry-2.lisp" #P"test/qry-3.lisp"
#P"test/xgrph.lisp"
#P"test/grph-walk.lisp"
)))
(defun p/run-tests ()
(-run-tests '(#P"test/grph.lisp" #P"test/qry.lisp"
#P"test/qry-2.lisp" #P"test/qry-3.lisp"
#P"test/xgrph.lisp")))
(defun lsort* (l &aux (l (copy-list l)))
(declare (optimize speed) (list l))
"radix sort list of lists (of numbers or symbols).
inefficient. use for tests only."
(loop for i of-type fixnum from (1- (length (the list (first l)))) downto 0
do (labels ((srt (a b)
(funcall (the function (etypecase a (symbol #'string<)
(number #'<)))
a b))
(p (a b) (srt (nth i a) (nth i b))))
(setf l (stable-sort (the list l) #'p))))
l)
(defun ls (l) (lsort* l))
(defun mapls (&rest rest) (mapcar #'lsort* rest))
(defun make-edge-set
(&aux (g (grph:grph))
(f `((0 :A 1) (0 :C 1) (1 :A 3) (1 :A 2) (1 :A 0) (1 :C 0)
(2 :A 1) (3 :C 7) (3 :B 5) (3 :C 5) (3 :B 4) (3 :A 1)
(4 :B 3) (4 :B 5) (4 :E 5) (5 :B 3) (5 :C 3) (5 :B 4)
(5 :E 4) (7 :C 3) (99 :X 77))))
(grph:ingest-edges g f))
(defun mk-grph-main ()
(let ((g (grph:grph))
(bprop '((:b "90"))))
(grph:add! g 0 1)
(grph:add! g 2 3)
(grph:add! g 3 4 '(:a))
(grph:add! g 4 3 '((:a "43")))
(grph:add! g 5 6)
(grph:add! g 6 0 '((:a "60")))
(grph:add! g 7 8 '(:c))
(grph:add! g 8 9 '(:b))
(grph:add! g 9 0 bprop)
(grph:add! g 7 8 '(:b))
(grph:add! g 0 3)
g))
(defun mk-grph-match ()
(let ((g (grph:grph)))
(grph:add! g 0 1 '(:a))
(grph:add! g 0 3 '(:a))
(grph:add! g 2 3 '((:a "bbbbb")))
(grph:add! g 2 3 '((:b "ccccc")))
(grph:add! g 3 4 '(:a))
(grph:add! g 4 3 '(:a))
(grph:add! g 7 8 '((:a "7778888")))
(grph:add! g 5 6)
(grph:add! g 6 0 '(:b))
(grph:add! g 33 0 '(:b))
(grph:add! g 8 9 '(:b))
(grph:add! g 9 0 '(:a))
(grph:add! g 0 1 '((:a "aaa")))
g))
(defun make-rules-edge-set-1 ()
(let ((g (grph:grph))
(f `((0 :a 1) (0 :a 2) (1 :a 3)
(3 :a 2) (3 :a 4) (3 :a 0))))
(grph:ingest-edges g f)))
(defun make-rules-edge-set-2 ()
(let ((g (grph:grph))
(f `((0 :b 1) (1 :b 3) (3 :b 0) (1 :e 4) (4 :e 6))))
(grph:ingest-edges g f)))
(defun make-rules-edge-set-3 ()
(let ((g (grph:grph))
(f `((0 :a 3) (3 :a 2) (2 :a 0)
(0 :b 1) (1 :b 3) (3 :b 0)
(3 :c 5) (5 :c 2)
(1 :e 4) (4 :e 6))))
(grph:ingest-edges g f)))
|
5594d82e5ec9ee2b7821bc021798863a17b3913f9c9d76163007b8cf7bcc364f | cabol/west | west_lib.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2013 , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%%%-------------------------------------------------------------------
@author < >
( C ) 2013 , < > , All Rights Reserved .
%%% @doc Interface into the WEST distributed application.
%%% @see <a href="-to-start-with-riak-core"></a>
%%% @end
Created : 05 . Nov 2013 12:12 PM
%%%-------------------------------------------------------------------
-module(west_lib).
%% API
-export([reg/4, unreg/2, send/4,
sub/4, unsub/2, pub/4]).
-include("west_int.hrl").
%%%===================================================================
%%% API
%%%===================================================================
%% @doc reg/4.
%% Register to a point-to-point channel with name `Key'. All incoming
events to the channel ` Key ' will be handle them by a GS created
%% by this process.
%% <br/>
Creates a GS event handler and register it into Gproc , locally .
This GS will handle the incoming messages .
%% <br/>
%% <li>Scope: Gproc scope.</li>
< li > Ref : Unique reference to the GS that will be created.</li >
< li > Key : Key which the GS will be registered.</li >
%% <li>CbSpec: Callback specification. This will be called when messages
arrives.</li >
%%
, Ref , Key , ) - > Reply : : term ( )
%% Scope = atom()
%% Ref = any()
%% Key = atom()
CbSpec = { Mod : : atom ( ) , Fun : : atom ( ) , : : list ( ) }
reg(Scope, Ref, Key, CbSpec) ->
  %% One handler process per {Ref, Key}; its registered name encodes both.
  Name = west_util:build_name([Ref, Key]),
  case whereis(Name) of
    undefined ->
      %% Spawn the event handler, register it locally, then attempt the
      %% gproc registration of Key.
      {ok, Pid} = west_event_handler:create(Scope, CbSpec, [{monitors, [Ref]}]),
      register(Name, Pid),
      case west_event_handler:reg(Name, Key) of
        {ok, _} ->
          {ok, registration_succeeded, Key};
        {error, _} ->
          %% Roll back: drop the handler when the registration is denied.
          west_event_handler:delete(Name),
          {error, registration_denied, Key}
      end;
    _ ->
      %% A handler for this {Ref, Key} already exists.
      {error, registration_already_exist, Key}
  end.
%% @doc unreg/2.
%% Unregister from a point-to-point channel with name `Key'. The
created GS process wo n't handle incoming events to channel ` Key '
%% any more.
%% <br/>
Destroy the GS event handler in order to delete the registration
from Gproc .
%% <br/>
< li > Ref : Unique reference to the GS that will be created.</li >
< li > Key : Key which the GS was registered.</li >
%%
%% @spec unreg(Ref :: any(), Key :: atom()) -> Reply :: term()
unreg(Ref, Key) ->
  Name = west_util:build_name([Ref, Key]),
  case whereis(Name) of
    undefined ->
      {error, registration_not_found, Key};
    Pid ->
      %% NOTE(review): ?PROC_TYPE appears to return the gproc registration
      %% type of Pid; 'n' (name) marks point-to-point registrations, while
      %% subscriptions use 'p' -- confirm in west_int.hrl.
      case ?PROC_TYPE(Pid) of
        n ->
          west_event_handler:delete(Name),
          {ok, unregistration_succeeded, Key};
        _ ->
          {error, registration_not_found, Key}
      end
  end.
%% @doc send/4.
%% Send the message `Msg' to point-to-point channel `Key'. Just one
%% consumer will receive this message.
%% <br/>
%% Sends the given message `Msg' to a `Key'. If the registration to
` Key ' exist , message will be received by the registered GS . If
%% registration doesn't exist, send will fail.
%% <br/>
%% <li>Scope: Gproc scope.</li>
%% <li>ETag: ID of the sender.</li>
< li > Key : Key which the GS was registered.</li >
< li > Msg : Message that will send.</li >
%%
%% @spec send(Scope, ETag, Key, Msg) -> Reply :: term()
%% Scope = atom()
%% ETag = string()
%% Key = atom()
%% Msg = binary() | list()
send(Scope, ETag, Key, Msg) ->
  %% F performs the actual send and maps the boolean result to a reply tuple.
  F = fun() ->
        case ?SEND(Scope, ETag, Key, Msg) of
          true ->
            {ok, sending_succeeded, Key};
          _ ->
            {error, sending_failed, Key}
        end
      end,
  case Scope of
    g ->
      %% Global scope: check that some process is registered under Key
      %% before sending; otherwise fail fast.
      case ?WHERE(Scope, Key) of
        undefined ->
          {error, sending_failed, Key};
        {error, _} ->
          {error, sending_failed, Key};
        _ ->
          F()
      end;
    _ ->
      %% Local scope: send directly.
      F()
  end.
@doc sub/4 .
%% Subscribe to a pub/sub channel `Event'. All incoming events to the
channel ` Event ' will be handle them by GS created by this process .
%% <br/>
Creates a GS event handler and subscribe it into Gproc , in order
%% to handle the subscription lifecycle and handle the published
%% messages to `Event'.
%% <br/>
%% <li>Scope: Gproc scope.</li>
< li > Ref : Unique reference to the GS that will be created.</li >
< li > Event : Event which the GS will be subscribed.</li >
%% <li>CbSpec: Callback specification. This will be called when messages
arrives.</li >
%%
, Ref , Event , ) - > Reply : : term ( )
%% Scope = atom()
%% Ref = any()
%% Event = atom()
CbSpec = { Mod : : atom ( ) , Fun : : atom ( ) , : : list ( ) }
sub(Scope, Ref, Event, CbSpec) ->
  %% One handler process per {Ref, Event}; mirrors reg/4 but creates a
  %% pub/sub subscription instead of a point-to-point registration.
  Name = west_util:build_name([Ref, Event]),
  case whereis(Name) of
    undefined ->
      {ok, Pid} = west_event_handler:create(Scope, CbSpec, [{monitors, [Ref]}]),
      register(Name, Pid),
      case west_event_handler:subscribe(Name, Event) of
        {ok, _} ->
          {ok, subscription_succeeded, Event};
        {error, _} ->
          %% Roll back the handler when the subscription fails.
          west_event_handler:delete(Name),
          {error, subscription_failed, Event}
      end;
    _ ->
      {error, subscription_already_exist, Event}
  end.
%% @doc unsub/2.
%% Delete a subscription from a pub/sub channel `Event'.The
created GS process wo n't handle incoming events to channel ` Event '
%% any more.
%% <br/>
Destroy the GS event handler in order to delete the subscription
%% from Gproc. Ends the subscription lifecycle.
%% <br/>
< li > Ref : Unique reference to the GS that will be created.</li >
< li > Event : Event which the GS was subscribed.</li >
%%
%% @spec unsub(Ref :: any(), Event :: atom()) -> Reply :: term()
unsub(Ref, Event) ->
  Name = west_util:build_name([Ref, Event]),
  case whereis(Name) of
    undefined ->
      {error, subscription_not_found, Event};
    Pid ->
      %% NOTE(review): 'p' (property) appears to be the gproc type used for
      %% subscriptions, as opposed to 'n' for registrations -- confirm in
      %% west_int.hrl.
      case ?PROC_TYPE(Pid) of
        p ->
          west_event_handler:delete(Name),
          {ok, unsubscription_succeeded, Event};
        _ ->
          {error, subscription_not_found, Event}
      end
  end.
@doc pub/4 .
%% Publish the message `Msg' to all subscribers to a pub/sub channel
%% `Event'.
%% <br/>
%% Publishes the given message `Msg' into the a `Event'. If the
%% subscription to `Event' exist, message will be received by the
subscribed GS . If subscription does n't exist , publish will fail .
%% <br/>
%% <li>Scope: Gproc scope.</li>
%% <li>ETag: ID of the sender.</li>
< li > Event : Event which the GS was registered.</li >
< li > Msg : Message that will send.</li >
%%
%% @spec pub(Scope, ETag, Event, Msg) -> Reply :: term()
%% Scope = atom()
%% ETag = string()
%% Event = atom()
%% Msg = binary() | list()
pub(Scope, ETag, Event, Msg) ->
  %% Map the boolean result of the publish macro to a reply tuple.
  case ?PS_PUB(Scope, ETag, Event, Msg) of
    true ->
      {ok, publication_succeeded, Event};
    _ ->
      {error, publication_failed, Event}
  end.
| null | https://raw.githubusercontent.com/cabol/west/c3c31dff9ad727ce9b82dde6eb690f7b11cd4d24/src/west_lib.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
-------------------------------------------------------------------
@doc Interface into the WEST distributed application.
@see <a href="-to-start-with-riak-core"></a>
@end
-------------------------------------------------------------------
API
===================================================================
API
===================================================================
@doc reg/4.
Register to a point-to-point channel with name `Key'. All incoming
by this process.
<br/>
<br/>
<li>Scope: Gproc scope.</li>
<li>CbSpec: Callback specification. This will be called when messages
Scope = atom()
Ref = any()
Key = atom()
@doc unreg/2.
Unregister from a point-to-point channel with name `Key'. The
any more.
<br/>
<br/>
@spec unreg(Ref :: any(), Key :: atom()) -> Reply :: term()
@doc send/4.
Send the message `Msg' to point-to-point channel `Key'. Just one
consumer will receive this message.
<br/>
Sends the given message `Msg' to a `Key'. If the registration to
registration doesn't exist, send will fail.
<br/>
<li>Scope: Gproc scope.</li>
<li>ETag: ID of the sender.</li>
@spec send(Scope, ETag, Key, Msg) -> Reply :: term()
Scope = atom()
ETag = string()
Key = atom()
Msg = binary() | list()
Subscribe to a pub/sub channel `Event'. All incoming events to the
<br/>
to handle the subscription lifecycle and handle the published
messages to `Event'.
<br/>
<li>Scope: Gproc scope.</li>
<li>CbSpec: Callback specification. This will be called when messages
Scope = atom()
Ref = any()
Event = atom()
@doc unsub/2.
Delete a subscription from a pub/sub channel `Event'.The
any more.
<br/>
from Gproc. Ends the subscription lifecycle.
<br/>
@spec unsub(Ref :: any(), Event :: atom()) -> Reply :: term()
Publish the message `Msg' to all subscribers to a pub/sub channel
`Event'.
<br/>
Publishes the given message `Msg' into the a `Event'. If the
subscription to `Event' exist, message will be received by the
<br/>
<li>Scope: Gproc scope.</li>
<li>ETag: ID of the sender.</li>
@spec pub(Scope, ETag, Event, Msg) -> Reply :: term()
Scope = atom()
ETag = string()
Event = atom()
Msg = binary() | list() | Copyright ( c ) 2013 , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@author < >
( C ) 2013 , < > , All Rights Reserved .
Created : 05 . Nov 2013 12:12 PM
-module(west_lib).
-export([reg/4, unreg/2, send/4,
sub/4, unsub/2, pub/4]).
-include("west_int.hrl").
events to the channel ` Key ' will be handle them by a GS created
Creates a GS event handler and register it into Gproc , locally .
This GS will handle the incoming messages .
< li > Ref : Unique reference to the GS that will be created.</li >
< li > Key : Key which the GS will be registered.</li >
arrives.</li >
, Ref , Key , ) - > Reply : : term ( )
CbSpec = { Mod : : atom ( ) , Fun : : atom ( ) , : : list ( ) }
reg(Scope, Ref, Key, CbSpec) ->
Name = west_util:build_name([Ref, Key]),
case whereis(Name) of
undefined ->
{ok, Pid} = west_event_handler:create(Scope, CbSpec, [{monitors, [Ref]}]),
register(Name, Pid),
case west_event_handler:reg(Name, Key) of
{ok, _} ->
{ok, registration_succeeded, Key};
{error, _} ->
west_event_handler:delete(Name),
{error, registration_denied, Key}
end;
_ ->
{error, registration_already_exist, Key}
end.
created GS process wo n't handle incoming events to channel ` Key '
Destroy the GS event handler in order to delete the registration
from Gproc .
< li > Ref : Unique reference to the GS that will be created.</li >
< li > Key : Key which the GS was registered.</li >
unreg(Ref, Key) ->
Name = west_util:build_name([Ref, Key]),
case whereis(Name) of
undefined ->
{error, registration_not_found, Key};
Pid ->
case ?PROC_TYPE(Pid) of
n ->
west_event_handler:delete(Name),
{ok, unregistration_succeeded, Key};
_ ->
{error, registration_not_found, Key}
end
end.
` Key ' exist , message will be received by the registered GS . If
< li > Key : Key which the GS was registered.</li >
< li > Msg : Message that will send.</li >
send(Scope, ETag, Key, Msg) ->
F = fun() ->
case ?SEND(Scope, ETag, Key, Msg) of
true ->
{ok, sending_succeeded, Key};
_ ->
{error, sending_failed, Key}
end
end,
case Scope of
g ->
case ?WHERE(Scope, Key) of
undefined ->
{error, sending_failed, Key};
{error, _} ->
{error, sending_failed, Key};
_ ->
F()
end;
_ ->
F()
end.
@doc sub/4 .
channel ` Event ' will be handle them by GS created by this process .
Creates a GS event handler and subscribe it into Gproc , in order
< li > Ref : Unique reference to the GS that will be created.</li >
< li > Event : Event which the GS will be subscribed.</li >
arrives.</li >
, Ref , Event , ) - > Reply : : term ( )
CbSpec = { Mod : : atom ( ) , Fun : : atom ( ) , : : list ( ) }
sub(Scope, Ref, Event, CbSpec) ->
Name = west_util:build_name([Ref, Event]),
case whereis(Name) of
undefined ->
{ok, Pid} = west_event_handler:create(Scope, CbSpec, [{monitors, [Ref]}]),
register(Name, Pid),
case west_event_handler:subscribe(Name, Event) of
{ok, _} ->
{ok, subscription_succeeded, Event};
{error, _} ->
west_event_handler:delete(Name),
{error, subscription_failed, Event}
end;
_ ->
{error, subscription_already_exist, Event}
end.
created GS process wo n't handle incoming events to channel ` Event '
Destroy the GS event handler in order to delete the subscription
< li > Ref : Unique reference to the GS that will be created.</li >
< li > Event : Event which the GS was subscribed.</li >
unsub(Ref, Event) ->
Name = west_util:build_name([Ref, Event]),
case whereis(Name) of
undefined ->
{error, subscription_not_found, Event};
Pid ->
case ?PROC_TYPE(Pid) of
p ->
west_event_handler:delete(Name),
{ok, unsubscription_succeeded, Event};
_ ->
{error, subscription_not_found, Event}
end
end.
%% @doc pub/4.
%% Publishes Msg to every subscribed GS. If no subscription exists,
%% the publication will fail.
%% <li>Event: Event which the GS was registered.</li>
%% <li>Msg: Message that will be sent.</li>
pub(Scope, ETag, Event, Msg) ->
    %% Delegates to the ?PS_PUB pub/sub publish macro; true means at least
    %% the publish call itself succeeded.
    case ?PS_PUB(Scope, ETag, Event, Msg) of
        true ->
            {ok, publication_succeeded, Event};
        _ ->
            {error, publication_failed, Event}
    end.
|
e35e429207a9e5c8b59a91145725a9cd2feabb5d4d9fcd7896a214b06d6080f9 | sellout/LOOM | structures.lisp | (loom.internals:defpackage structures
(:use #:cl.data-and-control-flow #:loom.internals)
(:export #:defstruct
#:copy-structure)
(:import-from #:cl #:defstruct))
(cl:in-package #:loom.structures)
(make-generic copy-structure (structure))
| null | https://raw.githubusercontent.com/sellout/LOOM/b34b0590e82a8ba41ca1e58a8a825dd889285c1d/src/structures.lisp | lisp | (loom.internals:defpackage structures
(:use #:cl.data-and-control-flow #:loom.internals)
(:export #:defstruct
#:copy-structure)
(:import-from #:cl #:defstruct))
(cl:in-package #:loom.structures)
(make-generic copy-structure (structure))
|
|
89a0bb11cd6f6ba9d473ae4c1ee9fb40f7841daafb01f7269a55b68cdec317fa | eeng/shevek | helpers.cljs | (ns shevek.pages.designer.helpers
(:require [shevek.reflow.core :refer [dispatch] :refer-macros [defevh]]
[shevek.reflow.db :as db]
[shevek.components.popup :refer [tooltip]]
[shevek.components.refresh :refer [debounce-auto-refresh!]]
[shevek.lib.string :refer [regex-escape]]
[shevek.lib.logger :as log]
[shevek.lib.util :refer [debounce]]
[shevek.domain.dimension :refer [dim= add-dimension remove-dimension time-dimension?]]
[shevek.pages.cubes.helpers :refer [get-cube]]
[shevek.rpc :as rpc]
[shevek.schemas.conversion :refer [designer->report report->designer unexpand]]
[shevek.i18n :refer [t]]))
(defn current-cube
  "Looks up the cube currently selected in the designer (by the :cube name
  stored in the report) and optionally drills into it with `keys`, as per
  get-in."
  [& keys]
  (let [cube-name (db/get-in [:designer :report :cube])
        cube (get-cube cube-name)]
    (get-in cube keys)))
(defn- store-report-changes
  "Persists `report` into app-db under [:designer :report] and notifies the
  designer's :on-report-change callback (side effect). Returns the new db."
  [{{:keys [on-report-change]} :designer :as db} report]
  (let [db (assoc-in db [:designer :report] report)]
    (on-report-change report)
    db))
(defn- report->query
  "Projects a report map onto the fields the query engine understands
  (:cube :measures :filters :splits) and always requests grand totals."
  [report]
  (assoc (select-keys report [:cube :measures :filters :splits])
         :totals true))
;; Fired when query results come back from the server. Stores them under
;; `results-path`, clears the loading flag and — when the query was issued
;; for a pending report — commits that report and re-arms auto-refresh.
(defevh :designer/results-arrived [db results results-path pending-report]
  (when pending-report
    (debounce-auto-refresh!))
  (-> (assoc-in db results-path results)
      (rpc/loaded results-path)
      (cond-> ; We change the report only after results arrived so the visualization doesn't rerender until that moment
        pending-report (store-report-changes pending-report))))
(defn- send-query
  "Fires `query` over RPC (results arrive asynchronously via the
  :designer/results-arrived event) and marks `results-path` as loading."
  [db query results-path & [pending-report]]
  (log/info "Sending query" query)
  (rpc/call "querying/query" :args [query] :handler #(dispatch :designer/results-arrived % results-path pending-report))
  (rpc/loading db results-path))
(defn send-report-query
  "Converts `report` to a query and sends it; results land at `results-path`."
  [db report results-path]
  (send-query db (report->query report) results-path))
(defn notify-designer-changes
  "Syncs the current designer state back into the report and notifies the
  :on-report-change listener."
  [{:keys [designer] :as db}]
  (store-report-changes db (designer->report designer)))
(defn send-designer-query
  "Builds a report from the current designer state and queries for the main
  visualization results; the report itself is committed once results arrive."
  [{:keys [designer] :as db}]
  (let [report (designer->report designer)]
    (send-query db (report->query report) [:designer :report-results] report)))
(defn- remove-dim-unless-time
  "Removes `dim` from `coll`, except when dim is the time dimension, in
  which case coll is returned untouched."
  [dim coll]
  (cond-> coll
    (not (time-dimension? dim)) (remove-dimension dim)))
TODO esto del unexpand y tantos metodos de conversion no me convence , revisar . no con algo mas simple como en la query que trae los results , o sea con un atom interno nomas
(defn send-pinned-dim-query
  "Queries the values of one pinned dimension `dim`, split by that dimension
  and measured by the pinboard's selected measure. The dimension's own
  filter is dropped (unless it is the time dimension) so all its values show
  up; an optional `search-filter` narrows the result."
  [{:keys [designer] :as db} {:keys [name] :as dim} & [{:as search-filter}]]
  (let [q (cond-> {:cube (get-in designer [:report :cube])
                   :filters (remove-dim-unless-time dim (:filters designer))
                   :splits [dim]
                   :measures (vector (get-in designer [:pinboard :measure]))}
            search-filter (update :filters add-dimension search-filter))]
    (send-query db (unexpand q) [:designer :pinboard-results name])))
(defn send-pinboard-queries
  "Refreshes the query of every pinned dimension, optionally skipping
  `except-dim` (the time dimension is always refreshed)."
  ([db] (send-pinboard-queries db nil))
  ([db except-dim]
   (->> (get-in db [:designer :pinboard :dimensions])
        (remove-dim-unless-time except-dim)
        (reduce #(send-pinned-dim-query %1 %2) db))))
(def debounce-dispatch (debounce dispatch 500))
(defn build-visualization
  "Builds the map a visualization component needs (:viztype :splits
  :measures :results) from raw query `results` and the `report` that
  produced them. The report's cube must already be loaded (asserted)."
  [results {:keys [cube] :as report}]
  (let [cube (get-cube cube)]
    (assert cube)
    (-> (report->designer report cube)
        (select-keys [:viztype :splits :measures])
        (assoc :results results))))
(defn- default-measures
  "Names of the cube's starting measures: all the favorites when any exist,
  otherwise the first three."
  [{:keys [measures]}]
  (let [favorites (filter :favorite measures)
        chosen    (if (seq favorites) favorites (take 3 measures))]
    (mapv :name chosen)))
(defn build-new-report
  "Returns a fresh report for `cube`: totals viztype, the cube's default
  measures and a latest-day time filter."
  [{:keys [name] :as cube}]
  (let [measures (default-measures cube)]
    {:cube name
     :name (t :reports/new)
     :viztype "totals"
     :measures measures
     :filters [{:name "__time" :period "latest-day"}]}))
;;;;; Components
(defn panel-header
  "Hiccup sub-header with `text`; any `actions` are grouped into a trailing
  div.actions (omitted — nil — when there are none)."
  [text & actions]
  (let [action-bar (when (seq actions)
                     (into [:div.actions] actions))]
    [:h2.ui.sub.header text action-bar]))
(defn description-help-icon
  "Question-mark icon showing `description` in a tooltip; renders nothing
  when the description is blank."
  [{:keys [description]}]
  (when (seq description)
    [:i.question.circle.outline.icon {:ref (tooltip description {:position "right center" :delay 250})}]))
(defn search-button
  "Icon that toggles the `searching` atom on click."
  [searching]
  [:i.search.link.icon {:on-click #(swap! searching not)}])
(defn highlight
  "Renders `value` with the first case-insensitive occurrence of `search`
  wrapped in a bold span; with an empty search the plain value is returned.
  NOTE(review): when `search` is non-empty but absent from `value`, re-find
  returns nil and all destructured parts are nil — confirm callers only pass
  values that matched the search."
  [value search]
  (if (seq search)
    (let [[_ pre bold post] (re-find (re-pattern (str "(?i)(.*?)(" (regex-escape search) ")(.*)")) value)]
      [:div.segment-value pre [:span.bold bold] post])
    [:div.segment-value value]))
| null | https://raw.githubusercontent.com/eeng/shevek/7783b8037303b8dd5f320f35edee3bfbb2b41c02/src/cljs/shevek/pages/designer/helpers.cljs | clojure | We change the report only after results arrived so the visualization doesn't rerender until that moment
Components | (ns shevek.pages.designer.helpers
(:require [shevek.reflow.core :refer [dispatch] :refer-macros [defevh]]
[shevek.reflow.db :as db]
[shevek.components.popup :refer [tooltip]]
[shevek.components.refresh :refer [debounce-auto-refresh!]]
[shevek.lib.string :refer [regex-escape]]
[shevek.lib.logger :as log]
[shevek.lib.util :refer [debounce]]
[shevek.domain.dimension :refer [dim= add-dimension remove-dimension time-dimension?]]
[shevek.pages.cubes.helpers :refer [get-cube]]
[shevek.rpc :as rpc]
[shevek.schemas.conversion :refer [designer->report report->designer unexpand]]
[shevek.i18n :refer [t]]))
(defn current-cube [& keys]
(let [cube-name (db/get-in [:designer :report :cube])
cube (get-cube cube-name)]
(get-in cube keys)))
(defn- store-report-changes [{{:keys [on-report-change]} :designer :as db} report]
(let [db (assoc-in db [:designer :report] report)]
(on-report-change report)
db))
(defn- report->query [report]
(-> report
(select-keys [:cube :measures :filters :splits])
(assoc :totals true)))
(defevh :designer/results-arrived [db results results-path pending-report]
(when pending-report
(debounce-auto-refresh!))
(-> (assoc-in db results-path results)
(rpc/loaded results-path)
pending-report (store-report-changes pending-report))))
(defn- send-query [db query results-path & [pending-report]]
(log/info "Sending query" query)
(rpc/call "querying/query" :args [query] :handler #(dispatch :designer/results-arrived % results-path pending-report))
(rpc/loading db results-path))
(defn send-report-query [db report results-path]
(send-query db (report->query report) results-path))
(defn notify-designer-changes [{:keys [designer] :as db}]
(store-report-changes db (designer->report designer)))
(defn send-designer-query [{:keys [designer] :as db}]
(let [report (designer->report designer)]
(send-query db (report->query report) [:designer :report-results] report)))
(defn- remove-dim-unless-time [dim coll]
(if (time-dimension? dim)
coll
(remove-dimension coll dim)))
TODO esto del unexpand y tantos metodos de conversion no me convence , revisar . no con algo mas simple como en la query que trae los results , o sea con un atom interno nomas
(defn send-pinned-dim-query [{:keys [designer] :as db} {:keys [name] :as dim} & [{:as search-filter}]]
(let [q (cond-> {:cube (get-in designer [:report :cube])
:filters (remove-dim-unless-time dim (:filters designer))
:splits [dim]
:measures (vector (get-in designer [:pinboard :measure]))}
search-filter (update :filters add-dimension search-filter))]
(send-query db (unexpand q) [:designer :pinboard-results name])))
(defn send-pinboard-queries
([db] (send-pinboard-queries db nil))
([db except-dim]
(->> (get-in db [:designer :pinboard :dimensions])
(remove-dim-unless-time except-dim)
(reduce #(send-pinned-dim-query %1 %2) db))))
(def debounce-dispatch (debounce dispatch 500))
(defn build-visualization [results {:keys [cube] :as report}]
(let [cube (get-cube cube)]
(assert cube)
(-> (report->designer report cube)
(select-keys [:viztype :splits :measures])
(assoc :results results))))
(defn- default-measures [{:keys [measures]}]
(->> (if (some :favorite measures)
(filter :favorite measures)
(take 3 measures))
(mapv :name)))
(defn build-new-report [{:keys [name] :as cube}]
(let [measures (default-measures cube)]
{:cube name
:name (t :reports/new)
:viztype "totals"
:measures measures
:filters [{:name "__time" :period "latest-day"}]}))
(defn panel-header [text & actions]
[:h2.ui.sub.header text
(when (seq actions) (into [:div.actions] actions))])
(defn description-help-icon [{:keys [description]}]
(when (seq description)
[:i.question.circle.outline.icon {:ref (tooltip description {:position "right center" :delay 250})}]))
(defn search-button [searching]
[:i.search.link.icon {:on-click #(swap! searching not)}])
(defn highlight [value search]
(if (seq search)
(let [[_ pre bold post] (re-find (re-pattern (str "(?i)(.*?)(" (regex-escape search) ")(.*)")) value)]
[:div.segment-value pre [:span.bold bold] post])
[:div.segment-value value]))
|
e982d2d57a8f2db1c56e34d4844eda1e4deda73a39863408ad58e6307130292c | shayne-fletcher/zen | print_tree.ml | (* A type of non-empty trees of strings. *)
type tree = [
|`Node of string * tree list
]
;;
(* [print_tree tree] prints a rendering of [tree]. *)
let rec print_tree
    ?(pad : (string * string)= ("", ""))
    (tree : tree) : unit =
  (* [pd] prefixes this node's own line; [pc] is the continuation prefix
     carried down to every child line. *)
  let pd, pc = pad in
  match tree with
  | `Node (tag, cs) ->
    Printf.printf "%s%s\n" pd tag;
    let n = List.length cs - 1 in
    (* The last child gets the "`--" elbow and blank continuation; earlier
       children get "|--" and keep a "|" running down to their siblings. *)
    List.iteri (
      fun i c ->
        let pad =
          (pc ^ (if i = n then "`-- " else "|-- "),
           pc ^ (if i = n then "    " else "|   ")) in
        print_tree ~pad c
    ) cs
;;
(* An example tree. *)
(* A small tree: "." with children S (itself holding T -> U, and V) and W. *)
let tree =
  `Node ("."
        , [
          `Node ("S", [
              `Node ("T", [
                  `Node ("U", [])]);
              `Node ("V", [])])
        ; `Node ("W", [])
        ])
;;

(* Print the example tree as an ASCII hierarchy rooted at ".". *)
let () = print_tree tree
;;
| null | https://raw.githubusercontent.com/shayne-fletcher/zen/10a1d0b9bf261bb133918dd62fb1593c3d4d21cb/ocaml/print_tree/print_tree.ml | ocaml | A type of non-empty trees of strings.
[print_tree tree] prints a rendering of [tree].
An example tree.
Print the example tree. | type tree = [
|`Node of string * tree list
]
;;
let rec print_tree
?(pad : (string * string)= ("", ""))
(tree : tree) : unit =
let pd, pc = pad in
match tree with
| `Node (tag, cs) ->
Printf.printf "%s%s\n" pd tag;
let n = List.length cs - 1 in
List.iteri (
fun i c ->
let pad =
(pc ^ (if i = n then "`-- " else "|-- "),
pc ^ (if i = n then " " else "| ")) in
print_tree ~pad c
) cs
;;
let tree =
`Node ("."
, [
`Node ("S", [
`Node ("T", [
`Node ("U", [])]);
`Node ("V", [])])
; `Node ("W", [])
])
;;
let () = print_tree tree
;;
|
3a18a9b73241254f73904e595ac536993a7247c8d3ef0bd27605a6052d073e9f | marcusm/startrek-clojure | utils.clj | (ns startrek.utils
(:require [clojure.data.generators :as gen])
(:require [clojure.math.numeric-tower :as math]))
;; global constants
(def dim 8)
;; sector map values
(def enterprise-id 1)
(def klingon-id 2)
(def base-id 3)
(def star-id 4)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Wraps the random distribution methods so I can swap them out when testing.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; randomness wrappers
(declare gen-idx gen-idx gen-double gen-uniform)

;; Thin wrappers around clojure.data.generators, kept as separate vars so
;; tests can swap in deterministic implementations (see the comment block
;; above).
(defn gen-idx [] (gen/uniform 1 (+ 1 dim)))
(defn gen-double [] (gen/double))
(defn gen-uniform [a b] (gen/uniform a b))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Some common math functions needed in several places.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- sqr
  "Uses the numeric tower expt to square a number"
  [x]
  (math/expt x 2))

(defn- euclidean-squared-distance
  "Computes the Euclidean squared distance between two sequences"
  [a b]
  ;; (comp sqr -) applied pairwise: (sqr (- ai bi)), summed over the zip of
  ;; the two sequences.
  (reduce + (map (comp sqr -) a b)))

(defn euclidean-distance
  "Computes the Euclidean distance between two sequences"
  [a b]
  (math/sqrt (euclidean-squared-distance a b)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Functions used to extract or change shape of map data.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn strip-x-y
  "Returns the [x y] pair held under the :x and :y keys of a-map."
  [a-map]
  [(:x a-map) (:y a-map)])
(defn point-2-str
  "Renders an integer coordinate pair (a sequence of [x y]) as \"x,y\"."
  [point]
  (let [[x y] point]
    (format "%d,%d" x y)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Functions used for indexing
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn coord-to-index
  "Maps a 1-based [x y] coordinate onto a 0-based row-major index of the
  dim x dim grid."
  [coord]
  (let [[x y] coord]
    (+ (dec x) (* dim (dec y)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Common test methods
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn leave-quadrant?
  "Truthy (true) when any coordinate falls outside the quadrant bounds
  [0.5, 8.5); nil otherwise."
  [coord]
  (some #(or (< % 0.5) (>= % 8.5)) coord))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Functions used for text output
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def messages (atom []))  ; accumulates everything emitted via `message`

(defn message
  "All print side effects are written to here by default. This is to reduce spam
  during unit tests or while testing code in the REPL. During actual execution,
  the main game loop rebinds this to println."
  ([] (swap! messages conj ""))
  ([text & rest] (swap! messages conj text rest)))
| null | https://raw.githubusercontent.com/marcusm/startrek-clojure/65ef7811d134b634faa478c71b3a9db5e4f4b57a/src/startrek/utils.clj | clojure | global constants
sector map values
Wraps the random distribution methods so I can swap them out when testing.
randomness wrappers
Some common math functions needed in several places.
Functions used to extract or change shape of map data.
Functions used for indexing
Common test methods
Functions used for text output
| (ns startrek.utils
(:require [clojure.data.generators :as gen])
(:require [clojure.math.numeric-tower :as math]))
(def dim 8)
(def enterprise-id 1)
(def klingon-id 2)
(def base-id 3)
(def star-id 4)
(declare gen-idx gen-idx gen-double gen-uniform)
(defn gen-idx [] (gen/uniform 1 (+ 1 dim)))
(defn gen-double [] (gen/double))
(defn gen-uniform [a b] (gen/uniform a b))
(defn- sqr
"Uses the numeric tower expt to square a number"
[x]
(math/expt x 2))
(defn- euclidean-squared-distance
"Computes the Euclidean squared distance between two sequences"
[a b]
(reduce + (map (comp sqr -) a b)))
(defn euclidean-distance
"Computes the Euclidean distance between two sequences"
[a b]
(math/sqrt (euclidean-squared-distance a b)))
(defn strip-x-y [a-map]
(let [{:keys [x y]} a-map]
[x y]))
(defn point-2-str [point]
(format "%d,%d" (first point) (second point)))
(defn coord-to-index [coord]
(+ (dec (first coord)) (* (dec (second coord)) dim)))
(defn leave-quadrant? [coord]
(some true? (concat (map #(< % 0.5) coord)
(map #(>= % 8.5) coord))))
(def messages (atom []))
(defn message
"All print side effects are written to here by default. This is to reduce spam
during unit tests or while testing code in the REPL. During actual execution,
the main game loop rebinds this to println."
([] (swap! messages conj ""))
([text & rest] (swap! messages conj text rest)))
|
a1152b524469d51bedfd38280f67e750d920a6dffc9e2fc0e6bf829dcaff67de | LBacchiani/session-subtyping-tool | RandomTypes.hs | module RandomTypes where
-- This is an ad-hoc tool to generate random session types
-- will be made user-enabled soon.
import Data.List (nub)
import Control.Applicative ((<$>))
import Control.Monad (liftM, liftM2, replicateM)
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
import Test.QuickCheck
import System.Random (getStdGen, StdGen)
import Control.Monad.Zip (mzip)
import Data.Time (UTCTime, getCurrentTime)
cabal install MissingH
import Data.Char (toLower)
-- DEBUG
import System.IO.Unsafe
import Debug.Trace
-- A recursion variable, e.g. the X in `rec X . T`.
data Variable = Var String
  deriving (Show, Eq)

-- A message label exchanged in a choice.
data Message = Msg String
  deriving (Show, Eq)

-- Polarity of a choice; Send prints as "!", Receive as "?".
data Direction = Send | Receive
  deriving (Show, Eq)

-- Binary session types: termination, directed choice over labelled
-- continuations, recursion binders and recursion calls.
data SessionType = End
                 | Choice Direction [(Message, SessionType)]
                 | RecDef Variable SessionType
                 | RecCall Variable
  deriving (Eq)

instance Show SessionType where
  show = printSessionType
-- True iff every RecCall is bound by an enclosing RecDef, no recursion
-- variable is rebound inside its own scope, and every Choice is non-empty
-- with pairwise-distinct message labels.
wellFormed :: SessionType -> Bool
wellFormed lt = helper [] lt
  where helper vs End = True
        -- shadowing an in-scope variable is rejected outright
        helper vs (RecDef (Var s) lt) = if s `elem` vs
                                          then False
                                          else helper (s:vs) lt
        helper vs (RecCall (Var s)) = s `elem` vs
        helper vs (Choice dir []) = False
        helper vs (Choice dir l@(x:xs)) =
          let msgs = map fst l
          in if (length msgs) == (length $ nub msgs)
               then and $ map (helper vs . snd) l
               else False
-- True when all message labels are pairwise distinct (deduplication with
-- nub does not shrink the list).
disjoint :: [Message] -> Bool
disjoint msgs = length (nub msgs) == length msgs
instance Arbitrary Variable where
arbitrary = elements $ map (\x -> Var [x]) ['A'..'Z']
instance Arbitrary Message where
arbitrary = sized list
where list n = elements $ map (\x -> Msg [x]) (take 7 ['a'..'z'])
instance Arbitrary Direction where
arbitrary = elements [Send, Receive]
instance Arbitrary SessionType where
arbitrary = sized (sessterm [] False)
-- | Generate a random session type of depth roughly n. `vars` tracks the
-- recursion variables in scope; `flag` marks positions where a bare
-- End/RecCall leaf is not allowed. The alternative RecCall/RecDef branches
-- below were commented out by the author; their `--` markers were lost in
-- extraction and are restored here so the definition parses.
-- NOTE(review): `rec <- elements vars` diverges when `vars` is empty (the
-- top-level Arbitrary instance starts with []) — confirm intended.
sessterm :: [String] -> Bool -> Int -> Gen SessionType
sessterm vars flag 0 = if flag
                         then elements [End]
                         else elements (End:(map (\x -> RecCall (Var x)) vars))
sessterm vars flag n =
  do let available = filter (\x -> not $ [x] `elem` vars) (take (maximum [n `div` 2, 1]) ['A'..'Z'])
     nvar <- elements available
     msgs <- fmap nub $ listOf1 arbitrary :: Gen [Message]
     rec <- elements vars
     recdef <- sessterm ([nvar]:vars) True n
     dir <- arbitrary
     nexts <- vectorOf (length msgs) (sessterm vars False (n-1)) :: Gen [SessionType]
     let pairs = zip msgs nexts
     elements [ if n < 2
                  then End
                  else Choice dir pairs
              -- , if (null vars || flag)
              --     then Choice dir pairs
              --     else RecCall (Var rec)
              -- , if null available
              --     then End
              --     else RecDef (Var [nvar]) recdef
              , Choice dir pairs
              ]
-- Generator that wraps a fresh RecDef at each positive depth (while unused
-- capital-letter variables remain) around a choice; at depth 0 every
-- branch closes with a call back to some in-scope variable, prefixed by a
-- dummy !xx send when a bare call is not allowed (`flag`).
recalterterm :: [String] -> Bool -> Int -> Gen SessionType
recalterterm vars flag n
  | n > 0 = do
      let available = filter (\x -> not $ [x] `elem` vars) (take (maximum [n `div` 2, 1]) ['A'..'Z'])
      nvar <- elements available
      msgs <- fmap nub $ listOf1 arbitrary :: Gen [Message]
      nexts <- vectorOf (length msgs) (recalterterm vars False (n-1)) :: Gen [SessionType]
      dir <- arbitrary
      recdef <- recalterterm ([nvar]:vars) True n
      let pairs = zip msgs nexts
      if flag || null available
        then elements [ Choice dir pairs ]
        else elements [ RecDef (Var [nvar]) recdef ]
  | otherwise = do
      rec <- elements vars
      if flag
        then elements [ Choice Send [(Msg "xx", RecCall (Var rec))] ]
        else elements [RecCall (Var rec)]

-- Generator for the body of a single recursion variable `s`: every leaf is
-- RecCall s; `flag` forces the outermost level to be a Choice.
recterm :: String -> Bool -> Int -> Gen SessionType
recterm s flag 0 = elements [RecCall (Var s)]
recterm s flag n = do
  msgs <- fmap nub $ listOf1 (resize 10 arbitrary) :: Gen [Message]
  nexts <- vectorOf (length msgs) (recterm s False (n-1)) :: Gen [SessionType]
  dir <- arbitrary
  let pairs = zip msgs nexts
  elements [ if flag
               then Choice dir pairs
               else RecCall (Var s)
           , Choice dir pairs
           ]
-- maxTerm vs: nests one RecDef per variable in `vs`, closed by a single
-- send-choice offering one (lowercased) message per variable, each looping
-- back to that variable.
maxTerm :: [String] -> SessionType
maxTerm xs = helper xs xs
  where helper (x:xs) all = RecDef (Var x) $ Choice Send [(Msg $ map toLower x, (helper xs all))]
        helper [] all = Choice Send $ map (\x -> (Msg $ map toLower x, RecCall (Var x))) all

-- Applies f to the first i two-letter names "AA","AB",... and tags each
-- result with (size, size); one entry per prefix length from i down to 1.
makeMaxTerms :: ([String] -> SessionType) -> Int -> [((Int, Int), SessionType)]
makeMaxTerms f 0 = []
makeMaxTerms f i = let alphabet = [[x]++[y] | x <- ['A'..'Z'], y <- ['A'..'Z'] ]
                       list = take i alphabet
                   in ((length list,length list), f list ):(makeMaxTerms f (i-1))

-- Like maxTerm but with a chosen direction and a doubly-nested final
-- choice: |vs|^2 label combinations before looping back.
maxBranchTerm :: Direction -> [String] -> SessionType
maxBranchTerm dir xs = helper xs xs
  where helper (x:xs) all = RecDef (Var x) $ Choice dir [(Msg $ map toLower x, (helper xs all))]
        helper [] all = Choice dir $ map (\x -> (Msg $ map toLower x,
                          Choice dir $ map (\y -> (Msg $ map toLower y, RecCall (Var x))) all
                          )) all

-- singleRec s ms: rec s . +{ !m; s | m <- ms }.
singleRec :: String -> [String] -> SessionType
singleRec s xs = RecDef (Var s) $ Choice Send $ map (\y -> (Msg y, RecCall (Var s))) xs
-- main :: IO ()
-- main = do
saveTypes $ makeMaxTerms ( maxBranchTerm Send ) 100
-- putStrLn "Done"
-- Entry point: samples session types of size 5, prints their
-- (depth, message-count) metrics and writes each to a file. The dead
-- `else` branch instead exercises the recterm/unfold pipeline.
main :: IO ()
main =
  if True
    then do --
      list <- sample' (resize 5 arbitrary :: Gen SessionType)
      let sizes = map (\x -> (maxDepth x, numberOfMessages x)) list
      putStrLn $ show sizes
      saveTypes $ zip sizes list
    else do list' <- sample' (recterm "X" True 6 :: Gen SessionType)
            let list = map (\x -> RecDef (Var "X") x) list'
            let sizes = map (\x -> (maxDepth x, numberOfMessages x)) list
            putStrLn $ show sizes
            saveUnfold $ zip sizes list
-- Writes each type to generated_test_<i>_<depth>x<width>__<time>.txt.
-- NOTE(review): `replace` is not defined in this file — presumably
-- Data.String.Utils from MissingH (see the install note near the imports);
-- confirm the import.
saveTypes :: [((Int, Int), SessionType)] -> IO ()
saveTypes sts = helper 0 sts
  where helper i (((d,w),x):xs) =
          do time <- getCurrentTime
             let f = "generated_test_"++(show i)++"_"++(show d)++"x"++(show w)
                     ++"__"++((replace " " "") $ show $ time)++".txt"
             writeFile f (printSessionType x)
             helper (i+1) xs
        helper i [] = return ()

-- As saveTypes, but also writes a once-unfolded variant of each type to a
-- sibling *_unfolded.txt file.
saveUnfold :: [((Int, Int), SessionType)] -> IO ()
saveUnfold sts = helper 0 sts
  where helper i (((d,w),x):xs) =
          do time <- getCurrentTime
             let f = "generated_test_"++(show i)++"_"++(show d)++"x"++(show w)
                     ++"__"++((replace " " "") $ show $ time)
             writeFile (f++".txt") (printSessionType x)
             writeFile (f++"_unfolded.txt") (printSessionType $ unfold 1 x)
             helper (i+1) xs
        helper i [] = return ()
-- unfold i t: unrolls a top-level RecDef, substituting the recursion body
-- for each RecCall; the countdown `n` lets nested calls unroll i times.
-- Only RecDef-over-Choice shapes are supported (anything else errors).
unfold :: Int -> SessionType -> SessionType
unfold i (RecDef s t) = RecDef s (helper i t)
  where helper n (RecCall s)
          | n > 1 = helper (n-1) t
          | n <= 1 = t
        helper n (Choice dir xs) = Choice dir (map (\x -> (fst x, helper n $ snd x)) xs)
        helper n _ = error $ "Unsupported unfolding"
unfold _ _ = error $ "Unsupported unfolding"
-- Accepts a term whose depth is within +/-2 of the requested size.
wellSized :: Int -> SessionType -> Bool
wellSized i lt = ((i-2) <= maxDepth lt) && (maxDepth lt <= (i+2))

-- Longest chain of nested choices; RecDef wrappers add no depth.
maxDepth :: SessionType -> Int
maxDepth End = 0
maxDepth (RecCall _) = 0
maxDepth (RecDef _ t) = (maxDepth t)
maxDepth (Choice dir xs) = 1+(maximum $ map (maxDepth . snd) xs)

-- Count of End/RecCall leaves.
numberOfLeaves :: SessionType -> Int
numberOfLeaves End = 1
numberOfLeaves (RecCall _) = 1
numberOfLeaves (RecDef _ t) = (numberOfLeaves t)
numberOfLeaves (Choice dir xs) = foldr (+) 0 $ map (numberOfLeaves . snd) xs

-- Total number of message labels across all choices.
numberOfMessages :: SessionType -> Int
numberOfMessages End = 0
numberOfMessages (RecCall _) = 0
numberOfMessages (RecDef _ t) = (numberOfMessages t)
numberOfMessages (Choice dir xs) = foldr (+) (length xs) $ map (numberOfMessages . snd) xs
-- "!" for send, "?" for receive.
printDirection :: Direction -> String
printDirection Send = "!"
printDirection Receive = "?"

-- Concrete syntax: end | X | rec X . T | +{!m; T, ...} | &[?m; T, ...].
printSessionType :: SessionType -> String
printSessionType End = "end"
printSessionType (RecCall (Var var)) = var
printSessionType (RecDef (Var var) t) = "rec "++var++" . "++(printSessionType t)
printSessionType (Choice dir xs) = (if dir == Send
                                      then "+{"
                                      else "&[")
                                   ++
                                   helper dir xs
                                   ++
                                   (if dir == Send
                                      then "}"
                                      else "]")
  -- branches are comma+newline separated; the last one has no trailing comma
  where helper dir ((Msg m,x):y:xs) = (printDirection dir)++(m)++"; "++(printSessionType x)
                                      ++",\n"++(helper dir (y:xs))
        helper dir [(Msg m,x)] = (printDirection dir)++(m)++"; "++(printSessionType x)
        helper _ [] = []
| null | https://raw.githubusercontent.com/LBacchiani/session-subtyping-tool/268d716fafae3c4b50899a8f2ce29233ae8beb32/session-type-utilities/session-subtyping-algorithms/synchronous-subtyping/RandomTypes.hs | haskell | This is an ad-hoc tool to generate random session types
will be made user-enabled soon.
DEBUG
, if (null vars || flag)
then Choice dir pairs
, if null available
then End
main :: IO ()
main = do
putStrLn "Done"
| module RandomTypes where
import Data.List (nub)
import Control.Applicative ((<$>))
import Control.Monad (liftM, liftM2, replicateM)
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
import Test.QuickCheck
import System.Random (getStdGen, StdGen)
import Control.Monad.Zip (mzip)
import Data.Time (UTCTime, getCurrentTime)
cabal install MissingH
import Data.Char (toLower)
import System.IO.Unsafe
import Debug.Trace
data Variable = Var String
deriving (Show, Eq)
data Message = Msg String
deriving (Show, Eq)
data Direction = Send | Receive
deriving (Show, Eq)
data SessionType = End
| Choice Direction [(Message, SessionType)]
| RecDef Variable SessionType
| RecCall Variable
deriving (Eq)
instance Show SessionType where
show = printSessionType
wellFormed :: SessionType -> Bool
wellFormed lt = helper [] lt
where helper vs End = True
helper vs (RecDef (Var s) lt) = if s `elem` vs
then False
else helper (s:vs) lt
helper vs (RecCall (Var s)) = s `elem` vs
helper vs (Choice dir []) = False
helper vs (Choice dir l@(x:xs)) =
let msgs = map fst l
in if (length msgs) == (length $ nub msgs)
then and $ map (helper vs . snd) l
else False
disjoint :: [Message] -> Bool
disjoint msgs = (length msgs) == (length $ nub msgs)
instance Arbitrary Variable where
arbitrary = elements $ map (\x -> Var [x]) ['A'..'Z']
instance Arbitrary Message where
arbitrary = sized list
where list n = elements $ map (\x -> Msg [x]) (take 7 ['a'..'z'])
instance Arbitrary Direction where
arbitrary = elements [Send, Receive]
instance Arbitrary SessionType where
arbitrary = sized (sessterm [] False)
sessterm :: [String] -> Bool -> Int -> Gen SessionType
sessterm vars flag 0 = if flag
then elements [End]
else elements (End:(map (\x -> RecCall (Var x)) vars))
sessterm vars flag n =
do let available = filter (\x -> not $ [x] `elem` vars) (take (maximum [n `div` 2, 1]) ['A'..'Z'])
nvar <- elements available
msgs <- fmap nub $ listOf1 arbitrary :: Gen [Message]
rec <- elements vars
recdef <- sessterm ([nvar]:vars) True n
dir <- arbitrary
nexts <- vectorOf (length msgs) (sessterm vars False (n-1)) :: Gen [SessionType]
let pairs = zip msgs nexts
elements [ if n < 2
then End
else Choice dir pairs
else ( Var rec )
else RecDef ( Var [ nvar ] ) recdef
, Choice dir pairs
]
recalterterm :: [String] -> Bool -> Int -> Gen SessionType
recalterterm vars flag n
| n > 0 = do
let available = filter (\x -> not $ [x] `elem` vars) (take (maximum [n `div` 2, 1]) ['A'..'Z'])
nvar <- elements available
msgs <- fmap nub $ listOf1 arbitrary :: Gen [Message]
nexts <- vectorOf (length msgs) (recalterterm vars False (n-1)) :: Gen [SessionType]
dir <- arbitrary
recdef <- recalterterm ([nvar]:vars) True n
let pairs = zip msgs nexts
if flag || null available
then elements [ Choice dir pairs ]
else elements [ RecDef (Var [nvar]) recdef ]
| otherwise = do
rec <- elements vars
if flag
then elements [ Choice Send [(Msg "xx", RecCall (Var rec))] ]
else elements [RecCall (Var rec)]
recterm :: String -> Bool -> Int -> Gen SessionType
recterm s flag 0 = elements [RecCall (Var s)]
recterm s flag n = do
msgs <- fmap nub $ listOf1 (resize 10 arbitrary) :: Gen [Message]
nexts <- vectorOf (length msgs) (recterm s False (n-1)) :: Gen [SessionType]
dir <- arbitrary
let pairs = zip msgs nexts
elements [ if flag
then Choice dir pairs
else RecCall (Var s)
, Choice dir pairs
]
maxTerm :: [String] -> SessionType
maxTerm xs = helper xs xs
where helper (x:xs) all = RecDef (Var x) $ Choice Send [(Msg $ map toLower x, (helper xs all))]
helper [] all = Choice Send $ map (\x -> (Msg $ map toLower x, RecCall (Var x))) all
makeMaxTerms :: ([String] -> SessionType) -> Int -> [((Int, Int), SessionType)]
makeMaxTerms f 0 = []
makeMaxTerms f i = let alphabet = [[x]++[y] | x <- ['A'..'Z'], y <- ['A'..'Z'] ]
list = take i alphabet
in ((length list,length list), f list ):(makeMaxTerms f (i-1))
maxBranchTerm :: Direction -> [String] -> SessionType
maxBranchTerm dir xs = helper xs xs
where helper (x:xs) all = RecDef (Var x) $ Choice dir [(Msg $ map toLower x, (helper xs all))]
helper [] all = Choice dir $ map (\x -> (Msg $ map toLower x,
Choice dir $ map (\y -> (Msg $ map toLower y, RecCall (Var x))) all
)) all
singleRec :: String -> [String] -> SessionType
singleRec s xs = RecDef (Var s) $ Choice Send $ map (\y -> (Msg y, RecCall (Var s))) xs
saveTypes $ makeMaxTerms ( maxBranchTerm Send ) 100
main :: IO ()
main =
if True
list <- sample' (resize 5 arbitrary :: Gen SessionType)
let sizes = map (\x -> (maxDepth x, numberOfMessages x)) list
putStrLn $ show sizes
saveTypes $ zip sizes list
else do list' <- sample' (recterm "X" True 6 :: Gen SessionType)
let list = map (\x -> RecDef (Var "X") x) list'
let sizes = map (\x -> (maxDepth x, numberOfMessages x)) list
putStrLn $ show sizes
saveUnfold $ zip sizes list
saveTypes :: [((Int, Int), SessionType)] -> IO ()
saveTypes sts = helper 0 sts
where helper i (((d,w),x):xs) =
do time <- getCurrentTime
let f = "generated_test_"++(show i)++"_"++(show d)++"x"++(show w)
++"__"++((replace " " "") $ show $ time)++".txt"
writeFile f (printSessionType x)
helper (i+1) xs
helper i [] = return ()
saveUnfold :: [((Int, Int), SessionType)] -> IO ()
saveUnfold sts = helper 0 sts
where helper i (((d,w),x):xs) =
do time <- getCurrentTime
let f = "generated_test_"++(show i)++"_"++(show d)++"x"++(show w)
++"__"++((replace " " "") $ show $ time)
writeFile (f++".txt") (printSessionType x)
writeFile (f++"_unfolded.txt") (printSessionType $ unfold 1 x)
helper (i+1) xs
helper i [] = return ()
unfold :: Int -> SessionType -> SessionType
unfold i (RecDef s t) = RecDef s (helper i t)
where helper n (RecCall s)
| n > 1 = helper (n-1) t
| n <= 1 = t
helper n (Choice dir xs) = Choice dir (map (\x -> (fst x, helper n $ snd x)) xs)
helper n _ = error $ "Unsupported unfolding"
unfold _ _ = error $ "Unsupported unfolding"
wellSized :: Int -> SessionType -> Bool
wellSized i lt = ((i-2) <= maxDepth lt) && (maxDepth lt <= (i+2))
maxDepth :: SessionType -> Int
maxDepth End = 0
maxDepth (RecCall _) = 0
maxDepth (RecDef _ t) = (maxDepth t)
maxDepth (Choice dir xs) = 1+(maximum $ map (maxDepth . snd) xs)
numberOfLeaves :: SessionType -> Int
numberOfLeaves End = 1
numberOfLeaves (RecCall _) = 1
numberOfLeaves (RecDef _ t) = (numberOfLeaves t)
numberOfLeaves (Choice dir xs) = foldr (+) 0 $ map (numberOfLeaves . snd) xs
numberOfMessages :: SessionType -> Int
numberOfMessages End = 0
numberOfMessages (RecCall _) = 0
numberOfMessages (RecDef _ t) = (numberOfMessages t)
numberOfMessages (Choice dir xs) = foldr (+) (length xs) $ map (numberOfMessages . snd) xs
printDirection :: Direction -> String
printDirection Send = "!"
printDirection Receive = "?"
printSessionType :: SessionType -> String
printSessionType End = "end"
printSessionType (RecCall (Var var)) = var
printSessionType (RecDef (Var var) t) = "rec "++var++" . "++(printSessionType t)
printSessionType (Choice dir xs) = (if dir == Send
then "+{"
else "&[")
++
helper dir xs
++
(if dir == Send
then "}"
else "]")
where helper dir ((Msg m,x):y:xs) = (printDirection dir)++(m)++"; "++(printSessionType x)
++",\n"++(helper dir (y:xs))
helper dir [(Msg m,x)] = (printDirection dir)++(m)++"; "++(printSessionType x)
helper _ [] = []
|
a082679cab1382472afbd5ef06dc78a44bc96f0414cb99e5347313161965b6af | kumarshantanu/asphalt | test_connpool.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file LICENSE at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns asphalt.test-connpool
"DataSource maker for Apache DBCP 1.x JDBC connection pool."
(:require
[clj-dbcp.core :as dbcp]))
(defn make-datasource
  "Builds a pooled JDBC DataSource (via clj-dbcp) from a config map with
  keys :classname, :jdbc-url, :username and :password. Uses \"SELECT 1;\"
  as the connection-validation query."
  [{:keys [classname
           jdbc-url
           username
           password]}]
  ;; The previous `:as config` alias was never used and has been dropped.
  (dbcp/make-datasource {:classname classname
                         :jdbc-url jdbc-url
                         :username username
                         :password password
                         :val-query "SELECT 1;"}))
| null | https://raw.githubusercontent.com/kumarshantanu/asphalt/1a6a890ec05d038b204764c5b0fb5357476f41f1/test-connpool/dbcp/asphalt/test_connpool.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) . All rights reserved .
(ns asphalt.test-connpool
"DataSource maker for Apache DBCP 1.x JDBC connection pool."
(:require
[clj-dbcp.core :as dbcp]))
(defn make-datasource
[{:keys [classname
jdbc-url
username
password]
:as config}]
(dbcp/make-datasource {:classname classname
:jdbc-url jdbc-url
:username username
:password password
:val-query "SELECT 1;"}))
|
62f3854a681b2f11760949e01f66cbf4b18bda61631ad8c5e7d661fc4369f842 | nervous-systems/eulalie | creds.cljc | (ns eulalie.creds
(:require [eulalie.util :as util]
[eulalie.instance-data :as instance-data]
[eulalie.platform.time :as platform.time]
[glossop.core :as g
#? (:clj :refer :cljs :refer-macros) [go-catching <?]]))
(defn env
  "Reads AWS credentials from the process environment
  (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY / AWS_SESSION_TOKEN).
  Returns nil when no secret key is set; :token is only included when a
  session token is present."
  []
  (let [secret-key (util/env! "AWS_SECRET_ACCESS_KEY")
        token (util/env! "AWS_SESSION_TOKEN")]
    (when (not-empty secret-key)
      (cond->
        {:access-key (util/env! "AWS_ACCESS_KEY_ID")
         :secret-key secret-key}
        token (assoc :token token)))))
(defmulti creds->credentials
  "Unfortunately-named mechanism to turn the informally-specified 'creds' map
  supplied by the user into a map with :access-key, :secret-key, :token members,
  suitable for signing requests, etc. To support more exotic use-cases, like
  mutable/refreshable credentials, we offer this layer of indirection.
  Dispatches on :eulalie/type; each method wraps its result in go-catching."
  :eulalie/type)

;; Plain credential maps are already in the right shape.
(defmethod creds->credentials :default [creds]
  (go-catching creds))

;; Mutable credentials keep the current map in an atom under :current.
(defmethod creds->credentials :mutable [{:keys [current]}]
  (go-catching @current))
(defn refresh!
  "Invokes the :refresh fn and stores its result in the :current atom,
  defaulting :expiration to ::no-expiry when the refreshed map carries none.
  Yields the creds map itself."
  [{:keys [current refresh] :as creds}]
  (go-catching
    ;; The no-expiry junk is kind of awkward, but we don't want constructing
    ;; creds to do any immediate I/O, so we just assume the credentials will
    ;; expire
    (reset! current
            (-> (refresh)
                <?
                (update :expiration #(or % ::no-expiry))))
    creds))

;; Blocking (JVM-only) variant of refresh!.
#? (:clj (def refresh!! (comp g/<?! refresh!)))
;; Expiring credentials: refresh when never fetched, or when within
;; `threshold` msecs of expiry (`msecs-now` is injectable for tests).
(defmethod creds->credentials :expiring
  [{:keys [threshold current refresh] :as m} & [msecs-now]]
  (go-catching
    (let [{:keys [expiration]} @current]
      (when (or (nil? expiration)
                (<= (- expiration (or msecs-now (platform.time/msecs-now))) threshold))
        ;; So this is pretty wasteful - there could be large numbers of
        ;; concurrent requests, all with the same expired credentials - they
        ;; should all be waiting on a single request
        (<? (refresh! m)))
      @current)))
(defn expiring-creds
[refresh-fn & [{:keys [threshold]
:or {threshold (* 60 1000 5)}}]]
{:eulalie/type :expiring
:current (atom nil)
:refresh refresh-fn
:threshold threshold})
(defn iam
([]
(expiring-creds instance-data/default-iam-credentials!))
([role]
(expiring-creds #(instance-data/iam-credentials! role))))
| null | https://raw.githubusercontent.com/nervous-systems/eulalie/ee435987278f5ed628f576700b716d9d0bc17c61/src/eulalie/creds.cljc | clojure | immediate I/O, so we just assume the credentials will expire
So this is pretty wasteful - there could be large numbers of
concurrent requests, all with the same expired credentials - they
should all be waiting on a single request | (ns eulalie.creds
(:require [eulalie.util :as util]
[eulalie.instance-data :as instance-data]
[eulalie.platform.time :as platform.time]
[glossop.core :as g
#? (:clj :refer :cljs :refer-macros) [go-catching <?]]))
(defn env []
(let [secret-key (util/env! "AWS_SECRET_ACCESS_KEY")
token (util/env! "AWS_SESSION_TOKEN")]
(when (not-empty secret-key)
(cond->
{:access-key (util/env! "AWS_ACCESS_KEY_ID")
:secret-key secret-key}
token (assoc :token token)))))
(defmulti creds->credentials
"Unfortunately-named mechanism to turn the informally-specified 'creds' map
supplied by the user into a map with :access-key, :secret-key, :token members,
suitable for signing requests, etc. To support more exotic use-cases, like
mutable/refreshable credentials, we offer this layer of indirection."
:eulalie/type)
(defmethod creds->credentials :default [creds]
(go-catching creds))
(defmethod creds->credentials :mutable [{:keys [current]}]
(go-catching @current))
(defn refresh! [{:keys [current refresh] :as creds}]
(go-catching
The no expiry junk is kind of awkward , but we do n't want ( ) to do any
(reset! current
(-> (refresh)
<?
(update :expiration #(or % ::no-expiry))))
creds))
#? (:clj (def refresh!! (comp g/<?! refresh!)))
(defmethod creds->credentials :expiring
[{:keys [threshold current refresh] :as m} & [msecs-now]]
(go-catching
(let [{:keys [expiration]} @current]
(when (or (nil? expiration)
(<= (- expiration (or msecs-now (platform.time/msecs-now))) threshold))
(<? (refresh! m)))
@current)))
(defn expiring-creds
[refresh-fn & [{:keys [threshold]
:or {threshold (* 60 1000 5)}}]]
{:eulalie/type :expiring
:current (atom nil)
:refresh refresh-fn
:threshold threshold})
(defn iam
([]
(expiring-creds instance-data/default-iam-credentials!))
([role]
(expiring-creds #(instance-data/iam-credentials! role))))
|
56bb40842572bd0eaf63c8d484679df6d2e8aaabca0bba5e18ca1209173b6086 | fossas/fossa-cli | MockDockerEngineApi.hs | {-# LANGUAGE GADTs #-}
# LANGUAGE UndecidableInstances #
module Test.MockDockerEngineApi (
ApiExpectation,
DockerEngineApiMockC,
MockApi (..),
MockApiC (runMockApiC),
alwaysReturns,
assertAllSatisfied,
fails,
runMockApi,
runApiWithMock,
) where
import Control.Algebra (Algebra (..), Has, send, type (:+:) (..))
import Control.Carrier.Simple (SimpleC, interpret)
import Control.Carrier.State.Strict (StateC (StateC), evalState)
import Control.Effect.Diagnostics (Diagnostics, fatalText)
import Control.Effect.DockerEngineApi (DockerEngineApiF (ExportImage, GetImageSize, IsDockerEngineAccessible))
import Control.Effect.Lift (Lift, sendIO)
import Control.Effect.State (State, get, modify, put)
import Control.Monad (guard)
import Control.Monad.Trans (MonadIO)
import Data.Kind (Type)
import Data.List (intercalate)
import Data.Text (Text)
import Test.HUnit (assertFailure)
type DockerEngineApiMockC = SimpleC DockerEngineApiF
data MockApi (m :: Type -> Type) a where
MockApiAlways :: DockerEngineApiF a -> a -> MockApi m ()
MockApiFails :: DockerEngineApiF a -> Text -> MockApi m ()
MockApiRunExpectations :: DockerEngineApiF a -> MockApi m (Maybe (ApiResult a))
AssertUnexpectedCall :: DockerEngineApiF a -> MockApi m a
AssertAllSatisfied :: MockApi m ()
data ExpectationRepetition
= Once
| Always
deriving (Eq, Ord, Show)
data ExpectationRequestType
= ExpectingExactRequest
| ExpectingAnyRequest
deriving (Eq, Ord, Show)
newtype ApiResult a = ApiResult (Either ApiFail a)
newtype ApiFail = ApiFail {unApiFail :: Text}
-- | An expectation of an API call made up of the request and response.
data ApiExpectation where
ApiExpectation :: ExpectationRepetition -> ExpectationRequestType -> DockerEngineApiF a -> ApiResult a -> ApiExpectation
alwaysReturns :: Has MockApi sig m => DockerEngineApiF a -> a -> m ()
alwaysReturns req resp = send $ MockApiAlways req resp
fails :: Has MockApi sig m => DockerEngineApiF a -> Text -> m ()
fails req msg = send $ MockApiFails req msg
assertAllSatisfied :: Has MockApi sig m => m ()
assertAllSatisfied = send AssertAllSatisfied
assertUnexpectedCall :: Has MockApi sig m => DockerEngineApiF a -> m a
assertUnexpectedCall = send . AssertUnexpectedCall
runExpectations :: Has MockApi sig m => DockerEngineApiF a -> m (Maybe (ApiResult a))
runExpectations = send . MockApiRunExpectations
newtype MockApiC m a = MockApiC
{ runMockApiC :: StateC [ApiExpectation] m a
}
deriving (Functor, Applicative, Monad, MonadIO)
instance (Algebra sig m, Has (Lift IO) sig m) => Algebra (MockApi :+: sig) (MockApiC m) where
alg hdl sig ctx = MockApiC $ case sig of
L (MockApiAlways req resp) -> do
let expectation = ApiExpectation Always ExpectingExactRequest req (ApiResult (Right resp))
modify (++ [expectation])
pure ctx
L (MockApiFails req msg) -> do
let expectation = ApiExpectation Once ExpectingExactRequest req (ApiResult (Left (ApiFail msg)))
modify (++ [expectation])
pure ctx
L (MockApiRunExpectations req) -> do
(<$ ctx) <$> handleRequest req
L (AssertUnexpectedCall req) -> do
expectations <- get
a <-
sendIO . assertFailure $
"Unexpected call: \n "
<> show req
<> "\n"
<> "Unsatisfied expectations: \n "
<> intercalate "\n " (map (\(ApiExpectation _ _ expectedReq _) -> show expectedReq) expectations)
pure (a <$ ctx)
L AssertAllSatisfied -> do
remainingExpectations <- get
let unsatisfiedSingleExpectations = filter isSingular remainingExpectations
if null unsatisfiedSingleExpectations
then pure ctx
else
sendIO . assertFailure $
"Test completed with unsatisfied expectations: \n "
<> intercalate "\n " (map (\(ApiExpectation _ _ req _) -> show req) unsatisfiedSingleExpectations)
R other -> alg (runMockApiC . hdl) (R other) ctx
isSingular :: ApiExpectation -> Bool
isSingular (ApiExpectation Once _ _ _) = True
isSingular _ = False
checkResult :: ExpectationRequestType -> DockerEngineApiF a -> DockerEngineApiF a -> ApiResult a -> Maybe (ApiResult a)
checkResult ExpectingExactRequest a b resp = resp <$ guard (a == b)
checkResult ExpectingAnyRequest _ _ resp = pure resp
matchExpectation :: DockerEngineApiF a -> ApiExpectation -> Maybe (ApiResult a)
matchExpectation a@(ExportImage{}) (ApiExpectation _ requestExpectation b@(ExportImage{}) resp) = checkResult requestExpectation a b resp
matchExpectation a@(GetImageSize{}) (ApiExpectation _ requestExpectation b@(GetImageSize{}) resp) = checkResult requestExpectation a b resp
matchExpectation a@(IsDockerEngineAccessible{}) (ApiExpectation _ requestExpectation b@(IsDockerEngineAccessible{}) resp) = checkResult requestExpectation a b resp
matchExpectation _ _ = Nothing
handleRequest ::
( Has (State [ApiExpectation]) sig m
) =>
forall a.
DockerEngineApiF a ->
m (Maybe (ApiResult a))
handleRequest req = do
expectations <- get
case testExpectations req expectations of
Just (resp, expectations') -> do
put expectations'
pure (Just resp)
Nothing ->
pure Nothing
testExpectations :: DockerEngineApiF a -> [ApiExpectation] -> Maybe (ApiResult a, [ApiExpectation])
testExpectations _ [] = Nothing
testExpectations req (expectation : rest) =
case matchExpectation req expectation of
Nothing -> fmap (expectation :) <$> testExpectations req rest
Just resp ->
if isSingular expectation
then Just (resp, rest)
else Just (resp, expectation : rest)
runApiWithMock ::
( Has (Lift IO) sig m
, Has Diagnostics sig m
, Has MockApi sig m
) =>
DockerEngineApiMockC m a ->
m a
runApiWithMock f = do
result <- interpret runRequest f
assertAllSatisfied
pure result
where
runRequest ::
( Has Diagnostics sig m
, Has MockApi sig m
) =>
DockerEngineApiF a ->
m a
runRequest req = do
apiResult <- runExpectations req
case apiResult of
Just (ApiResult result) -> either (fatalText . unApiFail) pure result
Nothing ->
assertUnexpectedCall req
runMockApi ::
( Has (Lift IO) sig m
) =>
MockApiC m a ->
m a
runMockApi =
evalState [] . runMockApiC
| null | https://raw.githubusercontent.com/fossas/fossa-cli/6603f238a34198f8c2b9825b69dd585a58331300/test/Test/MockDockerEngineApi.hs | haskell | # LANGUAGE GADTs #
| An expectation of an API call made up of the request and response. | # LANGUAGE UndecidableInstances #
module Test.MockDockerEngineApi (
ApiExpectation,
DockerEngineApiMockC,
MockApi (..),
MockApiC (runMockApiC),
alwaysReturns,
assertAllSatisfied,
fails,
runMockApi,
runApiWithMock,
) where
import Control.Algebra (Algebra (..), Has, send, type (:+:) (..))
import Control.Carrier.Simple (SimpleC, interpret)
import Control.Carrier.State.Strict (StateC (StateC), evalState)
import Control.Effect.Diagnostics (Diagnostics, fatalText)
import Control.Effect.DockerEngineApi (DockerEngineApiF (ExportImage, GetImageSize, IsDockerEngineAccessible))
import Control.Effect.Lift (Lift, sendIO)
import Control.Effect.State (State, get, modify, put)
import Control.Monad (guard)
import Control.Monad.Trans (MonadIO)
import Data.Kind (Type)
import Data.List (intercalate)
import Data.Text (Text)
import Test.HUnit (assertFailure)
type DockerEngineApiMockC = SimpleC DockerEngineApiF
data MockApi (m :: Type -> Type) a where
MockApiAlways :: DockerEngineApiF a -> a -> MockApi m ()
MockApiFails :: DockerEngineApiF a -> Text -> MockApi m ()
MockApiRunExpectations :: DockerEngineApiF a -> MockApi m (Maybe (ApiResult a))
AssertUnexpectedCall :: DockerEngineApiF a -> MockApi m a
AssertAllSatisfied :: MockApi m ()
data ExpectationRepetition
= Once
| Always
deriving (Eq, Ord, Show)
data ExpectationRequestType
= ExpectingExactRequest
| ExpectingAnyRequest
deriving (Eq, Ord, Show)
newtype ApiResult a = ApiResult (Either ApiFail a)
newtype ApiFail = ApiFail {unApiFail :: Text}
data ApiExpectation where
ApiExpectation :: ExpectationRepetition -> ExpectationRequestType -> DockerEngineApiF a -> ApiResult a -> ApiExpectation
alwaysReturns :: Has MockApi sig m => DockerEngineApiF a -> a -> m ()
alwaysReturns req resp = send $ MockApiAlways req resp
fails :: Has MockApi sig m => DockerEngineApiF a -> Text -> m ()
fails req msg = send $ MockApiFails req msg
assertAllSatisfied :: Has MockApi sig m => m ()
assertAllSatisfied = send AssertAllSatisfied
assertUnexpectedCall :: Has MockApi sig m => DockerEngineApiF a -> m a
assertUnexpectedCall = send . AssertUnexpectedCall
runExpectations :: Has MockApi sig m => DockerEngineApiF a -> m (Maybe (ApiResult a))
runExpectations = send . MockApiRunExpectations
newtype MockApiC m a = MockApiC
{ runMockApiC :: StateC [ApiExpectation] m a
}
deriving (Functor, Applicative, Monad, MonadIO)
instance (Algebra sig m, Has (Lift IO) sig m) => Algebra (MockApi :+: sig) (MockApiC m) where
alg hdl sig ctx = MockApiC $ case sig of
L (MockApiAlways req resp) -> do
let expectation = ApiExpectation Always ExpectingExactRequest req (ApiResult (Right resp))
modify (++ [expectation])
pure ctx
L (MockApiFails req msg) -> do
let expectation = ApiExpectation Once ExpectingExactRequest req (ApiResult (Left (ApiFail msg)))
modify (++ [expectation])
pure ctx
L (MockApiRunExpectations req) -> do
(<$ ctx) <$> handleRequest req
L (AssertUnexpectedCall req) -> do
expectations <- get
a <-
sendIO . assertFailure $
"Unexpected call: \n "
<> show req
<> "\n"
<> "Unsatisfied expectations: \n "
<> intercalate "\n " (map (\(ApiExpectation _ _ expectedReq _) -> show expectedReq) expectations)
pure (a <$ ctx)
L AssertAllSatisfied -> do
remainingExpectations <- get
let unsatisfiedSingleExpectations = filter isSingular remainingExpectations
if null unsatisfiedSingleExpectations
then pure ctx
else
sendIO . assertFailure $
"Test completed with unsatisfied expectations: \n "
<> intercalate "\n " (map (\(ApiExpectation _ _ req _) -> show req) unsatisfiedSingleExpectations)
R other -> alg (runMockApiC . hdl) (R other) ctx
isSingular :: ApiExpectation -> Bool
isSingular (ApiExpectation Once _ _ _) = True
isSingular _ = False
checkResult :: ExpectationRequestType -> DockerEngineApiF a -> DockerEngineApiF a -> ApiResult a -> Maybe (ApiResult a)
checkResult ExpectingExactRequest a b resp = resp <$ guard (a == b)
checkResult ExpectingAnyRequest _ _ resp = pure resp
matchExpectation :: DockerEngineApiF a -> ApiExpectation -> Maybe (ApiResult a)
matchExpectation a@(ExportImage{}) (ApiExpectation _ requestExpectation b@(ExportImage{}) resp) = checkResult requestExpectation a b resp
matchExpectation a@(GetImageSize{}) (ApiExpectation _ requestExpectation b@(GetImageSize{}) resp) = checkResult requestExpectation a b resp
matchExpectation a@(IsDockerEngineAccessible{}) (ApiExpectation _ requestExpectation b@(IsDockerEngineAccessible{}) resp) = checkResult requestExpectation a b resp
matchExpectation _ _ = Nothing
handleRequest ::
( Has (State [ApiExpectation]) sig m
) =>
forall a.
DockerEngineApiF a ->
m (Maybe (ApiResult a))
handleRequest req = do
expectations <- get
case testExpectations req expectations of
Just (resp, expectations') -> do
put expectations'
pure (Just resp)
Nothing ->
pure Nothing
testExpectations :: DockerEngineApiF a -> [ApiExpectation] -> Maybe (ApiResult a, [ApiExpectation])
testExpectations _ [] = Nothing
testExpectations req (expectation : rest) =
case matchExpectation req expectation of
Nothing -> fmap (expectation :) <$> testExpectations req rest
Just resp ->
if isSingular expectation
then Just (resp, rest)
else Just (resp, expectation : rest)
runApiWithMock ::
( Has (Lift IO) sig m
, Has Diagnostics sig m
, Has MockApi sig m
) =>
DockerEngineApiMockC m a ->
m a
runApiWithMock f = do
result <- interpret runRequest f
assertAllSatisfied
pure result
where
runRequest ::
( Has Diagnostics sig m
, Has MockApi sig m
) =>
DockerEngineApiF a ->
m a
runRequest req = do
apiResult <- runExpectations req
case apiResult of
Just (ApiResult result) -> either (fatalText . unApiFail) pure result
Nothing ->
assertUnexpectedCall req
runMockApi ::
( Has (Lift IO) sig m
) =>
MockApiC m a ->
m a
runMockApi =
evalState [] . runMockApiC
|
0d69a060314170cf5843810acb21842fda0f8adec5b32a2dcd7324c8892d21c8 | static-analysis-engineering/codehawk | cHCilFunDeclarations.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk C Analyzer Parser using CIL
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 - 2021 ) 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk C Analyzer Parser using CIL
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020-2021 Henny Sipma
Copyright (c) 2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
cchcil
open CHCilTypes
val mk_cilfundeclarations: unit -> cilfundeclarations_int
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/d2e83cef7430defdc4cf30fc1495fe4ff64d9f9d/CodeHawk/CHC/cchcil/cHCilFunDeclarations.mli | ocaml | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk C Analyzer Parser using CIL
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 - 2021 ) 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk C Analyzer Parser using CIL
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020-2021 Henny Sipma
Copyright (c) 2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
cchcil
open CHCilTypes
val mk_cilfundeclarations: unit -> cilfundeclarations_int
|
|
3c3fc71abc7febfbd783dfe1d7ad6e338e7dc359a2ab36d42ec57c6e545ced19 | prestancedesign/pingcrm-clojure | top_header.cljs | (ns pingcrm.shared.top-header
(:require ["@inertiajs/inertia-react" :refer [InertiaLink]]
[pingcrm.shared.logo :refer [logo]]
[pingcrm.shared.menu :refer [main-menu]]
[reagent.core :as r]))
(defn top-header []
(let [opened? (r/atom false)
on-click #(reset! opened? false)]
(fn []
[:div {:class "bg-indigo-900 md:flex-shrink-0 md:w-56 px-6 py-4 flex items-center justify-between md:justify-center"}
[:> InertiaLink {:class "mt-1", :href "/"}
[logo {:class "fill-white", :height "28", :width "120"}]]
[:div {:class "relative md:hidden"}
[:svg {:class "w-6 h-6 text-white cursor-pointer fill-current"
:on-click #(reset! opened? true)
:xmlns ""
:view-box "0 0 20 20"}
[:path {:d "M0 3h20v2H0V3zm0 6h20v2H0V9zm0 6h20v2H0v-2z"}]]
[:div {:class (str "absolute right-0 z-20" (when-not @opened? " hidden"))
:on-click on-click}
[:f> main-menu {:class "relative z-20 px-8 py-4 pb-2 mt-2 bg-indigo-800 rounded shadow-lg"}]
[:div {:on-click on-click
:class "fixed inset-0 z-10 bg-black opacity-25"}]]]])))
| null | https://raw.githubusercontent.com/prestancedesign/pingcrm-clojure/12f938f81a4b4010e8b66e87634fe631152cb18d/src/cljs/pingcrm/shared/top_header.cljs | clojure | (ns pingcrm.shared.top-header
(:require ["@inertiajs/inertia-react" :refer [InertiaLink]]
[pingcrm.shared.logo :refer [logo]]
[pingcrm.shared.menu :refer [main-menu]]
[reagent.core :as r]))
(defn top-header []
(let [opened? (r/atom false)
on-click #(reset! opened? false)]
(fn []
[:div {:class "bg-indigo-900 md:flex-shrink-0 md:w-56 px-6 py-4 flex items-center justify-between md:justify-center"}
[:> InertiaLink {:class "mt-1", :href "/"}
[logo {:class "fill-white", :height "28", :width "120"}]]
[:div {:class "relative md:hidden"}
[:svg {:class "w-6 h-6 text-white cursor-pointer fill-current"
:on-click #(reset! opened? true)
:xmlns ""
:view-box "0 0 20 20"}
[:path {:d "M0 3h20v2H0V3zm0 6h20v2H0V9zm0 6h20v2H0v-2z"}]]
[:div {:class (str "absolute right-0 z-20" (when-not @opened? " hidden"))
:on-click on-click}
[:f> main-menu {:class "relative z-20 px-8 py-4 pb-2 mt-2 bg-indigo-800 rounded shadow-lg"}]
[:div {:on-click on-click
:class "fixed inset-0 z-10 bg-black opacity-25"}]]]])))
|
|
e4dd8d9204dbd1c1d6df352efea7e04d89e176295fb3769330046bb6fc0c36fe | rescript-lang/rescript-compiler | ast_uncurry_gen.mli | Copyright ( C ) 2020- Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
val to_uncurry_fn :
Parsetree.expression ->
Bs_ast_mapper.mapper ->
Asttypes.arg_label ->
Parsetree.pattern ->
Parsetree.expression ->
bool -> (* async *)
Parsetree.expression
*
[ function ] can only take one argument , that is the reason we did not adopt it
syntax :
{ [ fun [ @bs ] pat pat1- > body ] }
[ to_uncurry_fn ( fun pat - > ( fun pat1 - > ... body ) ) ]
[function] can only take one argument, that is the reason we did not adopt it
syntax:
{[ fun [@bs] pat pat1-> body ]}
[to_uncurry_fn (fun pat -> (fun pat1 -> ... body))]
*)
val to_method_callback :
Location.t ->
Bs_ast_mapper.mapper ->
Asttypes.arg_label ->
Parsetree.pattern ->
Parsetree.expression ->
Parsetree.expression_desc
* syntax :
{ [ fun [ @bs.this ] obj pat pat1 - > body ] }
{[fun [@bs.this] obj pat pat1 -> body]}
*)
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/eb07cb50b6e6ba2bf26ce667d4e3c638a24b35c4/jscomp/frontend/ast_uncurry_gen.mli | ocaml | async | Copyright ( C ) 2020- Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
val to_uncurry_fn :
Parsetree.expression ->
Bs_ast_mapper.mapper ->
Asttypes.arg_label ->
Parsetree.pattern ->
Parsetree.expression ->
Parsetree.expression
*
[ function ] can only take one argument , that is the reason we did not adopt it
syntax :
{ [ fun [ @bs ] pat pat1- > body ] }
[ to_uncurry_fn ( fun pat - > ( fun pat1 - > ... body ) ) ]
[function] can only take one argument, that is the reason we did not adopt it
syntax:
{[ fun [@bs] pat pat1-> body ]}
[to_uncurry_fn (fun pat -> (fun pat1 -> ... body))]
*)
val to_method_callback :
Location.t ->
Bs_ast_mapper.mapper ->
Asttypes.arg_label ->
Parsetree.pattern ->
Parsetree.expression ->
Parsetree.expression_desc
* syntax :
{ [ fun [ @bs.this ] obj pat pat1 - > body ] }
{[fun [@bs.this] obj pat pat1 -> body]}
*)
|
04d8ff3e9de95c868a4fd4c0f4b525456d903e1975fdc5c65fc56d3671862017 | simoncourtenage/quanthas | Settings.hs |
Copyright ( C ) 2010 , ( )
This file is part of QuantHas , an open - source Haskell implementation
of the QuantLib library for quantitative finance .
is free software : you can redistribute it and/or modify it
under the terms of the QuantHas license . You should have received a
copy of the license along with this program .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the license for more details .
Copyright (C) 2010, Simon Courtenage ()
This file is part of QuantHas, an open-source Haskell implementation
of the QuantLib library for quantitative finance.
Quanthas is free software: you can redistribute it and/or modify it
under the terms of the QuantHas license. You should have received a
copy of the license along with this program.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the license for more details.
-}
module QuantHas.Settings(module QuantHas.Settings) where
import Data.Maybe
import QuantHas.Time.Date
data Settings = Settings {
evalDate :: Date,
includeRefDateEvents :: Bool,
includeTodaysCashFlows :: Maybe Bool,
enforcesTodaysHistoricFixings :: Bool
}
deriving (Show)
defaultSettings:: Settings
defaultSettings = Settings mkNullDate False Nothing False
| null | https://raw.githubusercontent.com/simoncourtenage/quanthas/6e0b2cc9a60bb7d1709f98ed10d09aa6c071c8dd/src/QuantHas/Settings.hs | haskell |
Copyright ( C ) 2010 , ( )
This file is part of QuantHas , an open - source Haskell implementation
of the QuantLib library for quantitative finance .
is free software : you can redistribute it and/or modify it
under the terms of the QuantHas license . You should have received a
copy of the license along with this program .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the license for more details .
Copyright (C) 2010, Simon Courtenage ()
This file is part of QuantHas, an open-source Haskell implementation
of the QuantLib library for quantitative finance.
Quanthas is free software: you can redistribute it and/or modify it
under the terms of the QuantHas license. You should have received a
copy of the license along with this program.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the license for more details.
-}
module QuantHas.Settings(module QuantHas.Settings) where
import Data.Maybe
import QuantHas.Time.Date
data Settings = Settings {
evalDate :: Date,
includeRefDateEvents :: Bool,
includeTodaysCashFlows :: Maybe Bool,
enforcesTodaysHistoricFixings :: Bool
}
deriving (Show)
defaultSettings:: Settings
defaultSettings = Settings mkNullDate False Nothing False
|
|
86b32e5b90b17153a35f2247989d1af8dee55b2d8313d3cf1ee12ad3d24a4f1f | travelping/ergw | proxy_lib_SUITE.erl | Copyright 2018 , Travelping GmbH < >
%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version
2 of the License , or ( at your option ) any later version .
-module(proxy_lib_SUITE).
-compile([export_all, nowarn_export_all]).
-include_lib("common_test/include/ct.hrl").
-include("../include/ergw.hrl").
-include("ergw_test_lib.hrl").
-define('CP-Node', <<"topon.s5s8.pgw.epc.mnc001.mcc001.3gppnetwork.org">>).
-define('SX-Node', <<"topon.sx.prox01.epc.mnc001.mcc001.3gppnetwork.org">>).
-define('CP-IP', {172,20,21,91}).
-define('SX-IP', {172,20,16,91}).
-define(SERVICES, [{'x-3gpp-pgw', 'x-s8-gtp'},
{'x-3gpp-pgw', 'x-s5-gtp'},
{'x-3gpp-pgw', 'x-gp'},
{'x-3gpp-pgw', 'x-gn'}]).
-define(ERGW_NODE_SELECTION,
#{default =>
#{type => static,
entries =>
[
APN NAPTR alternative
#{type => naptr,
name => <<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>,
order => 0,
preference => 0,
service => 'x-3gpp-pgw',
protocols => ['x-s5-gtp', 'x-s8-gtp', 'x-gn', 'x-gp'],
replacement => ?'CP-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-pgw',
protocols => ['x-s5-gtp', 'x-s8-gtp', 'x-gn', 'x-gp'],
replacement => ?'CP-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-upf',
protocols => ['x-sxa'],
replacement => ?'SX-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc123.mcc001.3gppnetwork.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-pgw',
protocols => ['x-s5-gtp', 'x-s8-gtp', 'x-gn', 'x-gp'],
replacement => ?'CP-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc123.mcc001.3gppnetwork.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-upf',
protocols => ['x-sxa'],
replacement => ?'SX-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc123.mcc001.example.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-pgw',
protocols => ['x-s5-gtp', 'x-s8-gtp', 'x-gn', 'x-gp'],
replacement => ?'CP-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc123.mcc001.example.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-upf',
protocols => ['x-sxa'],
replacement => ?'SX-Node'},
%% A/AAAA record alternatives
#{type => host,
name => ?'CP-Node',
ip4 => [?'CP-IP'],
ip6 => []},
#{type => host,
name => ?'SX-Node',
ip4 => [?'SX-IP'],
ip6 => []}
]
}
}).
%%%===================================================================
%%% Common Test callbacks
%%%===================================================================
all() ->
[proxy_lookup].
suite() ->
[{timetrap, {seconds, 30}}].
groups() ->
[].
init_per_suite(Config) ->
Node = [{node_id, <<"node">>}],
{ok, _} = application:ensure_all_started(ergw_core),
ergw_cluster:wait_till_ready(),
ergw_cluster:start([{enabled, false}]),
ergw_cluster:wait_till_running(),
ergw_core:start_node(Node),
ergw_core:wait_till_running(),
ok = ergw_core:setopts(node_selection, ?ERGW_NODE_SELECTION),
Config.
end_per_suite(_Config) ->
[application:stop(App) || App <- [ranch, cowboy, ergw_core, ergw_aaa, ergw_cluster]],
ok.
%%%===================================================================
%%% Test cases
%%%===================================================================
proxy_lookup() ->
[{doc, "lookup from config"}].
proxy_lookup(_Config) ->
NodeSelect = [default],
Socket = #socket{name = <<"TEST">>, type = 'gtp-c'},
PI =
#{imsi => <<"001010000000002">>,
msisdn => <<"444444400008502">>,
apn => apn(<<"web">>),
context => <<"GRX2">>
},
gtp_path_reg:start_link(),
PI1 =
PI#{gwSelectionAPN => apn(<<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>)},
{ok, Proxy1} = ergw_proxy_lib:select_gw(PI1, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy1),
PI2 =
PI#{gwSelectionAPN => apn(<<"web.apn.epc.mnc123.mcc001.3gppnetwork.org">>)},
{ok, Proxy2} = ergw_proxy_lib:select_gw(PI2, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy2),
PI4 = PI#{gwSelectionAPN => apn(<<"web">>)},
{ok, Proxy4} = ergw_proxy_lib:select_gw(PI4, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy4),
PI5 = PI#{gwSelectionAPN => apn(<<"web.mnc001.mcc001.gprs">>)},
{ok, Proxy5} = ergw_proxy_lib:select_gw(PI5, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy5),
PI6 = PI#{gwSelectionAPN => apn(<<"web.mnc123.mcc001.gprs">>)},
{ok, Proxy6} = ergw_proxy_lib:select_gw(PI6, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy6),
PI7 = PI#{gwSelectionAPN => apn(<<"web.mnc567.mcc001.gprs">>)},
{error, Proxy7} = ergw_proxy_lib:select_gw(PI7, v1, ?SERVICES, NodeSelect, Socket),
?match(#ctx_err{level = ?FATAL, reply = system_failure}, Proxy7),
PI8 = PI#{gwSelectionAPN => apn(<<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>)},
ok = gtp_path_reg:register({<<"TEST">>, v1, ?'CP-IP'}, down),
{error, Proxy8} = ergw_proxy_lib:select_gw(PI8, v1, ?SERVICES, NodeSelect, Socket),
?match(#ctx_err{level = ?FATAL, reply = no_resources_available}, Proxy8),
ok.
apn(Bin) ->
binary:split(Bin, <<".">>, [global, trim_all]).
| null | https://raw.githubusercontent.com/travelping/ergw/b577328c8bbdc4959b45c321338971ed242bb822/apps/ergw_core/test/proxy_lib_SUITE.erl | erlang | This program is free software; you can redistribute it and/or
A/AAAA record alternatives
===================================================================
Common Test callbacks
===================================================================
===================================================================
Test cases
=================================================================== | Copyright 2018 , Travelping GmbH < >
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version
2 of the License , or ( at your option ) any later version .
-module(proxy_lib_SUITE).
-compile([export_all, nowarn_export_all]).
-include_lib("common_test/include/ct.hrl").
-include("../include/ergw.hrl").
-include("ergw_test_lib.hrl").
-define('CP-Node', <<"topon.s5s8.pgw.epc.mnc001.mcc001.3gppnetwork.org">>).
-define('SX-Node', <<"topon.sx.prox01.epc.mnc001.mcc001.3gppnetwork.org">>).
-define('CP-IP', {172,20,21,91}).
-define('SX-IP', {172,20,16,91}).
-define(SERVICES, [{'x-3gpp-pgw', 'x-s8-gtp'},
{'x-3gpp-pgw', 'x-s5-gtp'},
{'x-3gpp-pgw', 'x-gp'},
{'x-3gpp-pgw', 'x-gn'}]).
-define(ERGW_NODE_SELECTION,
#{default =>
#{type => static,
entries =>
[
APN NAPTR alternative
#{type => naptr,
name => <<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>,
order => 0,
preference => 0,
service => 'x-3gpp-pgw',
protocols => ['x-s5-gtp', 'x-s8-gtp', 'x-gn', 'x-gp'],
replacement => ?'CP-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-pgw',
protocols => ['x-s5-gtp', 'x-s8-gtp', 'x-gn', 'x-gp'],
replacement => ?'CP-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-upf',
protocols => ['x-sxa'],
replacement => ?'SX-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc123.mcc001.3gppnetwork.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-pgw',
protocols => ['x-s5-gtp', 'x-s8-gtp', 'x-gn', 'x-gp'],
replacement => ?'CP-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc123.mcc001.3gppnetwork.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-upf',
protocols => ['x-sxa'],
replacement => ?'SX-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc123.mcc001.example.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-pgw',
protocols => ['x-s5-gtp', 'x-s8-gtp', 'x-gn', 'x-gp'],
replacement => ?'CP-Node'},
#{type => naptr,
name => <<"web.apn.epc.mnc123.mcc001.example.org">>,
order => 300,
preference => 64536,
service => 'x-3gpp-upf',
protocols => ['x-sxa'],
replacement => ?'SX-Node'},
#{type => host,
name => ?'CP-Node',
ip4 => [?'CP-IP'],
ip6 => []},
#{type => host,
name => ?'SX-Node',
ip4 => [?'SX-IP'],
ip6 => []}
]
}
}).
all() ->
[proxy_lookup].
suite() ->
[{timetrap, {seconds, 30}}].
groups() ->
[].
init_per_suite(Config) ->
Node = [{node_id, <<"node">>}],
{ok, _} = application:ensure_all_started(ergw_core),
ergw_cluster:wait_till_ready(),
ergw_cluster:start([{enabled, false}]),
ergw_cluster:wait_till_running(),
ergw_core:start_node(Node),
ergw_core:wait_till_running(),
ok = ergw_core:setopts(node_selection, ?ERGW_NODE_SELECTION),
Config.
end_per_suite(_Config) ->
[application:stop(App) || App <- [ranch, cowboy, ergw_core, ergw_aaa, ergw_cluster]],
ok.
proxy_lookup() ->
[{doc, "lookup from config"}].
proxy_lookup(_Config) ->
NodeSelect = [default],
Socket = #socket{name = <<"TEST">>, type = 'gtp-c'},
PI =
#{imsi => <<"001010000000002">>,
msisdn => <<"444444400008502">>,
apn => apn(<<"web">>),
context => <<"GRX2">>
},
gtp_path_reg:start_link(),
PI1 =
PI#{gwSelectionAPN => apn(<<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>)},
{ok, Proxy1} = ergw_proxy_lib:select_gw(PI1, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy1),
PI2 =
PI#{gwSelectionAPN => apn(<<"web.apn.epc.mnc123.mcc001.3gppnetwork.org">>)},
{ok, Proxy2} = ergw_proxy_lib:select_gw(PI2, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy2),
PI4 = PI#{gwSelectionAPN => apn(<<"web">>)},
{ok, Proxy4} = ergw_proxy_lib:select_gw(PI4, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy4),
PI5 = PI#{gwSelectionAPN => apn(<<"web.mnc001.mcc001.gprs">>)},
{ok, Proxy5} = ergw_proxy_lib:select_gw(PI5, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy5),
PI6 = PI#{gwSelectionAPN => apn(<<"web.mnc123.mcc001.gprs">>)},
{ok, Proxy6} = ergw_proxy_lib:select_gw(PI6, v1, ?SERVICES, NodeSelect, Socket),
?match({?'CP-Node', ?'CP-IP'}, Proxy6),
PI7 = PI#{gwSelectionAPN => apn(<<"web.mnc567.mcc001.gprs">>)},
{error, Proxy7} = ergw_proxy_lib:select_gw(PI7, v1, ?SERVICES, NodeSelect, Socket),
?match(#ctx_err{level = ?FATAL, reply = system_failure}, Proxy7),
PI8 = PI#{gwSelectionAPN => apn(<<"web.apn.epc.mnc001.mcc001.3gppnetwork.org">>)},
ok = gtp_path_reg:register({<<"TEST">>, v1, ?'CP-IP'}, down),
{error, Proxy8} = ergw_proxy_lib:select_gw(PI8, v1, ?SERVICES, NodeSelect, Socket),
?match(#ctx_err{level = ?FATAL, reply = no_resources_available}, Proxy8),
ok.
apn(Bin) ->
binary:split(Bin, <<".">>, [global, trim_all]).
|
5236b42f2d5b84baea61182656f12093b169916990e22aed38ba08a90636291e | HaskellForCats/HaskellForCats | reTurnRightAroundIo.hs | ---------------- reTurnRightAroundIo ----------------
-- works but is stylistically poor!
main :: IO ()
main = do
line1 <- getLine
line2 <- getLine
lines <- return (line1 ++ " " ++ line2)
putStrLn lines
| null | https://raw.githubusercontent.com/HaskellForCats/HaskellForCats/2d7a15c0cdaa262c157bbf37af6e72067bc279bc/IO/reTurnRightAroundIo.hs | haskell | -------------- reTurnRightAroundIo ----------------
works but is stylistically poor! |
main :: IO ()
main = do
line1 <- getLine
line2 <- getLine
lines <- return (line1 ++ " " ++ line2)
putStrLn lines
|
fae303de9a712ff71bd1fd2bdccaabcb57acdac367911544d4f6cfbd03421535 | well-typed/large-records | R030.hs | #if PROFILE_CORESIZE
{-# OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #-}
#endif
#if PROFILE_TIMING
{-# OPTIONS_GHC -ddump-to-file -ddump-timings #-}
#endif
{-# OPTIONS_GHC -fplugin=Data.Record.Anon.Plugin #-}
module Experiment.ToJSON.Sized.R030 where
import Data.Aeson (Value)
import Data.Record.Generic.JSON (gtoJSON)
import Common.RowOfSize.Row030
import Data.Record.Anon.Simple (Record)
recToJSON :: Record ExampleRow -> Value
recToJSON = gtoJSON | null | https://raw.githubusercontent.com/well-typed/large-records/78d0966e4871847e2c17a0aa821bacf38bdf96bc/large-records-benchmarks/bench/large-anon/Experiment/ToJSON/Sized/R030.hs | haskell | # OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #
# OPTIONS_GHC -ddump-to-file -ddump-timings #
# OPTIONS_GHC -fplugin=Data.Record.Anon.Plugin # | #if PROFILE_CORESIZE
#endif
#if PROFILE_TIMING
#endif
module Experiment.ToJSON.Sized.R030 where
import Data.Aeson (Value)
import Data.Record.Generic.JSON (gtoJSON)
import Common.RowOfSize.Row030
import Data.Record.Anon.Simple (Record)
recToJSON :: Record ExampleRow -> Value
recToJSON = gtoJSON |
e47813d13133de875fde5858b7ef149328656296371019d2f5a5fb7e0ccb516d | patoline/patoline | test_offset.ml |
Copyright Florian Hatat , , ,
Pierre - Etienne Meunier , , 2012 .
This file is part of Patoline .
Patoline is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
Patoline is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with Patoline . If not , see < / > .
Copyright Florian Hatat, Tom Hirschowitz, Pierre Hyvernat,
Pierre-Etienne Meunier, Christophe Raffalli, Guillaume Theyssier 2012.
This file is part of Patoline.
Patoline is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Patoline is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Patoline. If not, see </>.
*)
open Patoraw
open Offset
let to_point x=int_of_float (x*.50.)
let draw fx fy=
let n=1000 in
let fx2=rev fx in
let fy2=rev fy in
for i=0 to n do
let (x0,y0),_=eval fx fy (float_of_int i/.float_of_int n) in
let (x2,y2),_=eval fx2 fy2 (float_of_int i/.float_of_int n) in
Graphics.plot (to_point x0) (to_point y0);
Graphics.plot (to_point x2) (to_point y2)
done
let draw_bezier (fx,fy)=
let n=1000 in
for i=0 to n do
let x=Bezier.eval fx (float_of_int i/.float_of_int n) in
let y=Bezier.eval fy (float_of_int i/.float_of_int n) in
Graphics.plot (to_point x) (to_point y);
done
let _=
Random.init 200;
let x=ref
(if Array.length Sys.argv>1 then (
for _ = 1 to int_of_string Sys.argv.(1) do
let _=example () in ()
done;
int_of_string Sys.argv.(1))
else 0)
in
while true do
let ex,ey=example () in
Graphics.open_graph "";
Graphics.clear_graph ();
Graphics.set_color Graphics.black;
draw ex ey;
draw_bezier (ex, ey);
Graphics.set_color Graphics.red;
let col=ref false in
List.iter (fun (x,y)->
if !col then Graphics.set_color Graphics.red else
Graphics.set_color Graphics.blue;
col:= not !col;
draw_bezier (x,y))
(approx ex ey);
List.iter (fun (x,y)->
if !col then Graphics.set_color Graphics.red else
Graphics.set_color Graphics.blue;
col:= not !col;
draw_bezier (x,y))
(approx (rev ex) (rev ey));
Printf.printf "%d\n" !x;
flush stdout;
let _=Graphics.wait_next_event [Graphics.Key_pressed] in incr x
done
| null | https://raw.githubusercontent.com/patoline/patoline/3dcd41fdff64895d795d4a78baa27d572b161081/typography/test_offset.ml | ocaml |
Copyright Florian Hatat , , ,
Pierre - Etienne Meunier , , 2012 .
This file is part of Patoline .
Patoline is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
Patoline is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with Patoline . If not , see < / > .
Copyright Florian Hatat, Tom Hirschowitz, Pierre Hyvernat,
Pierre-Etienne Meunier, Christophe Raffalli, Guillaume Theyssier 2012.
This file is part of Patoline.
Patoline is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Patoline is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Patoline. If not, see </>.
*)
open Patoraw
open Offset
let to_point x=int_of_float (x*.50.)
let draw fx fy=
let n=1000 in
let fx2=rev fx in
let fy2=rev fy in
for i=0 to n do
let (x0,y0),_=eval fx fy (float_of_int i/.float_of_int n) in
let (x2,y2),_=eval fx2 fy2 (float_of_int i/.float_of_int n) in
Graphics.plot (to_point x0) (to_point y0);
Graphics.plot (to_point x2) (to_point y2)
done
let draw_bezier (fx,fy)=
let n=1000 in
for i=0 to n do
let x=Bezier.eval fx (float_of_int i/.float_of_int n) in
let y=Bezier.eval fy (float_of_int i/.float_of_int n) in
Graphics.plot (to_point x) (to_point y);
done
let _=
Random.init 200;
let x=ref
(if Array.length Sys.argv>1 then (
for _ = 1 to int_of_string Sys.argv.(1) do
let _=example () in ()
done;
int_of_string Sys.argv.(1))
else 0)
in
while true do
let ex,ey=example () in
Graphics.open_graph "";
Graphics.clear_graph ();
Graphics.set_color Graphics.black;
draw ex ey;
draw_bezier (ex, ey);
Graphics.set_color Graphics.red;
let col=ref false in
List.iter (fun (x,y)->
if !col then Graphics.set_color Graphics.red else
Graphics.set_color Graphics.blue;
col:= not !col;
draw_bezier (x,y))
(approx ex ey);
List.iter (fun (x,y)->
if !col then Graphics.set_color Graphics.red else
Graphics.set_color Graphics.blue;
col:= not !col;
draw_bezier (x,y))
(approx (rev ex) (rev ey));
Printf.printf "%d\n" !x;
flush stdout;
let _=Graphics.wait_next_event [Graphics.Key_pressed] in incr x
done
|
|
6b6b55b033ac3c2e7012e8b001fc777ecc55197e80c8301a4f9be7c20e9a91d1 | sonowz/advent-of-code-haskell | Day06.hs | import Control.Monad
import Data.Function
import Data.List
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map.Lazy (Map)
import qualified Data.Map.Lazy as Map
type Point = (Int, Int)
type Border = (Int, Int, Int, Int)
_x = fst
_y = snd
manhattanD (x1, y1) (x2, y2) = (abs $ x1 - x2) + (abs $ y1 - y2)
More than 2 nearest point = = Nothing
nearestPoint :: [Point] -> Point -> Maybe Point
nearestPoint points x = if minCount == 1 then find ((/=) nullPoint) points' else Nothing where
distances = map (manhattanD x) points
minD = minimum distances
minCount = length $ filter ((==) minD) distances
nullPoint = (maxBound :: Int, maxBound :: Int)
points' = zipWith (\p d -> if d == minD then p else nullPoint) points distances
getBorder :: [Point] -> Border
getBorder points = (l, b, r, t) where
xs = _x `map` points
ys = _y `map` points
l = minimum xs
r = maximum xs
b = minimum ys
t = maximum ys
-- 3x size of original border
getOuterBorder (l, b, r, t) = (l - w, b - h, r + w, t + h) where
w = r - l
h = t - b
getGrid :: Border -> [Point]
getGrid (l, b, r, t) = [(x, y) | x <- [l..r], y <- [b..t]]
getInfPoints :: Border -> [Point] -> Set Point
getInfPoints (l, b, r, t) points = infPoints where
borderLine = [(x, y) | x <- [l, r], y <- [b..t]] ++ [(x, y) | x <- [l..r], y <- [b, t]]
insertNearest s bp = maybe s (\np -> Set.insert np s) (nearestPoint points bp)
infPoints = foldl insertNearest Set.empty borderLine
solve1 :: [Point] -> Int
solve1 points = maximum $ map snd (Map.toList nonInfMap) where
border = getBorder points
grid = getGrid border
Point - > NearestCount
addNearest m p = maybe m (\np -> Map.update (\c -> Just (c+1)) np m) (nearestPoint points p)
pointMap' = foldl addNearest pointMap grid
infPoints = getInfPoints (getOuterBorder border) points
nonInfMap = Set.foldl (flip Map.delete) pointMap' infPoints
solve2 :: [Point] -> Int
solve2 points = length $ filter (\x -> x < 10000) sumGrid where
grid = getGrid $ getBorder points
manhattanSum points p = sum $ map (manhattanD p) points
sumGrid = map (manhattanSum points) grid
getPoint :: String -> Point
getPoint line = (read $ ints !! 0, read $ ints !! 1) where
ints = words $ delete ',' line
main' = do
points <- map getPoint <$> replicateM 50 getLine
putStrLn $ show $ solve1 points
putStrLn $ show $ solve2 points
| null | https://raw.githubusercontent.com/sonowz/advent-of-code-haskell/6cec825c5172bbec687aab510e43832e6f2c0372/Y2018/Day06.hs | haskell | 3x size of original border | import Control.Monad
import Data.Function
import Data.List
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map.Lazy (Map)
import qualified Data.Map.Lazy as Map
type Point = (Int, Int)
type Border = (Int, Int, Int, Int)
_x = fst
_y = snd
manhattanD (x1, y1) (x2, y2) = (abs $ x1 - x2) + (abs $ y1 - y2)
More than 2 nearest point = = Nothing
nearestPoint :: [Point] -> Point -> Maybe Point
nearestPoint points x = if minCount == 1 then find ((/=) nullPoint) points' else Nothing where
distances = map (manhattanD x) points
minD = minimum distances
minCount = length $ filter ((==) minD) distances
nullPoint = (maxBound :: Int, maxBound :: Int)
points' = zipWith (\p d -> if d == minD then p else nullPoint) points distances
getBorder :: [Point] -> Border
getBorder points = (l, b, r, t) where
xs = _x `map` points
ys = _y `map` points
l = minimum xs
r = maximum xs
b = minimum ys
t = maximum ys
getOuterBorder (l, b, r, t) = (l - w, b - h, r + w, t + h) where
w = r - l
h = t - b
getGrid :: Border -> [Point]
getGrid (l, b, r, t) = [(x, y) | x <- [l..r], y <- [b..t]]
getInfPoints :: Border -> [Point] -> Set Point
getInfPoints (l, b, r, t) points = infPoints where
borderLine = [(x, y) | x <- [l, r], y <- [b..t]] ++ [(x, y) | x <- [l..r], y <- [b, t]]
insertNearest s bp = maybe s (\np -> Set.insert np s) (nearestPoint points bp)
infPoints = foldl insertNearest Set.empty borderLine
solve1 :: [Point] -> Int
solve1 points = maximum $ map snd (Map.toList nonInfMap) where
border = getBorder points
grid = getGrid border
Point - > NearestCount
addNearest m p = maybe m (\np -> Map.update (\c -> Just (c+1)) np m) (nearestPoint points p)
pointMap' = foldl addNearest pointMap grid
infPoints = getInfPoints (getOuterBorder border) points
nonInfMap = Set.foldl (flip Map.delete) pointMap' infPoints
solve2 :: [Point] -> Int
solve2 points = length $ filter (\x -> x < 10000) sumGrid where
grid = getGrid $ getBorder points
manhattanSum points p = sum $ map (manhattanD p) points
sumGrid = map (manhattanSum points) grid
getPoint :: String -> Point
getPoint line = (read $ ints !! 0, read $ ints !! 1) where
ints = words $ delete ',' line
main' = do
points <- map getPoint <$> replicateM 50 getLine
putStrLn $ show $ solve1 points
putStrLn $ show $ solve2 points
|
99652e413867b84403c8a0895a18303cd1b74a0e92079e7331444e5d14e56377 | parsifal-47/socketpool | tcp_echo.erl | -module(tcp_echo).
-behaviour(application).
-export([start/2, stop/1]).
start(_StartType, _Args) ->
{ok, erlang:spawn(socketpool, start_listener,
[5555, 100, fun(_State, Bin) -> {ok, Bin} end, fun() -> ok end])}.
stop(_State) -> ok. | null | https://raw.githubusercontent.com/parsifal-47/socketpool/f4f2130b61ad902dc0c619d50e0a102222e5206b/examples/tcp_echo/src/tcp_echo.erl | erlang | -module(tcp_echo).
-behaviour(application).
-export([start/2, stop/1]).
start(_StartType, _Args) ->
{ok, erlang:spawn(socketpool, start_listener,
[5555, 100, fun(_State, Bin) -> {ok, Bin} end, fun() -> ok end])}.
stop(_State) -> ok. |
|
5d35d8b9d3167f23fc13014f395a9a00b5348b5929ab7e5996080fdb195637e6 | macourtney/Dark-Exchange | trade.clj | (ns darkexchange.model.trade
(:require [clj-record.boot :as clj-record-boot]
[clojure.contrib.logging :as logging]
[darkexchange.model.identity :as identity-model]
[darkexchange.model.offer :as offer]
[darkexchange.model.terms :as terms]
[darkexchange.model.trade-message :as trade-message]
[darkexchange.model.user :as user])
(:use darkexchange.model.base)
(:import [java.util Date]))
;; Forward declaration: get-record is generated later by the clj-record
;; init-model expansion below, but the update callback defined above it
;; needs to call it.
(declare get-record)

;; Workflow step keys.  next-step-key resolves each trade to exactly one of
;; these keywords, and waiting-for-key-to-text maps them onto display text.
(def needs-to-be-confirmed-key :needs-to-be-confirmed)     ; local user must confirm the trade
(def waiting-to-be-confirmed-key :waiting-to-be-confirmed) ; waiting on the partner to confirm
(def rejected-key :rejected)                               ; the trade was rejected
(def waiting-for-wants-key :waiting-for-wants)             ; waiting for the partner's payment to be sent
(def send-wants-receipt-key :send-wants-receipt)           ; local user must confirm payment received
(def send-has-key :send-has)                               ; local user must send their payment
(def waiting-for-has-receipt-key :waiting-for-has-receipt) ; waiting for the partner to confirm receipt
;; Registries of UI callbacks fired by the clj-record lifecycle hooks below.
(def trade-add-listeners (atom []))
(def update-trade-listeners (atom []))
(def delete-trade-listeners (atom []))

;; Appends listener to the given registry atom.
(defn- register-listener [registry listener]
  (swap! registry conj listener))

(defn add-trade-add-listener
  "Registers a callback invoked with each newly inserted trade."
  [listener]
  (register-listener trade-add-listeners listener))

(defn add-update-trade-listener
  "Registers a callback invoked with each updated trade."
  [listener]
  (register-listener update-trade-listeners listener))

(defn add-delete-trade-listener
  "Registers a callback invoked with each deleted trade."
  [listener]
  (register-listener delete-trade-listeners listener))

(defn trade-add
  "after-insert hook: notifies every registered add listener of new-trade."
  [new-trade]
  (dorun (map (fn [listener] (listener new-trade)) @trade-add-listeners)))
(defn trade-updated
  "after-update hook: refetches the trade from the database so listeners see
  the persisted state, then notifies every registered update listener."
  [trade]
  (let [fresh-trade (get-record (:id trade))]
    (dorun (map (fn [listener] (listener fresh-trade)) @update-trade-listeners))))
(defn trade-deleted
  "after-destroy hook: notifies every registered delete listener of trade."
  [trade]
  (dorun (map (fn [listener] (listener trade)) @delete-trade-listeners)))
;; Wire this namespace into clj-record.  The macro expansion generates the
;; CRUD functions used throughout this file (insert, update, get-record,
;; find-record, find-records) and the association finders (find-offer,
;; find-identity, find-trade-messages).
(clj-record.core/init-model
  (:associations (belongs-to identity)
    (belongs-to offer)
    (belongs-to user)
    (has-many trade-messages))
  ;; After each database write, notify the UI listeners registered in the
  ;; listener atoms above.
  (:callbacks (:after-insert trade-add)
    (:after-update trade-updated)
    (:after-destroy trade-deleted)))
(defn create-new-trade
  "Inserts a new trade row for the current user, stamping :created_at.
  Only the whitelisted keys from trade-data are persisted; the whitelisted
  values win over the defaults when both are present."
  [trade-data]
  (let [accepted-keys [:foreign_trade_id :identity_id :is_acceptor :offer_id
                       :wants_first :updated]
        defaults {:created_at (Date.)
                  :user_id (:id (user/current-user))}]
    (insert (merge defaults (select-keys trade-data accepted-keys)))))
(defn create-non-acceptor-trade
  "Creates the local record of a trade accepted by a remote peer.  Looks up
  the acceptor's identity from the given name/key/algorithm; returns nil when
  no matching identity is found.  The local side is not the acceptor
  (:is_acceptor 0) and receives first (:wants_first 1)."
  [acceptor-user-name acceptor-public-key acceptor-public-key-algorithm offer foreign-trade-id]
  (let [acceptor-identity (identity-model/find-identity acceptor-user-name
                                                        acceptor-public-key
                                                        acceptor-public-key-algorithm)]
    (when acceptor-identity
      (create-new-trade
        {:offer_id (:id offer)
         :identity_id (:id acceptor-identity)
         :foreign_trade_id foreign-trade-id
         :wants_first 1
         :is_acceptor 0
         :updated 1}))))
(defn create-acceptor-trade
  "Creates a trade where the local user is the acceptor (:is_acceptor 1) and
  sends first (:wants_first 0), paired with the partner's identity."
  [other-identity offer-id]
  (create-new-trade
    {:offer_id offer-id
     :identity_id (:id other-identity)
     :wants_first 0
     :is_acceptor 1
     :updated 1}))
(defn set-foreign-trade-id
  "Stores foreign-trade-id on the trade row identified by trade-id.  The
  foreign id is the trade id used on the partner's side; find-trade matches
  incoming data against it."
  [trade-id foreign-trade-id]
  (update { :id trade-id :foreign_trade_id foreign-trade-id }))
(defn open-trades
  "Returns all trades belonging to the given user (defaults to the current
  user) whose :closed flag is NULL or 0."
  ([] (open-trades (user/current-user)))
  ([user]
    (let [user-id (:id user)]
      (find-records ["(closed IS NULL OR closed = 0) AND user_id = ?" user-id]))))
;; True when the given column of the trade row holds a truthy flag
;; (as-boolean interprets the stored 0/1/nil values).
(defn- flag? [trade column]
  (as-boolean (column trade)))

(defn open-trade?
  "True when the trade has not been closed."
  [trade]
  (not (flag? trade :closed)))

(defn does-not-go-first?
  "True when the local side receives (\"wants\") first, i.e. it does not send
  payment first."
  [trade]
  (flag? trade :wants_first))

(defn goes-first?
  "True when the local side must send its payment before receiving."
  [trade]
  (not (does-not-go-first? trade)))

(defn needs-to-be-confirmed?
  "True for an unconfirmed trade where the local user is not the acceptor,
  i.e. the local user still has to confirm it."
  [trade]
  (not (or (flag? trade :accept_confirm) (flag? trade :is_acceptor))))

(defn waiting-to-be-confirmed?
  "True for an unconfirmed trade where the local user is the acceptor, i.e.
  the partner still has to confirm it."
  [trade]
  (and (not (flag? trade :accept_confirm)) (flag? trade :is_acceptor)))

(defn rejected?
  "True when the trade's acceptance was rejected."
  [trade]
  (flag? trade :accept_rejected))

(defn wants-sent?
  "True once the partner reported sending the payment this side wants."
  [trade]
  (flag? trade :wants_sent))

(defn wants-received?
  "True once the local user confirmed receiving the partner's payment."
  [trade]
  (flag? trade :wants_received))

(defn has-sent?
  "True once the local user reported sending their own payment."
  [trade]
  (flag? trade :has_sent))

(defn has-received?
  "True once the partner confirmed receiving the local user's payment."
  [trade]
  (flag? trade :has_received))
;; Returns step-key when (pred trade) is truthy, otherwise nil.
(defn- step-when [pred trade step-key]
  (when (pred trade) step-key))

(defn rejected-next-step-key
  "rejected-key when the trade was rejected, else nil."
  [trade]
  (step-when rejected? trade rejected-key))

(defn needs-to-be-confirmed-next-step-key
  "needs-to-be-confirmed-key when the local user must confirm, else nil."
  [trade]
  (step-when needs-to-be-confirmed? trade needs-to-be-confirmed-key))

(defn waiting-to-be-confirmed-next-step-key
  "waiting-to-be-confirmed-key when awaiting the partner's confirmation,
  else nil."
  [trade]
  (step-when waiting-to-be-confirmed? trade waiting-to-be-confirmed-key))

(defn confirmation-next-step-key
  "First applicable confirmation-phase step key (rejection checked first),
  or nil once the trade is confirmed."
  [trade]
  (some #(% trade) [rejected-next-step-key
                    needs-to-be-confirmed-next-step-key
                    waiting-to-be-confirmed-next-step-key]))
(defn waiting-for-wants-next-step-key
  "waiting-for-wants-key until the partner has sent their payment, else nil."
  [trade]
  (when-not (wants-sent? trade)
    waiting-for-wants-key))

(defn wants-received-next-step-key
  "send-wants-receipt-key until the local user confirms receipt, else nil."
  [trade]
  (when-not (wants-received? trade)
    send-wants-receipt-key))

(defn wants-next-step-key
  "Next step of the receiving (\"wants\") side of the trade, or nil when done."
  [trade]
  (some #(% trade) [waiting-for-wants-next-step-key wants-received-next-step-key]))

(defn has-sent-next-step-key
  "send-has-key until the local user sends their payment, else nil."
  [trade]
  (when-not (has-sent? trade)
    send-has-key))

(defn has-received-next-step-key
  "waiting-for-has-receipt-key until the partner confirms receipt, else nil."
  [trade]
  (when-not (has-received? trade)
    waiting-for-has-receipt-key))

(defn has-next-step-key
  "Next step of the sending (\"has\") side of the trade, or nil when done."
  [trade]
  (some #(% trade) [has-sent-next-step-key has-received-next-step-key]))

(defn has-want-next-step-key
  "Next payment-phase step, ordered by who pays first: the sending side is
  checked first when the local user goes first, otherwise the receiving side."
  [trade]
  (let [ordered-steps (if (goes-first? trade)
                        [has-next-step-key wants-next-step-key]
                        [wants-next-step-key has-next-step-key])]
    (some #(% trade) ordered-steps)))
(defn next-step-key
  "The trade's current workflow step key: confirmation-phase steps take
  precedence over payment-phase steps; nil when the trade is finished."
  [trade]
  (some #(% trade) [confirmation-next-step-key has-want-next-step-key]))
(defn waiting-for-key-to-text
  "Maps a workflow step key to its localized display string from the terms
  namespace; returns nil for an unknown (or nil) key."
  [waiting-for-key]
  (condp = waiting-for-key
    rejected-key (terms/rejected)
    needs-to-be-confirmed-key (terms/needs-to-be-confirmed)
    waiting-to-be-confirmed-key (terms/waiting-to-be-confirmed)
    waiting-for-wants-key (terms/waiting-for-payment-to-be-sent)
    send-wants-receipt-key (terms/confirm-payment-received)
    send-has-key (terms/send-payment)
    waiting-for-has-receipt-key (terms/waiting-for-payment-to-be-confirmed)
    nil))
(defn next-step-text
  "Human-readable label for the trade's next workflow step (see
  waiting-for-key-to-text); nil when the trade has no next step."
  [trade]
  (waiting-for-key-to-text (next-step-key trade)))

(defn has-unseen-message?
  "Delegates to trade-message/has-unseen-message? for this trade's id."
  [trade]
  (trade-message/has-unseen-message? (:id trade)))
(defn convert-to-table-trade
  "Flattens a trade (plus its associated offer) into the map shape displayed
  by the open-trades table.  Keeps the raw record under :original-trade."
  [trade]
  (let [offer (find-offer trade)
        row {:id (:id trade)
             :im-sending-amount (offer/has-amount-str offer)
             :im-sending-by (offer/has-payment-type-str offer)
             :im-receiving-amount (offer/wants-amount-str offer)
             :im-receiving-by (offer/wants-payment-type-str offer)
             :waiting-for (next-step-text trade)
             :original-trade trade}]
    ;; normalize the unseen-message flag to a strict true/false
    (assoc row :unseen-message? (boolean (has-unseen-message? trade)))))
(defn table-open-trades
  "All of the current user's open trades, converted to table-row maps."
  []
  (for [trade (open-trades)]
    (convert-to-table-trade trade)))
(defn as-view-trade
  "Loads the trade and attaches its associated :offer and :identity records
  for display.  Returns nil when trade-id is nil."
  [trade-id]
  (when trade-id
    (let [trade (get-record trade-id)]
      (assoc trade
        :offer (find-offer trade)
        :identity (find-identity trade)))))
(defn find-trade
  "Finds the local trade matching the partner's trade id and identity."
  [foreign-trade-id trade-partner-identity]
  (let [attributes {:foreign_trade_id foreign-trade-id
                    :identity_id (:id trade-partner-identity)}]
    (find-record attributes)))
;; Persists the acceptance decision flags on the trade row and returns the
;; trade id.  :updated is cleared at the same time.
(defn- save-acceptance [trade confirm-flag reject-flag]
  (let [trade-id (:id trade)]
    (update {:id trade-id
             :accept_confirm confirm-flag
             :accept_rejected reject-flag
             :updated 0})
    trade-id))

(defn confirm-trade
  "Marks the trade as accepted (clearing any rejection) and returns its id.
  The two-argument arity first resolves the trade via find-trade."
  ([foreign-trade-id trade-partner-identity]
    (confirm-trade (find-trade foreign-trade-id trade-partner-identity)))
  ([trade]
    (save-acceptance trade 1 0)))

(defn reject-trade
  "Marks the trade as rejected (clearing any confirmation) and returns its id.
  The two-argument arity first resolves the trade via find-trade."
  ([foreign-trade-id trade-partner-identity]
    (reject-trade (find-trade foreign-trade-id trade-partner-identity)))
  ([trade]
    (save-acceptance trade 0 1)))
(defn payment-sent
  "Records that the local user sent their payment and returns the refreshed
  trade record."
  [trade]
  (let [trade-id (:id trade)]
    (update {:id trade-id :has_sent 1})
    (get-record trade-id)))

(defn foreign-payment-sent
  "Records that the partner reported sending their payment for the trade
  matching foreign-trade-id, then returns the refreshed record."
  [foreign-trade-id trade-partner-identity]
  (let [trade (find-trade foreign-trade-id trade-partner-identity)
        trade-id (:id trade)]
    ;; NOTE(review): :updated 0 appears to clear a pending-sync flag set at
    ;; creation time -- confirm against the network message handlers.
    (update {:id trade-id :wants_sent 1 :updated 0})
    (get-record trade-id)))
(defn update-foreign-trade-id
  "Stores foreign-trade-id on the trade row identified by trade-id.
  Delegates to set-foreign-trade-id, which performs the identical update --
  the two functions were previously verbatim duplicates."
  [trade-id foreign-trade-id]
  (set-foreign-trade-id trade-id foreign-trade-id))
(defn close
  "Marks the trade closed in the database and returns the refreshed record."
  [trade]
  (let [trade-id (:id trade)]
    (update {:id trade-id :closed 1})
    (get-record trade-id)))
(defn complete?
  "True when all four payment flags are set: both payments were sent and both
  receipts were confirmed."
  [trade]
  (every? (fn [step-done?] (step-done? trade))
          [wants-sent? wants-received? has-sent? has-received?]))
(defn update-closed
  "Pure variant of closing: sets :closed on the trade map when the trade is
  complete, without touching the database."
  [trade]
  (if-not (complete? trade)
    trade
    (assoc trade :closed 1)))

(defn close-if-complete
  "Persists the closed flag (via close) when the trade is complete; otherwise
  returns the trade unchanged."
  [trade]
  (if-not (complete? trade)
    trade
    (close trade)))
(defn payment-received [trade]
(update { :id (:id trade) :wants_received 1 })
(close-if-complete (get-record (:id trade))))
(defn foreign-payment-received [foreign-trade-id trade-partner-identity]
(let [trade (find-trade foreign-trade-id trade-partner-identity)]
(update { :id (:id trade) :has_received 1 :updated 0 })
(close-if-complete (get-record (:id trade)))))
(defn table-trade-messages [trade]
(map trade-message/as-table-trade-message (find-trade-messages trade)))
(defn update-trade-accept-confirm [foreign-trade trade]
(if (as-boolean (:is_acceptor trade))
(merge trade { :accept_confirm (or (:accept_confirm foreign-trade) 0)
:accept_rejected (or (:accept_rejected foreign-trade) 0) })
trade))
(defn update-has-received [foreign-trade trade]
(assoc trade :has_received (:wants_received foreign-trade)))
(defn update-wants-sent [foreign-trade trade]
(assoc trade :wants_sent (:has_sent foreign-trade)))
(defn update-trade-messages [foreign-trade trade trade-partner-identity]
(doseq [message (:messages foreign-trade)]
(trade-message/update-or-create-message (:id trade) message trade-partner-identity)))
(defn update-trade [trade-partner-identity foreign-trade]
(when-let [trade (find-trade (:id foreign-trade) trade-partner-identity)]
(update
(update-closed
(update-wants-sent foreign-trade
(update-has-received foreign-trade
(update-trade-accept-confirm foreign-trade trade)))))
(update-trade-messages foreign-trade trade trade-partner-identity)
(assoc (get-record (:id trade)) :messages (trade-message/find-matching-messages (:messages foreign-trade)))))
(defn unconfirmed-messages [trade]
(when trade
(filter #(nil? (:foreign_message_id %1)) (find-trade-messages trade))))
(defn contains-unconfirmed-message? [trade]
(seq (unconfirmed-messages trade)))
(defn requires-action? [trade]
(when-let [trade-next-step-key (next-step-key trade)]
(trade-next-step-key #{ needs-to-be-confirmed-key send-wants-receipt-key send-has-key })))
(defn trade-updated [trade]
(update { :id (:id trade) :updated 1 }))
(defn trades-to-update
([] (trades-to-update (user/current-user)))
([user] (find-records ["(((updated IS NULL OR updated = 0) AND closed = 1) OR (closed IS NULL OR closed = 0)) AND user_id = ?" (:id user)])))
(defn trade-partner-text [trade]
(identity-model/identity-text (:identity trade)))
| null | https://raw.githubusercontent.com/macourtney/Dark-Exchange/1654d05cda0c81585da7b8e64f9ea3e2944b27f1/src/darkexchange/model/trade.clj | clojure | (ns darkexchange.model.trade
(:require [clj-record.boot :as clj-record-boot]
[clojure.contrib.logging :as logging]
[darkexchange.model.identity :as identity-model]
[darkexchange.model.offer :as offer]
[darkexchange.model.terms :as terms]
[darkexchange.model.trade-message :as trade-message]
[darkexchange.model.user :as user])
(:use darkexchange.model.base)
(:import [java.util Date]))
(declare get-record)
(def needs-to-be-confirmed-key :needs-to-be-confirmed)
(def waiting-to-be-confirmed-key :waiting-to-be-confirmed)
(def rejected-key :rejected)
(def waiting-for-wants-key :waiting-for-wants)
(def send-wants-receipt-key :send-wants-receipt)
(def send-has-key :send-has)
(def waiting-for-has-receipt-key :waiting-for-has-receipt)
(def trade-add-listeners (atom []))
(def update-trade-listeners (atom []))
(def delete-trade-listeners (atom []))
(defn add-trade-add-listener [listener]
(swap! trade-add-listeners conj listener))
(defn add-update-trade-listener [listener]
(swap! update-trade-listeners conj listener))
(defn add-delete-trade-listener [listener]
(swap! delete-trade-listeners conj listener))
(defn trade-add [new-trade]
(doseq [listener @trade-add-listeners]
(listener new-trade)))
(defn trade-updated [trade]
(let [trade (get-record (:id trade))]
(doseq [listener @update-trade-listeners]
(listener trade))))
(defn trade-deleted [trade]
(doseq [listener @delete-trade-listeners]
(listener trade)))
(clj-record.core/init-model
(:associations (belongs-to identity)
(belongs-to offer)
(belongs-to user)
(has-many trade-messages))
(:callbacks (:after-insert trade-add)
(:after-update trade-updated)
(:after-destroy trade-deleted)))
(defn create-new-trade [trade-data]
(insert
(merge { :created_at (new Date) :user_id (:id (user/current-user)) }
(select-keys trade-data [:foreign_trade_id :identity_id :is_acceptor :offer_id :wants_first :updated]))))
(defn create-non-acceptor-trade [acceptor-user-name acceptor-public-key acceptor-public-key-algorithm offer foreign-trade-id]
(when-let [acceptor-identity (identity-model/find-identity acceptor-user-name acceptor-public-key
acceptor-public-key-algorithm)]
(create-new-trade
{ :offer_id (:id offer)
:wants_first 1
:identity_id (:id acceptor-identity)
:is_acceptor 0
:foreign_trade_id foreign-trade-id
:updated 1 })))
(defn create-acceptor-trade [other-identity offer-id]
(create-new-trade
{ :offer_id offer-id
:wants_first 0
:identity_id (:id other-identity)
:is_acceptor 1
:updated 1 }))
(defn set-foreign-trade-id [trade-id foreign-trade-id]
(update { :id trade-id :foreign_trade_id foreign-trade-id }))
(defn open-trades
([] (open-trades (user/current-user)))
([user] (find-records ["(closed IS NULL OR closed = 0) AND user_id = ?" (:id user)])))
(defn open-trade? [trade]
(not (as-boolean (:closed trade))))
(defn does-not-go-first? [trade]
(as-boolean (:wants_first trade)))
(defn goes-first? [trade]
(not (does-not-go-first? trade)))
(defn needs-to-be-confirmed? [trade]
(and (not (as-boolean (:accept_confirm trade))) (not (as-boolean (:is_acceptor trade)))))
(defn waiting-to-be-confirmed? [trade]
(and (not (as-boolean (:accept_confirm trade))) (as-boolean (:is_acceptor trade))))
(defn rejected? [trade]
(as-boolean (:accept_rejected trade)))
(defn wants-sent? [trade]
(as-boolean (:wants_sent trade)))
(defn wants-received? [trade]
(as-boolean (:wants_received trade)))
(defn has-sent? [trade]
(as-boolean (:has_sent trade)))
(defn has-received? [trade]
(as-boolean (:has_received trade)))
(defn rejected-next-step-key [trade]
(when (rejected? trade)
rejected-key))
(defn needs-to-be-confirmed-next-step-key [trade]
(when (needs-to-be-confirmed? trade)
needs-to-be-confirmed-key))
(defn waiting-to-be-confirmed-next-step-key [trade]
(when (waiting-to-be-confirmed? trade)
waiting-to-be-confirmed-key))
(defn confirmation-next-step-key [trade]
(or (rejected-next-step-key trade) (needs-to-be-confirmed-next-step-key trade)
(waiting-to-be-confirmed-next-step-key trade)))
(defn waiting-for-wants-next-step-key [trade]
(when (not (wants-sent? trade))
waiting-for-wants-key))
(defn wants-received-next-step-key [trade]
(when (not (wants-received? trade))
send-wants-receipt-key))
(defn wants-next-step-key [trade]
(or (waiting-for-wants-next-step-key trade) (wants-received-next-step-key trade)))
(defn has-sent-next-step-key [trade]
(when (not (has-sent? trade))
send-has-key))
(defn has-received-next-step-key [trade]
(when (not (has-received? trade))
waiting-for-has-receipt-key))
(defn has-next-step-key [trade]
(or (has-sent-next-step-key trade) (has-received-next-step-key trade)))
(defn has-want-next-step-key [trade]
(if (goes-first? trade)
(or (has-next-step-key trade) (wants-next-step-key trade))
(or (wants-next-step-key trade) (has-next-step-key trade))))
(defn next-step-key [trade]
(or (confirmation-next-step-key trade) (has-want-next-step-key trade)))
(defn waiting-for-key-to-text [waiting-for-key]
(cond
(= waiting-for-key rejected-key) (terms/rejected)
(= waiting-for-key needs-to-be-confirmed-key) (terms/needs-to-be-confirmed)
(= waiting-for-key waiting-to-be-confirmed-key) (terms/waiting-to-be-confirmed)
(= waiting-for-key waiting-for-wants-key) (terms/waiting-for-payment-to-be-sent)
(= waiting-for-key send-wants-receipt-key) (terms/confirm-payment-received)
(= waiting-for-key send-has-key) (terms/send-payment)
(= waiting-for-key waiting-for-has-receipt-key) (terms/waiting-for-payment-to-be-confirmed)))
(defn next-step-text [trade]
(waiting-for-key-to-text (next-step-key trade)))
(defn has-unseen-message? [trade]
(trade-message/has-unseen-message? (:id trade)))
(defn convert-to-table-trade [trade]
(let [offer (find-offer trade)]
{ :id (:id trade)
:im-sending-amount (offer/has-amount-str offer)
:im-sending-by (offer/has-payment-type-str offer)
:im-receiving-amount (offer/wants-amount-str offer)
:im-receiving-by (offer/wants-payment-type-str offer)
:waiting-for (next-step-text trade)
:original-trade trade
:unseen-message? (if (has-unseen-message? trade) true false) }))
(defn table-open-trades []
(map convert-to-table-trade (open-trades)))
(defn as-view-trade [trade-id]
(when trade-id
(let [trade (get-record trade-id)]
(merge trade { :offer (find-offer trade) :identity (find-identity trade) }))))
(defn find-trade [foreign-trade-id trade-partner-identity]
(find-record { :foreign_trade_id foreign-trade-id :identity_id (:id trade-partner-identity) }))
(defn confirm-trade
([foreign-trade-id trade-partner-identity] (confirm-trade (find-trade foreign-trade-id trade-partner-identity)))
([trade]
(let [trade-id (:id trade)]
(update { :id trade-id :accept_confirm 1 :accept_rejected 0 :updated 0 })
trade-id)))
(defn reject-trade
([foreign-trade-id trade-partner-identity] (reject-trade (find-trade foreign-trade-id trade-partner-identity)))
([trade]
(let [trade-id (:id trade)]
(update { :id trade-id :accept_confirm 0 :accept_rejected 1 :updated 0 })
trade-id)))
(defn payment-sent [trade]
(update { :id (:id trade) :has_sent 1 })
(get-record (:id trade)))
(defn foreign-payment-sent [foreign-trade-id trade-partner-identity]
(let [trade (find-trade foreign-trade-id trade-partner-identity)]
(update { :id (:id trade) :wants_sent 1 :updated 0 })
(get-record (:id trade))))
(defn update-foreign-trade-id [trade-id foreign-trade-id]
(update { :id trade-id :foreign_trade_id foreign-trade-id }))
(defn close [trade]
(update { :id (:id trade) :closed 1 })
(get-record (:id trade)))
(defn complete? [trade]
(and (wants-sent? trade) (wants-received? trade) (has-sent? trade) (has-received? trade)))
(defn update-closed [trade]
(if (complete? trade)
(assoc trade :closed 1)
trade))
(defn close-if-complete [trade]
(if (complete? trade)
(close trade)
trade))
(defn payment-received [trade]
(update { :id (:id trade) :wants_received 1 })
(close-if-complete (get-record (:id trade))))
(defn foreign-payment-received [foreign-trade-id trade-partner-identity]
(let [trade (find-trade foreign-trade-id trade-partner-identity)]
(update { :id (:id trade) :has_received 1 :updated 0 })
(close-if-complete (get-record (:id trade)))))
(defn table-trade-messages [trade]
(map trade-message/as-table-trade-message (find-trade-messages trade)))
(defn update-trade-accept-confirm [foreign-trade trade]
(if (as-boolean (:is_acceptor trade))
(merge trade { :accept_confirm (or (:accept_confirm foreign-trade) 0)
:accept_rejected (or (:accept_rejected foreign-trade) 0) })
trade))
(defn update-has-received [foreign-trade trade]
(assoc trade :has_received (:wants_received foreign-trade)))
(defn update-wants-sent [foreign-trade trade]
(assoc trade :wants_sent (:has_sent foreign-trade)))
(defn update-trade-messages [foreign-trade trade trade-partner-identity]
(doseq [message (:messages foreign-trade)]
(trade-message/update-or-create-message (:id trade) message trade-partner-identity)))
(defn update-trade [trade-partner-identity foreign-trade]
(when-let [trade (find-trade (:id foreign-trade) trade-partner-identity)]
(update
(update-closed
(update-wants-sent foreign-trade
(update-has-received foreign-trade
(update-trade-accept-confirm foreign-trade trade)))))
(update-trade-messages foreign-trade trade trade-partner-identity)
(assoc (get-record (:id trade)) :messages (trade-message/find-matching-messages (:messages foreign-trade)))))
(defn unconfirmed-messages [trade]
(when trade
(filter #(nil? (:foreign_message_id %1)) (find-trade-messages trade))))
(defn contains-unconfirmed-message? [trade]
(seq (unconfirmed-messages trade)))
(defn requires-action? [trade]
(when-let [trade-next-step-key (next-step-key trade)]
(trade-next-step-key #{ needs-to-be-confirmed-key send-wants-receipt-key send-has-key })))
(defn trade-updated [trade]
(update { :id (:id trade) :updated 1 }))
(defn trades-to-update
([] (trades-to-update (user/current-user)))
([user] (find-records ["(((updated IS NULL OR updated = 0) AND closed = 1) OR (closed IS NULL OR closed = 0)) AND user_id = ?" (:id user)])))
(defn trade-partner-text [trade]
(identity-model/identity-text (:identity trade)))
|
|
9b8e35ebba2f178cc86b1fd51c5fd74d2f2f625e64263a1e0e6db160d66b333b | lettier/webviewhs | how-do-i-communicate-with-haskell-from-javascript.hs |
webviewhs
( C ) 2018
lettier.com
webviewhs
(C) 2018 David Lettier
lettier.com
-}
# LANGUAGE
OverloadedStrings
, DeriveGeneric
, QuasiQuotes
#
OverloadedStrings
, DeriveGeneric
, QuasiQuotes
#-}
import GHC.Generics
import Control.Monad
import Data.Text
import Data.Text.Encoding
import Data.ByteString.Lazy
import Data.Aeson
import Language.Javascript.JMacro
import qualified Graphics.UI.Webviewhs as WHS
data JsonMessage =
JsonMessage
{ _message :: Text
} deriving (Generic, Show)
instance FromJSON JsonMessage
main :: IO ()
main =
WHS.withWindowLoop
WHS.WindowParams
{ WHS.windowParamsTitle = "webviewhs - How do I communicate with Haskell from JavaScript?"
This could be a localhost URL to your single - page application ( SPA ) .
, WHS.windowParamsUri = ""
, WHS.windowParamsWidth = 800
, WHS.windowParamsHeight = 600
, WHS.windowParamsResizable = True
, WHS.windowParamsDebuggable = True
}
This is the callback JavaScript can execute .
Inside JavaScript , you call " window.external.invoke " .
(\ _window stringFromJavaScript -> do
print stringFromJavaScript
print (decode (fromStrict $ encodeUtf8 stringFromJavaScript) :: Maybe JsonMessage)
)
-- This function runs before the loop.
(WHS.WithWindowLoopSetUp (\ _window -> print ("Setting up." :: Data.Text.Text)))
-- This function runs after the loop.
(WHS.WithWindowLoopTearDown (void . return . const))
-- This function runs every window loop.
-- Return True to continue the loop or False to exit the loop.
$ \ window -> do
let message' = "Hello from JavaScript." :: Text
-- runJavaScript returns either True on success or False on failure.
success <-
WHS.runJavaScript
window
[jmacro|
window.external.invoke(JSON.stringify({ message: `(message')` }));
|]
If you rather not use , you can use runJavaScript ' .
success' <-
WHS.runJavaScript'
window
"window.external.invoke(\"This won't decode.\");"
return $ success && success'
| null | https://raw.githubusercontent.com/lettier/webviewhs/21ddd4b4872b2e7eb50c9735e70cfbdedf2c6c1a/examples/how-do-i-communicate-with-haskell-from-javascript.hs | haskell | This function runs before the loop.
This function runs after the loop.
This function runs every window loop.
Return True to continue the loop or False to exit the loop.
runJavaScript returns either True on success or False on failure. |
webviewhs
( C ) 2018
lettier.com
webviewhs
(C) 2018 David Lettier
lettier.com
-}
# LANGUAGE
OverloadedStrings
, DeriveGeneric
, QuasiQuotes
#
OverloadedStrings
, DeriveGeneric
, QuasiQuotes
#-}
import GHC.Generics
import Control.Monad
import Data.Text
import Data.Text.Encoding
import Data.ByteString.Lazy
import Data.Aeson
import Language.Javascript.JMacro
import qualified Graphics.UI.Webviewhs as WHS
data JsonMessage =
JsonMessage
{ _message :: Text
} deriving (Generic, Show)
instance FromJSON JsonMessage
main :: IO ()
main =
WHS.withWindowLoop
WHS.WindowParams
{ WHS.windowParamsTitle = "webviewhs - How do I communicate with Haskell from JavaScript?"
This could be a localhost URL to your single - page application ( SPA ) .
, WHS.windowParamsUri = ""
, WHS.windowParamsWidth = 800
, WHS.windowParamsHeight = 600
, WHS.windowParamsResizable = True
, WHS.windowParamsDebuggable = True
}
This is the callback JavaScript can execute .
Inside JavaScript , you call " window.external.invoke " .
(\ _window stringFromJavaScript -> do
print stringFromJavaScript
print (decode (fromStrict $ encodeUtf8 stringFromJavaScript) :: Maybe JsonMessage)
)
(WHS.WithWindowLoopSetUp (\ _window -> print ("Setting up." :: Data.Text.Text)))
(WHS.WithWindowLoopTearDown (void . return . const))
$ \ window -> do
let message' = "Hello from JavaScript." :: Text
success <-
WHS.runJavaScript
window
[jmacro|
window.external.invoke(JSON.stringify({ message: `(message')` }));
|]
If you rather not use , you can use runJavaScript ' .
success' <-
WHS.runJavaScript'
window
"window.external.invoke(\"This won't decode.\");"
return $ success && success'
|
11ccc97e75c5a810bc8944aa6426beeeb55593573e45dbeccddb7655cf82a1cf | NelosG/fp-tests | TAnnotated.hs | # LANGUAGE StandaloneDeriving #
module Test.TAnnotated
( propAnnotated
) where
import HW2.T1 (Annotated (..), mapAnnotated)
import HW2.T2 (distAnnotated, wrapAnnotated)
import Hedgehog (Gen)
import Test.Common (allProps, genString)
import Test.Tasty (TestTree)
deriving instance ((Show a, Show e)) => Show (Annotated e a)
deriving instance ((Eq a, Eq e)) => Eq (Annotated e a)
genAnnotated :: Gen (Annotated String String)
genAnnotated = (:#) <$> genString <*> genString
propAnnotated :: TestTree
propAnnotated = allProps "Annotated" genString genAnnotated mapAnnotated wrapAnnotated distAnnotated
| null | https://raw.githubusercontent.com/NelosG/fp-tests/7e2af5c3c3279c2045662faaff8e5f895af6af6a/hw2/test/T2/Test/TAnnotated.hs | haskell | # LANGUAGE StandaloneDeriving #
module Test.TAnnotated
( propAnnotated
) where
import HW2.T1 (Annotated (..), mapAnnotated)
import HW2.T2 (distAnnotated, wrapAnnotated)
import Hedgehog (Gen)
import Test.Common (allProps, genString)
import Test.Tasty (TestTree)
deriving instance ((Show a, Show e)) => Show (Annotated e a)
deriving instance ((Eq a, Eq e)) => Eq (Annotated e a)
genAnnotated :: Gen (Annotated String String)
genAnnotated = (:#) <$> genString <*> genString
propAnnotated :: TestTree
propAnnotated = allProps "Annotated" genString genAnnotated mapAnnotated wrapAnnotated distAnnotated
|
|
a6ab0f68a68a395b3385a30b194be505396028af7cfad0b37ae3e931235be70a | fulcrologic/semantic-ui-wrapper | ui_step_content.cljc | (ns com.fulcrologic.semantic-ui.elements.step.ui-step-content
(:require
[com.fulcrologic.semantic-ui.factory-helpers :as h]
#?(:cljs ["semantic-ui-react$StepContent" :as StepContent])))
(def ui-step-content
"A step can contain a content.
Props:
- as (elementType): An element type to render as (string or function).
- children (node): Primary content.
- className (string): Additional classes.
- content (custom): Shorthand for primary content.
- description (custom): Shorthand for StepDescription.
- title (custom): Shorthand for StepTitle."
#?(:cljs (h/factory-apply StepContent)))
| null | https://raw.githubusercontent.com/fulcrologic/semantic-ui-wrapper/7bd53f445bc4ca7e052c69596dc089282671df6c/src/main/com/fulcrologic/semantic_ui/elements/step/ui_step_content.cljc | clojure | (ns com.fulcrologic.semantic-ui.elements.step.ui-step-content
(:require
[com.fulcrologic.semantic-ui.factory-helpers :as h]
#?(:cljs ["semantic-ui-react$StepContent" :as StepContent])))
(def ui-step-content
"A step can contain a content.
Props:
- as (elementType): An element type to render as (string or function).
- children (node): Primary content.
- className (string): Additional classes.
- content (custom): Shorthand for primary content.
- description (custom): Shorthand for StepDescription.
- title (custom): Shorthand for StepTitle."
#?(:cljs (h/factory-apply StepContent)))
|
|
3f20d34044957a30d8f6732b0f7aa6535caaed96a7672ef044fb5f9315107621 | appleshan/cl-http | undefmethod.lisp | (in-package "USER")
;;;
CLOS debugging utilities - OBC
;;;
(defun method-possible-qualifiers (qualifiers)
(let ((basicquals '(:before :after :around)))
(if (member qualifiers basicquals)
(list qualifiers)
(and (consp qualifiers)
(let ((qualifier (first qualifiers)))
(or (and (symbolp qualifier)
(fboundp qualifier))
(keywordp qualifiers)
(intersection qualifiers basicquals)))
qualifiers))))
;;; Here an interesting exercise: how to find
the setf method without using ?
;;;
(defun symbol-setf-function (sym)
(unless (symbolp sym)
(error "~a is not a symbol." sym))
(let ((form (nth-value 3 (get-setf-method (list sym)))))
(if form
(symbol-function (first form))
(error "the symbol ~a does not have a SETF method definition." sym))))
(defmacro get-method (function-name &optional qualifiers &rest specializers)
`(apply #'get-method-fn ',function-name ',qualifiers ',specializers))
(defmacro undefmethod (function-name &optional qualifier &rest classes)
`(apply #'undefmethod-fn ',function-name ',qualifier ',classes))
(defun get-method-fn (function-name &optional qualifiers &rest specializers)
(unless (let ((quals (method-possible-qualifiers qualifiers)))
(if quals
(setf qualifiers quals)))
(setf specializers (cons qualifiers specializers))
(setf qualifiers nil))
(unless (or (symbolp function-name)
(and (consp function-name)
(eql (first function-name) 'SETF)))
(error "Function name is not a symbol or its setf form: ~a." function-name))
(let ((generic (fdefinition function-name)))
(values (find-method generic qualifiers (mapcar #'(lambda (spec)
(if (consp spec)
spec
(find-class spec)))
specializers))
generic)))
(defun undefmethod-fn (function-name &optional qualifier &rest classes)
(multiple-value-bind (special-method generic-function)
(apply #'get-method-fn function-name qualifier classes)
(remove-method generic-function special-method)))
| null | https://raw.githubusercontent.com/appleshan/cl-http/a7ec6bf51e260e9bb69d8e180a103daf49aa0ac2/acl/obc/server/undefmethod.lisp | lisp |
Here an interesting exercise: how to find
| (in-package "USER")
CLOS debugging utilities - OBC
(defun method-possible-qualifiers (qualifiers)
(let ((basicquals '(:before :after :around)))
(if (member qualifiers basicquals)
(list qualifiers)
(and (consp qualifiers)
(let ((qualifier (first qualifiers)))
(or (and (symbolp qualifier)
(fboundp qualifier))
(keywordp qualifiers)
(intersection qualifiers basicquals)))
qualifiers))))
the setf method without using ?
(defun symbol-setf-function (sym)
(unless (symbolp sym)
(error "~a is not a symbol." sym))
(let ((form (nth-value 3 (get-setf-method (list sym)))))
(if form
(symbol-function (first form))
(error "the symbol ~a does not have a SETF method definition." sym))))
(defmacro get-method (function-name &optional qualifiers &rest specializers)
`(apply #'get-method-fn ',function-name ',qualifiers ',specializers))
(defmacro undefmethod (function-name &optional qualifier &rest classes)
`(apply #'undefmethod-fn ',function-name ',qualifier ',classes))
(defun get-method-fn (function-name &optional qualifiers &rest specializers)
(unless (let ((quals (method-possible-qualifiers qualifiers)))
(if quals
(setf qualifiers quals)))
(setf specializers (cons qualifiers specializers))
(setf qualifiers nil))
(unless (or (symbolp function-name)
(and (consp function-name)
(eql (first function-name) 'SETF)))
(error "Function name is not a symbol or its setf form: ~a." function-name))
(let ((generic (fdefinition function-name)))
(values (find-method generic qualifiers (mapcar #'(lambda (spec)
(if (consp spec)
spec
(find-class spec)))
specializers))
generic)))
(defun undefmethod-fn (function-name &optional qualifier &rest classes)
(multiple-value-bind (special-method generic-function)
(apply #'get-method-fn function-name qualifier classes)
(remove-method generic-function special-method)))
|
cb454157ed0ad9b7b19f9b9867857502990c171793b96c6c1d062a9fde618049 | kazu-yamamoto/http3 | Decode.hs | # LANGUAGE BinaryLiterals #
module Network.QPACK.HeaderBlock.Decode where
import Control.Concurrent.STM
import qualified Data.ByteString.Char8 as BS8
import Data.CaseInsensitive
import Network.ByteOrder
import Network.HPACK (TokenHeader, HeaderTable, HeaderList)
import Network.HPACK.Internal
import Network.HPACK.Token (toToken, tokenKey)
import Imports
import Network.QPACK.HeaderBlock.Prefix
import Network.QPACK.Table
import Network.QPACK.Types
decodeTokenHeader :: DynamicTable
-> ReadBuffer
-> IO HeaderTable
decodeTokenHeader dyntbl rbuf = do
(reqip, bp) <- decodePrefix rbuf dyntbl
checkInsertionPoint dyntbl reqip
decodeSophisticated (toTokenHeader dyntbl bp) rbuf
decodeTokenHeaderS :: DynamicTable
-> ReadBuffer
-> IO HeaderList
decodeTokenHeaderS dyntbl rbuf = do
(reqip, bp) <- decodePrefix rbuf dyntbl
debug <- getDebugQPACK dyntbl
unless debug $ checkInsertionPoint dyntbl reqip
decodeSimple (toTokenHeader dyntbl bp) rbuf
toTokenHeader :: DynamicTable -> BasePoint -> Word8 -> ReadBuffer -> IO TokenHeader
toTokenHeader dyntbl bp w8 rbuf
| w8 `testBit` 7 = decodeIndexedFieldLine rbuf dyntbl bp w8
| w8 `testBit` 6 = decodeLiteralFieldLineWithNameReference rbuf dyntbl bp w8
| w8 `testBit` 5 = decodeLiteralFieldLineWithoutNameReference rbuf dyntbl bp w8
| w8 `testBit` 4 = decodeIndexedFieldLineWithPostBaseIndex rbuf dyntbl bp w8
| otherwise = decodeLiteralFieldLineWithPostBaseNameReference rbuf dyntbl bp w8
-- 4.5.2. Indexed Field Line
decodeIndexedFieldLine :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeIndexedFieldLine rbuf dyntbl bp w8 = do
i <- decodeI 6 (w8 .&. 0b00111111) rbuf
let static = w8 `testBit` 6
hidx | static = SIndex $ AbsoluteIndex i
| otherwise = DIndex $ fromHBRelativeIndex (HBRelativeIndex i) bp
ret <- atomically (entryTokenHeader <$> toIndexedEntry dyntbl hidx)
qpackDebug dyntbl $ putStrLn $ "IndexedFieldLine (" ++ show hidx ++ ") " ++ showTokenHeader ret
return ret
4.5.4 . Literal Field Line With Name Reference
decodeLiteralFieldLineWithNameReference :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeLiteralFieldLineWithNameReference rbuf dyntbl bp w8 = do
i <- decodeI 4 (w8 .&. 0b00001111) rbuf
let static = w8 `testBit` 4
hidx | static = SIndex $ AbsoluteIndex i
| otherwise = DIndex $ fromHBRelativeIndex (HBRelativeIndex i) bp
key <- atomically (entryToken <$> toIndexedEntry dyntbl hidx)
let hufdec = getHuffmanDecoder dyntbl
val <- decodeS (`clearBit` 7) (`testBit` 7) 7 hufdec rbuf
let ret = (key,val)
qpackDebug dyntbl $ putStrLn $ "LiteralFieldLineWithNameReference (" ++ show hidx ++ ") " ++ showTokenHeader ret
return ret
4.5.6 . Literal Field Line Without Name Reference
decodeLiteralFieldLineWithoutNameReference :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeLiteralFieldLineWithoutNameReference rbuf dyntbl _bp _w8 = do
ff rbuf (-1)
let hufdec = getHuffmanDecoder dyntbl
key <- toToken <$> decodeS (.&. 0b00000111) (`testBit` 3) 3 hufdec rbuf
val <- decodeS (`clearBit` 7) (`testBit` 7) 7 hufdec rbuf
let ret = (key,val)
qpackDebug dyntbl $ putStrLn $ "LiteralFieldLineWithoutNameReference " ++ showTokenHeader ret
return ret
4.5.3 . Indexed Field Line With Post - Base Index
decodeIndexedFieldLineWithPostBaseIndex :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeIndexedFieldLineWithPostBaseIndex rbuf dyntbl bp w8 = do
i <- decodeI 4 (w8 .&. 0b00001111) rbuf
let hidx = DIndex $ fromPostBaseIndex (PostBaseIndex i) bp
ret <- atomically (entryTokenHeader <$> toIndexedEntry dyntbl hidx)
qpackDebug dyntbl $ putStrLn $ "IndexedFieldLineWithPostBaseIndex (" ++ show hidx ++ " " ++ show i ++ "/" ++ show bp ++ ") " ++ showTokenHeader ret
return ret
4.5.5 . Literal Field Line With Post - Base Name Reference
decodeLiteralFieldLineWithPostBaseNameReference :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeLiteralFieldLineWithPostBaseNameReference rbuf dyntbl bp w8 = do
i <- decodeI 3 (w8 .&. 0b00000111) rbuf
let hidx = DIndex $ fromPostBaseIndex (PostBaseIndex i) bp
key <- atomically (entryToken <$> toIndexedEntry dyntbl hidx)
let hufdec = getHuffmanDecoder dyntbl
val <- decodeS (`clearBit` 7) (`testBit` 7) 7 hufdec rbuf
let ret = (key,val)
qpackDebug dyntbl $ putStrLn $ "LiteralFieldLineWithPostBaseNameReference (" ++ show hidx ++ ") " ++ showTokenHeader ret
return ret
showTokenHeader :: TokenHeader -> String
showTokenHeader (t,val) = "\"" ++ key ++ "\" \"" ++ BS8.unpack val ++ "\""
where
key = BS8.unpack $ foldedCase $ tokenKey t
| null | https://raw.githubusercontent.com/kazu-yamamoto/http3/93b2b18a3b92b313129b91b6cafefd8f228215db/Network/QPACK/HeaderBlock/Decode.hs | haskell | 4.5.2. Indexed Field Line | # LANGUAGE BinaryLiterals #
module Network.QPACK.HeaderBlock.Decode where
import Control.Concurrent.STM
import qualified Data.ByteString.Char8 as BS8
import Data.CaseInsensitive
import Network.ByteOrder
import Network.HPACK (TokenHeader, HeaderTable, HeaderList)
import Network.HPACK.Internal
import Network.HPACK.Token (toToken, tokenKey)
import Imports
import Network.QPACK.HeaderBlock.Prefix
import Network.QPACK.Table
import Network.QPACK.Types
decodeTokenHeader :: DynamicTable
-> ReadBuffer
-> IO HeaderTable
decodeTokenHeader dyntbl rbuf = do
(reqip, bp) <- decodePrefix rbuf dyntbl
checkInsertionPoint dyntbl reqip
decodeSophisticated (toTokenHeader dyntbl bp) rbuf
decodeTokenHeaderS :: DynamicTable
-> ReadBuffer
-> IO HeaderList
decodeTokenHeaderS dyntbl rbuf = do
(reqip, bp) <- decodePrefix rbuf dyntbl
debug <- getDebugQPACK dyntbl
unless debug $ checkInsertionPoint dyntbl reqip
decodeSimple (toTokenHeader dyntbl bp) rbuf
toTokenHeader :: DynamicTable -> BasePoint -> Word8 -> ReadBuffer -> IO TokenHeader
toTokenHeader dyntbl bp w8 rbuf
| w8 `testBit` 7 = decodeIndexedFieldLine rbuf dyntbl bp w8
| w8 `testBit` 6 = decodeLiteralFieldLineWithNameReference rbuf dyntbl bp w8
| w8 `testBit` 5 = decodeLiteralFieldLineWithoutNameReference rbuf dyntbl bp w8
| w8 `testBit` 4 = decodeIndexedFieldLineWithPostBaseIndex rbuf dyntbl bp w8
| otherwise = decodeLiteralFieldLineWithPostBaseNameReference rbuf dyntbl bp w8
decodeIndexedFieldLine :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeIndexedFieldLine rbuf dyntbl bp w8 = do
i <- decodeI 6 (w8 .&. 0b00111111) rbuf
let static = w8 `testBit` 6
hidx | static = SIndex $ AbsoluteIndex i
| otherwise = DIndex $ fromHBRelativeIndex (HBRelativeIndex i) bp
ret <- atomically (entryTokenHeader <$> toIndexedEntry dyntbl hidx)
qpackDebug dyntbl $ putStrLn $ "IndexedFieldLine (" ++ show hidx ++ ") " ++ showTokenHeader ret
return ret
4.5.4 . Literal Field Line With Name Reference
decodeLiteralFieldLineWithNameReference :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeLiteralFieldLineWithNameReference rbuf dyntbl bp w8 = do
i <- decodeI 4 (w8 .&. 0b00001111) rbuf
let static = w8 `testBit` 4
hidx | static = SIndex $ AbsoluteIndex i
| otherwise = DIndex $ fromHBRelativeIndex (HBRelativeIndex i) bp
key <- atomically (entryToken <$> toIndexedEntry dyntbl hidx)
let hufdec = getHuffmanDecoder dyntbl
val <- decodeS (`clearBit` 7) (`testBit` 7) 7 hufdec rbuf
let ret = (key,val)
qpackDebug dyntbl $ putStrLn $ "LiteralFieldLineWithNameReference (" ++ show hidx ++ ") " ++ showTokenHeader ret
return ret
4.5.6 . Literal Field Line Without Name Reference
decodeLiteralFieldLineWithoutNameReference :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeLiteralFieldLineWithoutNameReference rbuf dyntbl _bp _w8 = do
ff rbuf (-1)
let hufdec = getHuffmanDecoder dyntbl
key <- toToken <$> decodeS (.&. 0b00000111) (`testBit` 3) 3 hufdec rbuf
val <- decodeS (`clearBit` 7) (`testBit` 7) 7 hufdec rbuf
let ret = (key,val)
qpackDebug dyntbl $ putStrLn $ "LiteralFieldLineWithoutNameReference " ++ showTokenHeader ret
return ret
4.5.3 . Indexed Field Line With Post - Base Index
-- | Decode an Indexed Field Line With Post-Base Index: a dynamic-table
-- reference to an entry inserted at or after the base point 'bp'.
decodeIndexedFieldLineWithPostBaseIndex :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeIndexedFieldLineWithPostBaseIndex rbuf dyntbl bp w8 = do
    -- 4-bit prefix integer holds the post-base index.
    i <- decodeI 4 (w8 .&. 0b00001111) rbuf
    let hidx = DIndex $ fromPostBaseIndex (PostBaseIndex i) bp
    ret <- atomically (entryTokenHeader <$> toIndexedEntry dyntbl hidx)
    qpackDebug dyntbl $ putStrLn $ "IndexedFieldLineWithPostBaseIndex (" ++ show hidx ++ " " ++ show i ++ "/" ++ show bp ++ ") " ++ showTokenHeader ret
    return ret
-- 4.5.5.  Literal Field Line With Post-Base Name Reference
-- | Decode a Literal Field Line With Post-Base Name Reference: the name is
-- a dynamic-table entry inserted after the base point, the value a literal
-- string that follows.
decodeLiteralFieldLineWithPostBaseNameReference :: ReadBuffer -> DynamicTable -> BasePoint -> Word8 -> IO TokenHeader
decodeLiteralFieldLineWithPostBaseNameReference rbuf dyntbl bp w8 = do
    -- 3-bit prefix integer holds the post-base index of the name entry.
    i <- decodeI 3 (w8 .&. 0b00000111) rbuf
    let hidx = DIndex $ fromPostBaseIndex (PostBaseIndex i) bp
    key <- atomically (entryToken <$> toIndexedEntry dyntbl hidx)
    let hufdec = getHuffmanDecoder dyntbl
    -- Value: 7-bit length prefix, Huffman flag in bit 7.
    val <- decodeS (`clearBit` 7) (`testBit` 7) 7 hufdec rbuf
    let ret = (key,val)
    qpackDebug dyntbl $ putStrLn $ "LiteralFieldLineWithPostBaseNameReference (" ++ show hidx ++ ") " ++ showTokenHeader ret
    return ret
-- | Render a decoded header as  "key" "value"  for the debug log.
showTokenHeader :: TokenHeader -> String
showTokenHeader (t, val) =
    concat ["\"", BS8.unpack (foldedCase (tokenKey t)), "\" \"", BS8.unpack val, "\""]
|
815f718085d3f2e40c8649471f3a95adab1f59f1ba64707251b7a83b1fdbea17 | processone/ejabberd-contrib | mod_push_offline_opt.erl | -module(mod_push_offline_opt).
-export([host/1]).
-spec host(gen_mod:opts() | global | binary()) -> binary().
host(Opts) when is_map(Opts) ->
gen_mod:get_opt(host, Opts);
host(Host) ->
gen_mod:get_module_opt(Host, mod_push_offline, host).
| null | https://raw.githubusercontent.com/processone/ejabberd-contrib/bad48b0d6afeabe8f98550919138984328e89ef5/mod_push_offline/src/mod_push_offline_opt.erl | erlang | -module(mod_push_offline_opt).
-export([host/1]).
-spec host(gen_mod:opts() | global | binary()) -> binary().
host(Opts) when is_map(Opts) ->
gen_mod:get_opt(host, Opts);
host(Host) ->
gen_mod:get_module_opt(Host, mod_push_offline, host).
|
|
1ea64bd46f0c7254836888792b0d1e8b45834cbc5e465e00bc40853e9980d0c9 | johnwhitington/haskell-from-the-very-beginning-exercises | Examples.hs | p :: (Num a, Num b) => (a, b)
p = (1, 4)
q :: Num a => (a, Char)
q = (1, '1')
-- | First component of a pair (local reimplementation of 'fst').
fst' :: (a, b) -> a
fst' pair = case pair of
  (x, _) -> x
-- | Second component of a pair (local reimplementation of 'snd').
snd' :: (a, b) -> b
snd' pair = case pair of
  (_, y) -> y
-- Example association list of (key, count) pairs used by the exercises.
-- NOTE(review): the meaning of each component is inferred from the name
-- "census" — confirm against the accompanying book text.
census :: (Num a, Num b) => [(a, b)]
census = [(1, 4), (2, 2), (3, 2), (4, 3), (5, 1), (6, 2)]
-- | Value stored under the first matching key, if any.  Same contract as
-- 'Prelude.lookup'.
lookup' :: Eq a => a -> [(a, b)] -> Maybe b
lookup' _ [] = Nothing
lookup' wanted ((k, v):rest)
  | k == wanted = Just v
  | otherwise = lookup' wanted rest
-- | Insert a key/value pair: replaces the value of the first existing
-- entry with the same key, otherwise appends the new pair at the end.
add :: Eq a => a -> b -> [(a, b)] -> [(a, b)]
add k v [] = [(k, v)]
add k v ((k', v'):rest)
  | k == k' = (k, v) : rest
  | otherwise = (k', v') : add k v rest
-- | Delete the first entry with the given key, if present; later
-- duplicates of the key are kept.
remove :: Eq a => a -> [(a, b)] -> [(a, b)]
remove _ [] = []
remove k ((k', v'):rest)
  | k == k' = rest
  | otherwise = (k', v') : remove k rest
-- | True when the key is present in the association list.
-- Generalized: the original compared the 'Maybe' result against 'Nothing',
-- which forced an unnecessary 'Eq b' constraint on the value type.
-- Pattern matching decides presence without comparing values, so the
-- signature relaxes to 'Eq a' only (backward compatible: every previous
-- caller still type checks).
keyExists :: Eq a => a -> [(a, b)] -> Bool
keyExists k d =
  case lookup' k d of
    Nothing -> False
    Just _ -> True
| null | https://raw.githubusercontent.com/johnwhitington/haskell-from-the-very-beginning-exercises/18bda69bf8a0233feb6f023c6a2219b7c20e9fa1/examples/Chapter8/Examples.hs | haskell | p :: (Num a, Num b) => (a, b)
p = (1, 4)
q :: Num a => (a, Char)
q = (1, '1')
fst' :: (a, b) -> a
fst' (x, _) = x
snd' :: (a, b) -> b
snd' (_, y) = y
census :: (Num a, Num b) => [(a, b)]
census = [(1, 4), (2, 2), (3, 2), (4, 3), (5, 1), (6, 2)]
lookup' :: Eq a => a -> [(a, b)] -> Maybe b
lookup' k' [] = Nothing
lookup' k' ((k, v):xs) =
if k == k' then Just v else lookup' k' xs
add :: Eq a => a -> b -> [(a, b)] -> [(a, b)]
add k v [] = [(k, v)]
add k v ((k', v'):xs) =
if k == k'
then (k, v) : xs
else (k', v') : add k v xs
remove :: Eq a => a -> [(a, b)] -> [(a, b)]
remove k [] = []
remove k ((k', v'):xs) =
if k == k'
then xs
else (k', v'):remove k xs
keyExists :: (Eq a, Eq b) => a -> [(a, b)] -> Bool
keyExists k d =
lookup' k d /= Nothing
|
|
9ee6975dccddc129b505422bac66d0047e4a5884d746042f90620bca0a7067f7 | dimitaruzunov/fp-2018 | binary-tree.scm | (define (tree? t)
(or (null? t)
(and (list? t)
(= (length t) 3)
(tree? (cadr t))
(tree? (caddr t)))))
; Constructor and accessors for the (root left right) list representation.
(define (make-tree root left right)
  (list root left right))
(define (root t) (car t))
(define (left t) (cadr t))
(define (right t) (caddr t))
(define (empty? t) (null? t))
; A leaf is a non-empty tree whose two subtrees are both empty.
(define (leaf? tree)
  (if (empty? tree)
      #f
      (and (empty? (left tree))
           (empty? (right tree)))))
| null | https://raw.githubusercontent.com/dimitaruzunov/fp-2018/f75f0cd009cc7f41ce55a5ec71fb4b8eadafc4eb/exercises/06/binary-tree.scm | scheme | (define (tree? t)
(or (null? t)
(and (list? t)
(= (length t) 3)
(tree? (cadr t))
(tree? (caddr t)))))
(define (make-tree root left right)
(list root left right))
(define root car)
(define left cadr)
(define right caddr)
(define empty? null?)
(define (leaf? tree)
(and (not (empty? tree))
(empty? (left tree))
(empty? (right tree))))
|
|
bb070935dded4fac2757ff9607439a5ac033ab5798e98ca7087f00424d6e07dd | Lupino/haskell-periodic | periodic-run.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
module Main
( main
) where
import Control.Concurrent (forkIO, killThread)
import Control.DeepSeq (rnf)
import Control.Monad (unless, void, when)
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString.Char8 as B (pack)
import qualified Data.ByteString.Lazy as LB (null, toStrict)
import qualified Data.ByteString.Lazy.Char8 as LB (hGetContents, hPut)
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
import Data.Version (showVersion)
import Metro.Class (Transport)
import Metro.Socket (getHost, getService)
import Metro.TP.Socket (socket)
import Metro.TP.TLS (makeClientParams', tlsConfig)
import Metro.TP.WebSockets (clientConfig)
import Metro.TP.XOR (xorConfig)
import Paths_periodic_client_exe (version)
import Periodic.Trans.Job (JobT, name, withLock_, workDone,
workDone_, workFail, workload)
import Periodic.Trans.Worker (WorkerT, addFunc, broadcast,
startWorkerT, work)
import Periodic.Types (FuncName (..), LockName (..))
import System.Environment (getArgs, lookupEnv)
import System.Exit (ExitCode (..), exitSuccess)
import System.IO (hClose)
import System.Process (CreateProcess (std_in, std_out),
StdStream (CreatePipe, Inherit),
proc, waitForProcess,
withCreateProcess)
import UnliftIO (MVar, SomeException, evaluate,
mask, newEmptyMVar, onException,
putMVar, takeMVar, throwIO, try,
tryIO)
-- | Worker configuration, populated from argv and environment variables.
data Options = Options
  { host :: String -- ^ server address: "tcp://host:port" or a "unix" socket
  , xorFile :: FilePath -- ^ key file for the XOR transport ("" = disabled)
  , useTls :: Bool -- ^ use the TLS transport
  , useWs :: Bool -- ^ use the WebSockets transport
  , hostName :: String -- ^ server name for TLS
  , certKey :: FilePath -- ^ TLS private key file
  , cert :: FilePath -- ^ TLS client certificate (X.509)
  , caStore :: FilePath -- ^ trusted CA certificates
  , thread :: Int -- ^ number of worker threads
  , lockCount :: Int -- ^ maximum concurrent holders of the lock
  , lockName :: Maybe LockName -- ^ optional lock serializing job runs
  , notify :: Bool -- ^ register as a broadcast worker
  , useData :: Bool -- ^ capture stdout and return it as work data
  , useName :: Bool -- ^ append the job name to the command arguments
  , showHelp :: Bool -- ^ print usage and exit
  }
-- | Default options.  The three arguments carry environment fallbacks
-- looked up in 'main': THREAD, PERIODIC_PORT and XOR_FILE respectively.
options :: Maybe Int -> Maybe String -> Maybe String -> Options
options t h f = Options
  { host = fromMaybe "unix" h
  , xorFile = fromMaybe "" f
  , useTls = False
  , useWs = False
  , hostName = "localhost"
  , certKey = "client-key.pem"
  , cert = "client.pem"
  , caStore = "ca.pem"
  , thread = fromMaybe 1 t
  , lockCount = 1
  , lockName = Nothing
  , notify = False
  , useData = False
  , useName = True
  , showHelp = False
  }
-- | Fold recognized flags into 'Options' until the two positional
-- arguments (funcname, command) are reached; everything after the command
-- is handed to it verbatim.  Fewer than two positional arguments left
-- forces help mode (the empty FuncName/command are never used because
-- 'main' exits via 'printHelp' first).
parseOptions :: [String] -> Options -> (Options, FuncName, String, [String])
parseOptions ("-H":x:xs) opt = parseOptions xs opt { host = x }
parseOptions ("--host":x:xs) opt = parseOptions xs opt { host = x }
parseOptions ("--xor":x:xs) opt = parseOptions xs opt { xorFile = x }
parseOptions ("--tls":xs) opt = parseOptions xs opt { useTls = True }
parseOptions ("--ws":xs) opt = parseOptions xs opt { useWs = True }
parseOptions ("--hostname":x:xs) opt = parseOptions xs opt { hostName = x }
parseOptions ("--cert-key":x:xs) opt = parseOptions xs opt { certKey = x }
parseOptions ("--cert":x:xs) opt = parseOptions xs opt { cert = x }
parseOptions ("--ca":x:xs) opt = parseOptions xs opt { caStore = x }
-- NOTE(review): 'read' here throws on a non-numeric argument — confirm
-- that is acceptable for a CLI tool.
parseOptions ("--thread":x:xs) opt = parseOptions xs opt { thread = read x }
parseOptions ("--lock-count":x:xs) opt = parseOptions xs opt { lockCount = read x }
parseOptions ("--lock-name":x:xs) opt = parseOptions xs opt { lockName = Just (LockName $ B.pack x) }
parseOptions ("--help":xs) opt = parseOptions xs opt { showHelp = True }
parseOptions ("--broadcast":xs) opt = parseOptions xs opt { notify = True }
parseOptions ("--data":xs) opt = parseOptions xs opt { useData = True }
parseOptions ("--no-name":xs) opt = parseOptions xs opt { useName = False }
parseOptions ("-h":xs) opt = parseOptions xs opt { showHelp = True }
parseOptions [] opt = (opt { showHelp = True }, "", "", [])
parseOptions [_] opt = (opt { showHelp = True }, "", "", [])
parseOptions (x:y:xs) opt = (opt, FuncName $ B.pack x, y, xs)
-- | Print usage information and exit successfully.
printHelp :: IO ()
printHelp = do
  putStrLn "periodic-run - Periodic task system worker"
  putStrLn ""
  putStrLn "Usage: periodic-run [--host|-H HOST] [--xor FILE|--ws|--tls [--hostname HOSTNAME] [--cert-key FILE] [--cert FILE] [--ca FILE] [--thread THREAD] [--lock-name NAME] [--lock-count COUNT] [--broadcast] [--data] [--no-name]] funcname command [options]"
  putStrLn ""
  putStrLn "Available options:"
  putStrLn "  -H --host       Socket path [$PERIODIC_PORT]"
  putStrLn "                  Eg: tcp://:5000 (optional: unix) "
  putStrLn "     --xor        XOR Transport encode file [$XOR_FILE]"
  putStrLn "     --tls        Use tls transport"
  putStrLn "     --ws         Use websockets transport"
  putStrLn "     --hostname   Host name"
  putStrLn "     --cert-key   Private key associated"
  putStrLn "     --cert       Public certificate (X.509 format)"
  putStrLn "     --ca         Trusted certificates"
  putStrLn "     --thread     Worker thread [$THREAD]"
  putStrLn "     --lock-count Max lock count (optional: 1)"
  putStrLn "     --lock-name  The lock name (optional: no lock)"
  putStrLn "     --broadcast  Is broadcast worker"
  putStrLn "     --data       Send work data to client"
  putStrLn "     --no-name    Ignore the job name"
  putStrLn "  -h --help       Display help message"
  putStrLn ""
  putStrLn $ "Version: v" ++ showVersion version
  putStrLn ""
  exitSuccess
-- | Read environment defaults, parse the command line, validate the
-- server address and start the worker.
main :: IO ()
main = do
  -- Environment variables supply defaults that flags may override.
  h <- lookupEnv "PERIODIC_PORT"
  f <- lookupEnv "XOR_FILE"
  t <- fmap read <$> lookupEnv "THREAD"
  (opts@Options {..}, func, cmd, argv) <- flip parseOptions (options t h f) <$> getArgs
  when showHelp printHelp
  -- Only tcp:// and unix socket addresses are supported.
  when (not ("tcp" `isPrefixOf` host) && not ("unix" `isPrefixOf` host)) $ do
    putStrLn $ "Invalid host " ++ host
    printHelp
  run opts func cmd argv
-- | Register the job handler — broadcast, lock-protected, or plain,
-- depending on the options — and enter the worker loop with the
-- configured number of threads.
doWork :: Transport tp => Options -> FuncName -> String -> [String] -> WorkerT tp IO ()
doWork opts@Options{..} func cmd argv = do
  let w = processWorker opts cmd argv
  if notify then void $ broadcast func w
  else
    case lockName of
      Nothing -> void $ addFunc func w
      Just n -> void $ addFunc func $ withLock_ n lockCount w
  liftIO $ putStrLn "Worker started."
  work thread
-- | Pick the transport from the options and start the worker.
-- Clause order is significant: TLS takes precedence over WebSockets,
-- which takes precedence over XOR; an empty xorFile means a plain socket.
run :: Options -> FuncName -> String -> [String] -> IO ()
run opts@Options {useTls = True, ..} func cmd argv = do
  prms <- makeClientParams' cert [] certKey caStore (hostName, B.pack $ fromMaybe "" $ getService host)
  startWorkerT (tlsConfig prms (socket host)) $ doWork opts func cmd argv
run opts@Options {useWs = True, ..} func cmd argv =
  startWorkerT (clientConfig (socket host) (fromMaybe "0.0.0.0" $ getHost host) (fromMaybe "" $ getService host)) $ doWork opts func cmd argv
run opts@Options {xorFile = "", ..} func cmd argv =
  startWorkerT (socket host) $ doWork opts func cmd argv
run opts@Options {..} func cmd argv =
  startWorkerT (xorConfig xorFile $ socket host) $ doWork opts func cmd argv
-- | Run one job: spawn the command, feed the workload on its stdin
-- (appending the job name as a final argument unless --no-name), wait for
-- it, and report success or failure back to the server.  With --data the
-- child's stdout is captured and returned as the job result.
processWorker :: Transport tp => Options -> String -> [String] -> JobT tp IO ()
processWorker Options{..} cmd argv = do
  n <- name
  rb <- workload
  let argv' = if useName then argv ++ [n] else argv
      cp = (proc cmd argv') {std_in = CreatePipe, std_out= if useData then CreatePipe else Inherit}
  (code, out) <- liftIO $ withCreateProcess cp $ \mb_inh mb_outh _ ph ->
    case (mb_inh, mb_outh) of
      (Nothing, _) -> error "processWorker: Failed to get a stdin handle."
      -- stdout inherited: just feed stdin and wait.
      (Just inh, Nothing) -> do
        -- Writes/closes are best-effort (tryIO): the child may have
        -- exited or closed its stdin already.
        unless (LB.null rb) $ void $ tryIO $ LB.hPut inh rb
        void $ tryIO $ hClose inh
        code <- waitForProcess ph
        return (code, Nothing)
      -- stdout captured: drain it on a helper thread (withForkWait) while
      -- feeding stdin, to avoid deadlocking on full pipe buffers.
      (Just inh, Just outh) -> do
        output <- LB.hGetContents outh
        withForkWait (evaluate $ rnf output) $ \waitOut -> do
          unless (LB.null rb) $ void $ tryIO $ LB.hPut inh rb
          void $ tryIO $ hClose inh
          waitOut
          hClose outh
        code <- waitForProcess ph
        return (code, Just output)
  -- Map the exit status onto the periodic protocol.
  case code of
    ExitFailure _ -> void workFail
    ExitSuccess ->
      case out of
        Nothing -> void workDone
        Just wl -> void $ workDone_ $ LB.toStrict wl
-- | Fork a thread while doing something else, but kill it if there's an
-- exception.
--
-- This is important in the cases above because we want to kill the thread
-- that is holding the Handle lock, because when we clean up the process we
-- try to close that handle, which could otherwise deadlock.
--
withForkWait :: IO () -> (IO () -> IO a) -> IO a
withForkWait async body = do
waitVar <- newEmptyMVar :: IO (MVar (Either SomeException ()))
mask $ \restore -> do
tid <- forkIO $ try (restore async) >>= putMVar waitVar
let wait = takeMVar waitVar >>= either throwIO return
restore (body wait) `onException` killThread tid
| null | https://raw.githubusercontent.com/Lupino/haskell-periodic/d685e806caf3bb54575fc5cb1ca5a3bf1e98969c/periodic-client-exe/app/periodic-run.hs | haskell | # LANGUAGE OverloadedStrings #
| Fork a thread while doing something else, but kill it if there's an
exception.
This is important in the cases above because we want to kill the thread
that is holding the Handle lock, because when we clean up the process we
try to close that handle, which could otherwise deadlock.
| # LANGUAGE RecordWildCards #
module Main
( main
) where
import Control.Concurrent (forkIO, killThread)
import Control.DeepSeq (rnf)
import Control.Monad (unless, void, when)
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString.Char8 as B (pack)
import qualified Data.ByteString.Lazy as LB (null, toStrict)
import qualified Data.ByteString.Lazy.Char8 as LB (hGetContents, hPut)
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
import Data.Version (showVersion)
import Metro.Class (Transport)
import Metro.Socket (getHost, getService)
import Metro.TP.Socket (socket)
import Metro.TP.TLS (makeClientParams', tlsConfig)
import Metro.TP.WebSockets (clientConfig)
import Metro.TP.XOR (xorConfig)
import Paths_periodic_client_exe (version)
import Periodic.Trans.Job (JobT, name, withLock_, workDone,
workDone_, workFail, workload)
import Periodic.Trans.Worker (WorkerT, addFunc, broadcast,
startWorkerT, work)
import Periodic.Types (FuncName (..), LockName (..))
import System.Environment (getArgs, lookupEnv)
import System.Exit (ExitCode (..), exitSuccess)
import System.IO (hClose)
import System.Process (CreateProcess (std_in, std_out),
StdStream (CreatePipe, Inherit),
proc, waitForProcess,
withCreateProcess)
import UnliftIO (MVar, SomeException, evaluate,
mask, newEmptyMVar, onException,
putMVar, takeMVar, throwIO, try,
tryIO)
data Options = Options
{ host :: String
, xorFile :: FilePath
, useTls :: Bool
, useWs :: Bool
, hostName :: String
, certKey :: FilePath
, cert :: FilePath
, caStore :: FilePath
, thread :: Int
, lockCount :: Int
, lockName :: Maybe LockName
, notify :: Bool
, useData :: Bool
, useName :: Bool
, showHelp :: Bool
}
options :: Maybe Int -> Maybe String -> Maybe String -> Options
options t h f = Options
{ host = fromMaybe "unix" h
, xorFile = fromMaybe "" f
, useTls = False
, useWs = False
, hostName = "localhost"
, certKey = "client-key.pem"
, cert = "client.pem"
, caStore = "ca.pem"
, thread = fromMaybe 1 t
, lockCount = 1
, lockName = Nothing
, notify = False
, useData = False
, useName = True
, showHelp = False
}
parseOptions :: [String] -> Options -> (Options, FuncName, String, [String])
parseOptions ("-H":x:xs) opt = parseOptions xs opt { host = x }
parseOptions ("--host":x:xs) opt = parseOptions xs opt { host = x }
parseOptions ("--xor":x:xs) opt = parseOptions xs opt { xorFile = x }
parseOptions ("--tls":xs) opt = parseOptions xs opt { useTls = True }
parseOptions ("--ws":xs) opt = parseOptions xs opt { useWs = True }
parseOptions ("--hostname":x:xs) opt = parseOptions xs opt { hostName = x }
parseOptions ("--cert-key":x:xs) opt = parseOptions xs opt { certKey = x }
parseOptions ("--cert":x:xs) opt = parseOptions xs opt { cert = x }
parseOptions ("--ca":x:xs) opt = parseOptions xs opt { caStore = x }
parseOptions ("--thread":x:xs) opt = parseOptions xs opt { thread = read x }
parseOptions ("--lock-count":x:xs) opt = parseOptions xs opt { lockCount = read x }
parseOptions ("--lock-name":x:xs) opt = parseOptions xs opt { lockName = Just (LockName $ B.pack x) }
parseOptions ("--help":xs) opt = parseOptions xs opt { showHelp = True }
parseOptions ("--broadcast":xs) opt = parseOptions xs opt { notify = True }
parseOptions ("--data":xs) opt = parseOptions xs opt { useData = True }
parseOptions ("--no-name":xs) opt = parseOptions xs opt { useName = False }
parseOptions ("-h":xs) opt = parseOptions xs opt { showHelp = True }
parseOptions [] opt = (opt { showHelp = True }, "", "", [])
parseOptions [_] opt = (opt { showHelp = True }, "", "", [])
parseOptions (x:y:xs) opt = (opt, FuncName $ B.pack x, y, xs)
printHelp :: IO ()
printHelp = do
putStrLn "periodic-run - Periodic task system worker"
putStrLn ""
putStrLn "Usage: periodic-run [--host|-H HOST] [--xor FILE|--ws|--tls [--hostname HOSTNAME] [--cert-key FILE] [--cert FILE] [--ca FILE] [--thread THREAD] [--lock-name NAME] [--lock-count COUNT] [--broadcast] [--data] [--no-name]] funcname command [options]"
putStrLn ""
putStrLn "Available options:"
putStrLn " -H --host Socket path [$PERIODIC_PORT]"
putStrLn " Eg: tcp://:5000 (optional: unix) "
putStrLn " --xor XOR Transport encode file [$XOR_FILE]"
putStrLn " --tls Use tls transport"
putStrLn " --ws Use websockets transport"
putStrLn " --hostname Host name"
putStrLn " --cert-key Private key associated"
putStrLn " --cert Public certificate (X.509 format)"
putStrLn " --ca Trusted certificates"
putStrLn " --thread Worker thread [$THREAD]"
putStrLn " --lock-count Max lock count (optional: 1)"
putStrLn " --lock-name The lock name (optional: no lock)"
putStrLn " --broadcast Is broadcast worker"
putStrLn " --data Send work data to client"
putStrLn " --no-name Ignore the job name"
putStrLn " -h --help Display help message"
putStrLn ""
putStrLn $ "Version: v" ++ showVersion version
putStrLn ""
exitSuccess
main :: IO ()
main = do
h <- lookupEnv "PERIODIC_PORT"
f <- lookupEnv "XOR_FILE"
t <- fmap read <$> lookupEnv "THREAD"
(opts@Options {..}, func, cmd, argv) <- flip parseOptions (options t h f) <$> getArgs
when showHelp printHelp
when (not ("tcp" `isPrefixOf` host) && not ("unix" `isPrefixOf` host)) $ do
putStrLn $ "Invalid host " ++ host
printHelp
run opts func cmd argv
doWork :: Transport tp => Options -> FuncName -> String -> [String] -> WorkerT tp IO ()
doWork opts@Options{..} func cmd argv = do
let w = processWorker opts cmd argv
if notify then void $ broadcast func w
else
case lockName of
Nothing -> void $ addFunc func w
Just n -> void $ addFunc func $ withLock_ n lockCount w
liftIO $ putStrLn "Worker started."
work thread
run :: Options -> FuncName -> String -> [String] -> IO ()
run opts@Options {useTls = True, ..} func cmd argv = do
prms <- makeClientParams' cert [] certKey caStore (hostName, B.pack $ fromMaybe "" $ getService host)
startWorkerT (tlsConfig prms (socket host)) $ doWork opts func cmd argv
run opts@Options {useWs = True, ..} func cmd argv =
startWorkerT (clientConfig (socket host) (fromMaybe "0.0.0.0" $ getHost host) (fromMaybe "" $ getService host)) $ doWork opts func cmd argv
run opts@Options {xorFile = "", ..} func cmd argv =
startWorkerT (socket host) $ doWork opts func cmd argv
run opts@Options {..} func cmd argv =
startWorkerT (xorConfig xorFile $ socket host) $ doWork opts func cmd argv
processWorker :: Transport tp => Options -> String -> [String] -> JobT tp IO ()
processWorker Options{..} cmd argv = do
n <- name
rb <- workload
let argv' = if useName then argv ++ [n] else argv
cp = (proc cmd argv') {std_in = CreatePipe, std_out= if useData then CreatePipe else Inherit}
(code, out) <- liftIO $ withCreateProcess cp $ \mb_inh mb_outh _ ph ->
case (mb_inh, mb_outh) of
(Nothing, _) -> error "processWorker: Failed to get a stdin handle."
(Just inh, Nothing) -> do
unless (LB.null rb) $ void $ tryIO $ LB.hPut inh rb
void $ tryIO $ hClose inh
code <- waitForProcess ph
return (code, Nothing)
(Just inh, Just outh) -> do
output <- LB.hGetContents outh
withForkWait (evaluate $ rnf output) $ \waitOut -> do
unless (LB.null rb) $ void $ tryIO $ LB.hPut inh rb
void $ tryIO $ hClose inh
waitOut
hClose outh
code <- waitForProcess ph
return (code, Just output)
case code of
ExitFailure _ -> void workFail
ExitSuccess ->
case out of
Nothing -> void workDone
Just wl -> void $ workDone_ $ LB.toStrict wl
withForkWait :: IO () -> (IO () -> IO a) -> IO a
withForkWait async body = do
waitVar <- newEmptyMVar :: IO (MVar (Either SomeException ()))
mask $ \restore -> do
tid <- forkIO $ try (restore async) >>= putMVar waitVar
let wait = takeMVar waitVar >>= either throwIO return
restore (body wait) `onException` killThread tid
|
0fbef6b395f3aef08358ad997e7f108428e40a493221e2e624a21950bae38801 | haskoin/haskoin-core | Keys.hs | |
Module : Haskoin . Test . Keys
Copyright : No rights reserved
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Module : Haskoin.Test.Keys
Copyright : No rights reserved
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
-}
module Haskoin.Util.Arbitrary.Keys where
import Data.Bits (clearBit)
import Data.Coerce (coerce)
import Data.List (foldl')
import Data.Word (Word32)
import Haskoin.Crypto
import Haskoin.Keys.Common
import Haskoin.Keys.Extended
import Haskoin.Keys.Extended.Internal (Fingerprint (..))
import Haskoin.Util.Arbitrary.Crypto
import Test.QuickCheck
-- | Private key with an arbitrary compression flag.
arbitrarySecKeyI :: Gen SecKeyI
arbitrarySecKeyI = do
    a1 <- arbitrary
    a2 <- arbitrary
    return (wrapSecKey a1 a2)
-- | Arbitrary keypair; the public key is derived from the private key, so
-- both share the same compression flag.
arbitraryKeyPair :: Gen (SecKeyI, PubKeyI)
arbitraryKeyPair =
    (\k -> (k, derivePubKeyI k)) <$> arbitrarySecKeyI
-- | Arbitrary 'Fingerprint' wrapped around a random word.
arbitraryFingerprint :: Gen Fingerprint
arbitraryFingerprint = fmap Fingerprint arbitrary
-- | Arbitrary extended private key built from five independently random
-- components (same generator order as the applicative original).
arbitraryXPrvKey :: Gen XPrvKey
arbitraryXPrvKey = do
    v1 <- arbitrary
    v2 <- arbitraryFingerprint
    v3 <- arbitrary
    v4 <- arbitraryHash256
    v5 <- arbitrary
    return (XPrvKey v1 v2 v3 v4 v5)
-- | Arbitrary extended public key along with the private key it was
-- derived from.
arbitraryXPubKey :: Gen (XPrvKey, XPubKey)
arbitraryXPubKey = do
    prv <- arbitraryXPrvKey
    return (prv, deriveXPubKey prv)
{- Custom derivations -}
-- | Arbitrary derivation index with bit 31 cleared.
genIndex :: Gen Word32
genIndex = fmap (`clearBit` 31) arbitrary
-- | Arbitrary BIP-32 path index, hardened or soft with equal probability.
arbitraryBip32PathIndex :: Gen Bip32PathIndex
arbitraryBip32PathIndex =
    oneof
        [ fmap Bip32SoftIndex genIndex
        , fmap Bip32HardIndex genIndex
        ]
-- | Arbitrary BIP-32 derivation path made only of hardened components.
arbitraryHardPath :: Gen HardPath
arbitraryHardPath = do
    is <- listOf genIndex
    return $ foldl' (:|) Deriv is
-- | Arbitrary BIP-32 derivation path made only of non-hardened components.
arbitrarySoftPath :: Gen SoftPath
arbitrarySoftPath = do
    is <- listOf genIndex
    return $ foldl' (:/) Deriv is
-- | Arbitrary derivation path mixing hardened and non-hardened components.
arbitraryDerivPath :: Gen DerivPath
arbitraryDerivPath =
    fmap concatBip32Segments (listOf arbitraryBip32PathIndex)
{- | Arbitrary parsed derivation path. Can contain 'ParsedPrv', 'ParsedPub' or
'ParsedEmpty' elements.
-}
arbitraryParsedPath :: Gen ParsedPath
arbitraryParsedPath =
    -- Uniform choice among the three constructors, each applied to a
    -- freshly generated derivation path.
    oneof $ map (<$> arbitraryDerivPath) [ParsedPrv, ParsedPub, ParsedEmpty]
{- | Arbitrary message hash, private key, nonce and corresponding signature. The
signature is generated with a random message, random private key and a random
nonce.
-}
arbitrarySignature :: Gen (Hash256, SecKey, Sig)
arbitrarySignature = do
    msg <- arbitraryHash256
    sk <- arbitrary
    return (msg, sk, signHash sk msg)
| null | https://raw.githubusercontent.com/haskoin/haskoin-core/d49455a27735dbe636453e870cf4e8720fb3a80a/src/Haskoin/Util/Arbitrary/Keys.hs | haskell | | Arbitrary private key with arbitrary compressed flag.
| Arbitrary keypair, both either compressed or not.
| Arbitrary extended private key.
| Arbitrary extended public key with its corresponding private key.
Custom derivations
| Arbitrary derivation index with last bit unset.
| Arbitrary BIP-32 path index. Can be hardened or not.
| Arbitrary BIP-32 derivation path composed of only hardened derivations.
| Arbitrary BIP-32 derivation path composed of only non-hardened derivations.
| Arbitrary derivation path composed of hardened and non-hardened derivations.
| Arbitrary message hash, private key, nonce and corresponding signature. The
signature is generated with a random message, random private key and a random
nonce.
| |
Module : Haskoin . Test . Keys
Copyright : No rights reserved
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Module : Haskoin.Test.Keys
Copyright : No rights reserved
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
-}
module Haskoin.Util.Arbitrary.Keys where
import Data.Bits (clearBit)
import Data.Coerce (coerce)
import Data.List (foldl')
import Data.Word (Word32)
import Haskoin.Crypto
import Haskoin.Keys.Common
import Haskoin.Keys.Extended
import Haskoin.Keys.Extended.Internal (Fingerprint (..))
import Haskoin.Util.Arbitrary.Crypto
import Test.QuickCheck
arbitrarySecKeyI :: Gen SecKeyI
arbitrarySecKeyI = wrapSecKey <$> arbitrary <*> arbitrary
arbitraryKeyPair :: Gen (SecKeyI, PubKeyI)
arbitraryKeyPair = do
k <- arbitrarySecKeyI
return (k, derivePubKeyI k)
arbitraryFingerprint :: Gen Fingerprint
arbitraryFingerprint = Fingerprint <$> arbitrary
arbitraryXPrvKey :: Gen XPrvKey
arbitraryXPrvKey =
XPrvKey <$> arbitrary
<*> arbitraryFingerprint
<*> arbitrary
<*> arbitraryHash256
<*> arbitrary
arbitraryXPubKey :: Gen (XPrvKey, XPubKey)
arbitraryXPubKey = (\k -> (k, deriveXPubKey k)) <$> arbitraryXPrvKey
genIndex :: Gen Word32
genIndex = (`clearBit` 31) <$> arbitrary
arbitraryBip32PathIndex :: Gen Bip32PathIndex
arbitraryBip32PathIndex =
oneof
[ Bip32SoftIndex <$> genIndex
, Bip32HardIndex <$> genIndex
]
arbitraryHardPath :: Gen HardPath
arbitraryHardPath = foldl' (:|) Deriv <$> listOf genIndex
arbitrarySoftPath :: Gen SoftPath
arbitrarySoftPath = foldl' (:/) Deriv <$> listOf genIndex
arbitraryDerivPath :: Gen DerivPath
arbitraryDerivPath = concatBip32Segments <$> listOf arbitraryBip32PathIndex
| Arbitrary parsed derivation path . Can contain ' ParsedPrv ' , ' ParsedPub ' or
' ParsedEmpty ' elements .
'ParsedEmpty' elements.
-}
arbitraryParsedPath :: Gen ParsedPath
arbitraryParsedPath =
oneof
[ ParsedPrv <$> arbitraryDerivPath
, ParsedPub <$> arbitraryDerivPath
, ParsedEmpty <$> arbitraryDerivPath
]
arbitrarySignature :: Gen (Hash256, SecKey, Sig)
arbitrarySignature = do
m <- arbitraryHash256
key <- arbitrary
let sig = signHash key m
return (m, key, sig)
|
508cc6701e120fc6821a6742ec62d4aad863d2d6960f01de3c8ff489dd9e86f1 | egonSchiele/chips | Chips.hs | module Chips (
module Chips.Types
,module Chips.Core
,module Chips.Utils
,module Chips.Imports
,module Chips.UserInput
,module Chips.Globals
,module Chips.GameState
,module Chips.Enemies
,module Chips.Position
,module Chips.CurrentTile
,module Chips.Move
) where
import Chips.Types
import Chips.Core
import Chips.Utils
import Chips.Imports
import Chips.UserInput
import Chips.Globals
import Chips.GameState
import Chips.Enemies
import Chips.Position
import Chips.CurrentTile
import Chips.Move
| null | https://raw.githubusercontent.com/egonSchiele/chips/14bb957f9ad42fa05c5edc56e50b90fcde461a77/src/Chips.hs | haskell | module Chips (
module Chips.Types
,module Chips.Core
,module Chips.Utils
,module Chips.Imports
,module Chips.UserInput
,module Chips.Globals
,module Chips.GameState
,module Chips.Enemies
,module Chips.Position
,module Chips.CurrentTile
,module Chips.Move
) where
import Chips.Types
import Chips.Core
import Chips.Utils
import Chips.Imports
import Chips.UserInput
import Chips.Globals
import Chips.GameState
import Chips.Enemies
import Chips.Position
import Chips.CurrentTile
import Chips.Move
|
|
bbe6342cb2746398795b6b61452f429a03de43031e4e808cf77c96fc38678eec | alda-lang/alda-core | repeats_test.clj | (ns alda.lisp.repeats-test
(:require [clojure.test :refer :all]
[alda.lisp :refer :all]))
(deftest repeats-test
(testing "alternate endings/numbered repeats"
(is (= [[(alda.lisp/note (alda.lisp/pitch :c))
(alda.lisp/note (alda.lisp/pitch :d))]
[(alda.lisp/note (alda.lisp/pitch :c))
(alda.lisp/note (alda.lisp/pitch :e))]]
(alda.lisp/times 2
[(alda.lisp/note (alda.lisp/pitch :c))
[[1] (alda.lisp/note (alda.lisp/pitch :d))]
[[2] (alda.lisp/note (alda.lisp/pitch :e))]])))
(is (= [[(alda.lisp/note (alda.lisp/pitch :c))]
[(alda.lisp/note (alda.lisp/pitch :c))
(alda.lisp/note (alda.lisp/pitch :d))
(alda.lisp/note (alda.lisp/pitch :e))]
[(alda.lisp/note (alda.lisp/pitch :d))
(alda.lisp/note (alda.lisp/pitch :e))]
[(alda.lisp/note (alda.lisp/pitch :c))]]
(alda.lisp/times 4
[[[1 2 4] (alda.lisp/note (alda.lisp/pitch :c))]
[[2 3] [(alda.lisp/note (alda.lisp/pitch :d))
(alda.lisp/note (alda.lisp/pitch :e))]]]))))
(testing "alternate endings range errors"
(is (thrown? AssertionError
(alda.lisp/times 3
[[[0 2] (alda.lisp/note (alda.lisp/pitch :c))]
[[1 3] (alda.lisp/note (alda.lisp/pitch :d))]])))
(is (thrown? AssertionError
(alda.lisp/times 3
[[[1 2] (alda.lisp/note (alda.lisp/pitch :c))]
[[2 4] (alda.lisp/note (alda.lisp/pitch :d))]])))))
| null | https://raw.githubusercontent.com/alda-lang/alda-core/4c92eb4fe363485193c58b77b1ec8e36c8866fb5/test/alda/lisp/repeats_test.clj | clojure | (ns alda.lisp.repeats-test
(:require [clojure.test :refer :all]
[alda.lisp :refer :all]))
(deftest repeats-test
(testing "alternate endings/numbered repeats"
(is (= [[(alda.lisp/note (alda.lisp/pitch :c))
(alda.lisp/note (alda.lisp/pitch :d))]
[(alda.lisp/note (alda.lisp/pitch :c))
(alda.lisp/note (alda.lisp/pitch :e))]]
(alda.lisp/times 2
[(alda.lisp/note (alda.lisp/pitch :c))
[[1] (alda.lisp/note (alda.lisp/pitch :d))]
[[2] (alda.lisp/note (alda.lisp/pitch :e))]])))
(is (= [[(alda.lisp/note (alda.lisp/pitch :c))]
[(alda.lisp/note (alda.lisp/pitch :c))
(alda.lisp/note (alda.lisp/pitch :d))
(alda.lisp/note (alda.lisp/pitch :e))]
[(alda.lisp/note (alda.lisp/pitch :d))
(alda.lisp/note (alda.lisp/pitch :e))]
[(alda.lisp/note (alda.lisp/pitch :c))]]
(alda.lisp/times 4
[[[1 2 4] (alda.lisp/note (alda.lisp/pitch :c))]
[[2 3] [(alda.lisp/note (alda.lisp/pitch :d))
(alda.lisp/note (alda.lisp/pitch :e))]]]))))
(testing "alternate endings range errors"
(is (thrown? AssertionError
(alda.lisp/times 3
[[[0 2] (alda.lisp/note (alda.lisp/pitch :c))]
[[1 3] (alda.lisp/note (alda.lisp/pitch :d))]])))
(is (thrown? AssertionError
(alda.lisp/times 3
[[[1 2] (alda.lisp/note (alda.lisp/pitch :c))]
[[2 4] (alda.lisp/note (alda.lisp/pitch :d))]])))))
|
|
a8f10ce1c985076ecadf7e9df666b5dd70d650c20c94f8412bc61a3ea92c5927 | originrose/cortex | tensor_operations_test.clj | (ns ^:gpu cortex.compute.cuda.tensor-operations-test
(:require [cortex.verify.tensor.operations :as verify-tensor-operations]
[cortex.compute.verify.utils
:refer [def-double-float-test
def-all-dtype-test
*datatype*
def-int-long-test
test-wrapper]]
[clojure.test :refer :all]
[cortex.compute.cpu.driver :refer [driver]]
[cortex.compute.cpu.tensor-math]))
(use-fixtures :each test-wrapper)
(def-all-dtype-test max-operation
(verify-tensor-operations/max-operation (driver) *datatype*))
(def-all-dtype-test min-operation
(verify-tensor-operations/min-operation (driver) *datatype*))
(def-all-dtype-test ceil-operation
(verify-tensor-operations/ceil-operation (driver) *datatype*))
(def-all-dtype-test floor-operation
(verify-tensor-operations/floor-operation (driver) *datatype*))
(def-double-float-test logistic-operation
(verify-tensor-operations/logistic-operation (driver) *datatype*))
(def-double-float-test tanh-operation
(verify-tensor-operations/tanh-operation (driver) *datatype*))
(def-double-float-test max-operation
(verify-tensor-operations/max-operation (driver) *datatype*))
(def-all-dtype-test exp-operation
(verify-tensor-operations/exp-operation (driver) *datatype*))
(def-all-dtype-test multiply-operation
(verify-tensor-operations/multiply-operation (driver) *datatype*))
(def-all-dtype-test add-operation
(verify-tensor-operations/multiply-operation (driver) *datatype*))
(def-all-dtype-test subtract-operation
(verify-tensor-operations/subtract-operation (driver) *datatype*))
(def-all-dtype-test >-operation
(verify-tensor-operations/>-operation (driver) *datatype*))
(def-all-dtype-test >=-operation
(verify-tensor-operations/>-operation (driver) *datatype*))
(def-all-dtype-test <-operation
(verify-tensor-operations/>-operation (driver) *datatype*))
(def-all-dtype-test <=-operation
(verify-tensor-operations/>-operation (driver) *datatype*))
(def-all-dtype-test bit-and-operation
(verify-tensor-operations/bit-and-operation (driver) *datatype*))
(def-all-dtype-test bit-xor-operation
(verify-tensor-operations/bit-xor-operation (driver) *datatype*))
(def-all-dtype-test where-operation
(verify-tensor-operations/where-operation (driver) *datatype*))
(def-all-dtype-test new-tensor-operation
(verify-tensor-operations/new-tensor-operation (driver) *datatype*))
| null | https://raw.githubusercontent.com/originrose/cortex/94b1430538e6187f3dfd1697c36ff2c62b475901/test/clj/cortex/compute/cuda/tensor_operations_test.clj | clojure | (ns ^:gpu cortex.compute.cuda.tensor-operations-test
(:require [cortex.verify.tensor.operations :as verify-tensor-operations]
[cortex.compute.verify.utils
:refer [def-double-float-test
def-all-dtype-test
*datatype*
def-int-long-test
test-wrapper]]
[clojure.test :refer :all]
[cortex.compute.cpu.driver :refer [driver]]
[cortex.compute.cpu.tensor-math]))
(use-fixtures :each test-wrapper)
(def-all-dtype-test max-operation
(verify-tensor-operations/max-operation (driver) *datatype*))
(def-all-dtype-test min-operation
(verify-tensor-operations/min-operation (driver) *datatype*))
(def-all-dtype-test ceil-operation
(verify-tensor-operations/ceil-operation (driver) *datatype*))
(def-all-dtype-test floor-operation
(verify-tensor-operations/floor-operation (driver) *datatype*))
(def-double-float-test logistic-operation
(verify-tensor-operations/logistic-operation (driver) *datatype*))
(def-double-float-test tanh-operation
(verify-tensor-operations/tanh-operation (driver) *datatype*))
(def-double-float-test max-operation
(verify-tensor-operations/max-operation (driver) *datatype*))
(def-all-dtype-test exp-operation
(verify-tensor-operations/exp-operation (driver) *datatype*))
(def-all-dtype-test multiply-operation
(verify-tensor-operations/multiply-operation (driver) *datatype*))
(def-all-dtype-test add-operation
(verify-tensor-operations/multiply-operation (driver) *datatype*))
(def-all-dtype-test subtract-operation
(verify-tensor-operations/subtract-operation (driver) *datatype*))
(def-all-dtype-test >-operation
(verify-tensor-operations/>-operation (driver) *datatype*))
(def-all-dtype-test >=-operation
(verify-tensor-operations/>-operation (driver) *datatype*))
(def-all-dtype-test <-operation
(verify-tensor-operations/>-operation (driver) *datatype*))
(def-all-dtype-test <=-operation
(verify-tensor-operations/>-operation (driver) *datatype*))
(def-all-dtype-test bit-and-operation
(verify-tensor-operations/bit-and-operation (driver) *datatype*))
(def-all-dtype-test bit-xor-operation
(verify-tensor-operations/bit-xor-operation (driver) *datatype*))
(def-all-dtype-test where-operation
(verify-tensor-operations/where-operation (driver) *datatype*))
(def-all-dtype-test new-tensor-operation
(verify-tensor-operations/new-tensor-operation (driver) *datatype*))
|
|
70d159669cc6781d300f0410248c7378e5ff47bf1976b4a4710a121c74c436b9 | mindreframer/clojure-stuff | 06_functions.clj | (ns koans.06-functions
(:require [koan-engine.core :refer :all]))
(defn multiply-by-ten [n]
(* 10 n))
(defn square [n] (* n n))
(meditations
"Calling a function is like giving it a hug with parentheses"
(= __ (square 9))
"Functions are usually defined before they are used"
(= __ (multiply-by-ten 2))
"But they can also be defined inline"
(= __ ((fn [n] (* 5 n)) 2))
"Or using an even shorter syntax"
(= __ (#(* 15 %) 4))
"Even anonymous functions may take multiple arguments"
(= __ (#(+ %1 %2 %3) 4 5 6))
"Arguments can also be skipped"
(= __ (#(* 15 %2) 1 2))
"One function can beget another"
(= 9 (((fn [] ___)) 4 5))
"Functions can also take other functions as input"
(= 20 ((fn [f] (f 4 5))
___))
"Higher-order functions take function arguments"
(= 25 (___
(fn [n] (* n n))))
"But they are often better written using the names of functions"
(= 25 (___ square)))
| null | https://raw.githubusercontent.com/mindreframer/clojure-stuff/1e761b2dacbbfbeec6f20530f136767e788e0fe3/github.com/functional-koans/clojure-koans/src/koans/06_functions.clj | clojure | (ns koans.06-functions
(:require [koan-engine.core :refer :all]))
(defn multiply-by-ten [n]
(* 10 n))
(defn square [n] (* n n))
(meditations
"Calling a function is like giving it a hug with parentheses"
(= __ (square 9))
"Functions are usually defined before they are used"
(= __ (multiply-by-ten 2))
"But they can also be defined inline"
(= __ ((fn [n] (* 5 n)) 2))
"Or using an even shorter syntax"
(= __ (#(* 15 %) 4))
"Even anonymous functions may take multiple arguments"
(= __ (#(+ %1 %2 %3) 4 5 6))
"Arguments can also be skipped"
(= __ (#(* 15 %2) 1 2))
"One function can beget another"
(= 9 (((fn [] ___)) 4 5))
"Functions can also take other functions as input"
(= 20 ((fn [f] (f 4 5))
___))
"Higher-order functions take function arguments"
(= 25 (___
(fn [n] (* n n))))
"But they are often better written using the names of functions"
(= 25 (___ square)))
|
|
ef196212ae1373f856596c870ae0b9de6af9e8b23792874f1032220a4710fd6d | macourtney/Dark-Exchange | wants_panel.clj | (ns darkexchange.controller.offer.wants-panel
(:require [darkexchange.controller.widgets.currency-combobox :as currency-combobox]
[darkexchange.controller.widgets.payment-type-combobox :as payment-type-combobox]
[seesaw.core :as seesaw-core])
(:import [java.math BigDecimal]))
(defn find-i-want-amount [parent-component]
(seesaw-core/select parent-component ["#i-want-amount"]))
(defn find-i-want-currency-combobox [parent-component]
(seesaw-core/select parent-component ["#i-want-currency"]))
(defn find-i-want-payment-type-combobox [parent-component]
(seesaw-core/select parent-component ["#i-want-payment-type"]))
(defn find-wants-panel [parent-component]
(seesaw-core/select parent-component ["#wants-panel"]))
(defn i-want-amount [parent-component]
(BigDecimal. (seesaw-core/text (find-i-want-amount parent-component))))
(defn i-want-currency [parent-component]
(:currency (seesaw-core/selection (find-i-want-currency-combobox parent-component))))
(defn i-want-payment-type [parent-component]
(:payment-type (seesaw-core/selection (find-i-want-payment-type-combobox parent-component))))
(defn wants-offer [parent-component]
{ :wants_amount (i-want-amount parent-component)
:wants_currency (:code (i-want-currency parent-component))
:wants_payment_type (:code (i-want-payment-type parent-component)) })
(defn load-currencies [parent-component]
(currency-combobox/load-data (find-i-want-currency-combobox parent-component))
parent-component)
(defn load-payment-types [parent-component]
(payment-type-combobox/load-data
(find-i-want-payment-type-combobox parent-component)
(find-i-want-currency-combobox parent-component))
parent-component)
(defn load-data [parent-component]
(load-payment-types (load-currencies parent-component)))
(defn attach [parent-component]
(payment-type-combobox/attach
(find-i-want-payment-type-combobox parent-component)
(find-i-want-currency-combobox parent-component))
parent-component) | null | https://raw.githubusercontent.com/macourtney/Dark-Exchange/1654d05cda0c81585da7b8e64f9ea3e2944b27f1/src/darkexchange/controller/offer/wants_panel.clj | clojure | (ns darkexchange.controller.offer.wants-panel
(:require [darkexchange.controller.widgets.currency-combobox :as currency-combobox]
[darkexchange.controller.widgets.payment-type-combobox :as payment-type-combobox]
[seesaw.core :as seesaw-core])
(:import [java.math BigDecimal]))
(defn find-i-want-amount [parent-component]
(seesaw-core/select parent-component ["#i-want-amount"]))
(defn find-i-want-currency-combobox [parent-component]
(seesaw-core/select parent-component ["#i-want-currency"]))
(defn find-i-want-payment-type-combobox [parent-component]
(seesaw-core/select parent-component ["#i-want-payment-type"]))
(defn find-wants-panel [parent-component]
(seesaw-core/select parent-component ["#wants-panel"]))
(defn i-want-amount [parent-component]
(BigDecimal. (seesaw-core/text (find-i-want-amount parent-component))))
(defn i-want-currency [parent-component]
(:currency (seesaw-core/selection (find-i-want-currency-combobox parent-component))))
(defn i-want-payment-type [parent-component]
(:payment-type (seesaw-core/selection (find-i-want-payment-type-combobox parent-component))))
(defn wants-offer [parent-component]
{ :wants_amount (i-want-amount parent-component)
:wants_currency (:code (i-want-currency parent-component))
:wants_payment_type (:code (i-want-payment-type parent-component)) })
(defn load-currencies [parent-component]
(currency-combobox/load-data (find-i-want-currency-combobox parent-component))
parent-component)
(defn load-payment-types [parent-component]
(payment-type-combobox/load-data
(find-i-want-payment-type-combobox parent-component)
(find-i-want-currency-combobox parent-component))
parent-component)
(defn load-data [parent-component]
(load-payment-types (load-currencies parent-component)))
(defn attach [parent-component]
(payment-type-combobox/attach
(find-i-want-payment-type-combobox parent-component)
(find-i-want-currency-combobox parent-component))
parent-component) |
|
9a1fa4f0a554a88ff0783a52bafe5352d25c310d8ecf4af73f517c60e47f5256 | PaulRivier/kiwi | Server.hs | {-# LANGUAGE OverloadedStrings #-}
module Kiwi.Server
( kiwiServer
) where
import Control.Concurrent (forkIO)
import Control.Monad.Reader
import Data.IORef (newIORef, readIORef, atomicWriteIORef)
import Data.List (stripPrefix, isInfixOf)
import Data . Maybe ( )
import qualified Data.Map.Strict as M
import qualified Data.SearchEngine as SE
-- import Data.Text (Text, intercalate)
-- import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
-- import qualified Network.Wai as WAI
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import qualified Network.Wai.Middleware.Static as Static
import Network.Wai.Middleware.Static ((>->), (<|>))
import qualified System.Directory as D
import qualified System.FilePath as FP
import qualified Text.Mustache as X
import Text.Printf (printf)
import Web.Scotty.Trans
import qualified Kiwi.ConfigFile as Conf
import Kiwi.Controller
import qualified Kiwi.PagesDB as DB
import Kiwi.Types
import Kiwi.Utils
import qualified Utils.DocIndex as DI
-- gets :: (AppState -> a) -> ServerM a
-- gets f = ask >>= return . f
pagesFSDir, imagesFSDir, filesFSDir :: FilePath
pagesFSDir = "pages"
filesFSDir = "files"
imagesFSDir = "images"
kiwiServer :: FP.FilePath -> Conf.KiwiConfig ->
Static.CacheContainer -> IO ()
kiwiServer cfp c cache = do
(ss, t) <- timeIO $ initServerState cfp c
displayStartupTime ss t
let runActionToIO m = runReaderT (runServerM m) ss
scottyT (Conf.port c) runActionToIO $ do
when (Conf.logging c) $ middleware logStdoutDev
kiwiRoute ss cache
where
displayStartupTime ss t = do
let ts = printf "%.2f" t
nb <- (show . M.size . DI.store . pagesIndex) <$> liftIO (readIORef $ pagesDB ss)
putStrLn $ concat ["Kiwi scaned ", nb, " pages and 1 theme in ", ts, "s"]
kiwiRoute :: ServerState -> Static.CacheContainer -> WebM ()
kiwiRoute ss cache = do
get "/" $ redirect "/browse/"
get "/page/:source/:pId" $ do
source <- param "source"
pId <- param "pId"
withLogin $ servePage (source, pId)
getPath " ^/page/:source/[^.]+ " $ \p - > withLogin $ do
-- servePage p
get " /browse / " $ withLogin $ do
-- tags <- param "tags"
tags
get " /browse - meta/:meta/:keys " $ withLogin $ do
-- meta <- param "meta"
-- keys <- param "keys"
serveBrowseMeta meta keys
get "/browse/:req" $ withLogin $ do
req <- TL.toStrict <$> param "req"
serveBrowseAll req
get "/search" $ withLogin $ do
query <- param "query"
serveSearch query
post "/reload" $ ifAdmin $ do
withLogin updateDB
html ""
post "/edit-page" $ ifAdmin $ do
source <- param "page-source"
pId <- param "page-id"
serveEditPage (editorCommand ss) (source, pId)
get "/login" $ serveLogin
post "/login" $ logUserIn
post "/logout" $ logUserOut
get "/show-index" $ ifAdmin $ do
db <- getDB
html $ TL.pack $ show $ DI.index $ pagesIndex db
serveStatic cache [("static/", staticDir ss)]
forM_ (contentSources ss) $ \src ->
serveStatic cache $ filesParts (contentDir ss) src
notFound serveNotFound
where
filesParts cd src = [ ( concat ["image/", src, "/"]
, FP.joinPath [cd, src, imagesFSDir] )
, ( concat ["file/", src, "/"]
, FP.joinPath [cd, src, filesFSDir]) ]
updateDB :: ActM ()
updateDB = do
dbR <- serverM $ asks pagesDB
db' <- liftIO $ readIORef dbR
cd <- asksK contentDir
db <- liftAndCatchIO $ DB.updatePagesDB cd pagesFSDir db'
liftAndCatchIO $ atomicWriteIORef dbR db
initServerState :: FP.FilePath -> Conf.KiwiConfig -> IO ServerState
initServerState cfp conf = do
kiwiDir' <- D.makeAbsolute (FP.takeDirectory cfp)
let contentDir' = FP.combine kiwiDir' (Conf.contentDir conf)
sources <- filter (\(x:_) -> x /= '.') <$> D.listDirectory contentDir'
let staticDir' = FP.joinPath [kiwiDir', (Conf.themeDir conf), "static"]
-- let pagesRootDir = FP.combine contentDir' pagesFSDir
tpl <- compileTemplate $ FP.joinPath [kiwiDir', (Conf.themeDir conf), "mustache"]
accounts' <- loadAccounts $ FP.combine kiwiDir' "_accounts"
sess <- loadSessions $ FP.combine kiwiDir' "_sessions"
sessR <- newIORef sess
db <- DB.updatePagesDB contentDir' pagesFSDir
(DB.emptyPagesDB (Conf.defaultMeta conf)
(Conf.customMetaConfig conf)
(Conf.sourcesConfig conf))
_ <- warmUpSearchEngine (searchEngine db)
dbR <- newIORef db
return $ ServerState {
kiwiName = Conf.name conf
, contentDir = contentDir'
, contentSources = sources
, staticDir = staticDir'
, kiwiDir = kiwiDir'
, editorCommand = Conf.editor conf
, uiLang = findInterfaceLang (Conf.uiLang conf)
, template = tpl
, accounts = accounts'
, sessions = sessR
, login = Nothing
, pagesDB = dbR
, tocSetting = Conf.toc conf
}
where
findInterfaceLang :: UI_Lang -> UI_Lang
findInterfaceLang UI_French = UI_French
findInterfaceLang _ = UI_English
warmUpSearchEngine se = forkIO $ do -- deep eval of search engine
True <- return $ SE.invariant se
return ()
compileTemplate :: FP.FilePath -> IO KiwiTemplate
compileTemplate dir =
KiwiTemplate <$>
make "layout" <*>
make "home" <*>
make "login" <*>
make "page" <*>
make "tagged" <*>
make "browse" <*>
make "agenda" <*>
make "search-results" <*>
make "not-found" <*>
make "forbidden"
where
make t = X.compileMustacheDir t dir
serveStatic :: Static.CacheContainer -> [(String, FP.FilePath)] -> WebM ()
serveStatic cache parts =
let opts = Static.defaultOptions { Static.cacheContainer = cache }
basePolicy = mconcat [ Static.noDots
, Static.isNotAbsolute
, Static.predicate (not . isInfixOf "/.")
]
contentPolicies = map (\(url,path) -> contentPolicy url path) parts
policy = basePolicy >-> foldl1 (<|>) contentPolicies
in middleware $
Static.staticPolicyWithOptions opts policy
where
contentPolicy url fspath = mconcat [ Static.policy (stripPrefix url)
, Static.addBase fspath ]
| null | https://raw.githubusercontent.com/PaulRivier/kiwi/c89bd5b7586939f4491d56f3d842c731047c5830/src/Kiwi/Server.hs | haskell | # LANGUAGE OverloadedStrings #
import Data.Text (Text, intercalate)
import qualified Data.Text as T
import qualified Network.Wai as WAI
gets :: (AppState -> a) -> ServerM a
gets f = ask >>= return . f
servePage p
tags <- param "tags"
meta <- param "meta"
keys <- param "keys"
let pagesRootDir = FP.combine contentDir' pagesFSDir
deep eval of search engine |
module Kiwi.Server
( kiwiServer
) where
import Control.Concurrent (forkIO)
import Control.Monad.Reader
import Data.IORef (newIORef, readIORef, atomicWriteIORef)
import Data.List (stripPrefix, isInfixOf)
import Data . Maybe ( )
import qualified Data.Map.Strict as M
import qualified Data.SearchEngine as SE
import qualified Data.Text.Lazy as TL
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import qualified Network.Wai.Middleware.Static as Static
import Network.Wai.Middleware.Static ((>->), (<|>))
import qualified System.Directory as D
import qualified System.FilePath as FP
import qualified Text.Mustache as X
import Text.Printf (printf)
import Web.Scotty.Trans
import qualified Kiwi.ConfigFile as Conf
import Kiwi.Controller
import qualified Kiwi.PagesDB as DB
import Kiwi.Types
import Kiwi.Utils
import qualified Utils.DocIndex as DI
pagesFSDir, imagesFSDir, filesFSDir :: FilePath
pagesFSDir = "pages"
filesFSDir = "files"
imagesFSDir = "images"
kiwiServer :: FP.FilePath -> Conf.KiwiConfig ->
Static.CacheContainer -> IO ()
kiwiServer cfp c cache = do
(ss, t) <- timeIO $ initServerState cfp c
displayStartupTime ss t
let runActionToIO m = runReaderT (runServerM m) ss
scottyT (Conf.port c) runActionToIO $ do
when (Conf.logging c) $ middleware logStdoutDev
kiwiRoute ss cache
where
displayStartupTime ss t = do
let ts = printf "%.2f" t
nb <- (show . M.size . DI.store . pagesIndex) <$> liftIO (readIORef $ pagesDB ss)
putStrLn $ concat ["Kiwi scaned ", nb, " pages and 1 theme in ", ts, "s"]
kiwiRoute :: ServerState -> Static.CacheContainer -> WebM ()
kiwiRoute ss cache = do
get "/" $ redirect "/browse/"
get "/page/:source/:pId" $ do
source <- param "source"
pId <- param "pId"
withLogin $ servePage (source, pId)
getPath " ^/page/:source/[^.]+ " $ \p - > withLogin $ do
get " /browse / " $ withLogin $ do
tags
get " /browse - meta/:meta/:keys " $ withLogin $ do
serveBrowseMeta meta keys
get "/browse/:req" $ withLogin $ do
req <- TL.toStrict <$> param "req"
serveBrowseAll req
get "/search" $ withLogin $ do
query <- param "query"
serveSearch query
post "/reload" $ ifAdmin $ do
withLogin updateDB
html ""
post "/edit-page" $ ifAdmin $ do
source <- param "page-source"
pId <- param "page-id"
serveEditPage (editorCommand ss) (source, pId)
get "/login" $ serveLogin
post "/login" $ logUserIn
post "/logout" $ logUserOut
get "/show-index" $ ifAdmin $ do
db <- getDB
html $ TL.pack $ show $ DI.index $ pagesIndex db
serveStatic cache [("static/", staticDir ss)]
forM_ (contentSources ss) $ \src ->
serveStatic cache $ filesParts (contentDir ss) src
notFound serveNotFound
where
filesParts cd src = [ ( concat ["image/", src, "/"]
, FP.joinPath [cd, src, imagesFSDir] )
, ( concat ["file/", src, "/"]
, FP.joinPath [cd, src, filesFSDir]) ]
updateDB :: ActM ()
updateDB = do
dbR <- serverM $ asks pagesDB
db' <- liftIO $ readIORef dbR
cd <- asksK contentDir
db <- liftAndCatchIO $ DB.updatePagesDB cd pagesFSDir db'
liftAndCatchIO $ atomicWriteIORef dbR db
initServerState :: FP.FilePath -> Conf.KiwiConfig -> IO ServerState
initServerState cfp conf = do
kiwiDir' <- D.makeAbsolute (FP.takeDirectory cfp)
let contentDir' = FP.combine kiwiDir' (Conf.contentDir conf)
sources <- filter (\(x:_) -> x /= '.') <$> D.listDirectory contentDir'
let staticDir' = FP.joinPath [kiwiDir', (Conf.themeDir conf), "static"]
tpl <- compileTemplate $ FP.joinPath [kiwiDir', (Conf.themeDir conf), "mustache"]
accounts' <- loadAccounts $ FP.combine kiwiDir' "_accounts"
sess <- loadSessions $ FP.combine kiwiDir' "_sessions"
sessR <- newIORef sess
db <- DB.updatePagesDB contentDir' pagesFSDir
(DB.emptyPagesDB (Conf.defaultMeta conf)
(Conf.customMetaConfig conf)
(Conf.sourcesConfig conf))
_ <- warmUpSearchEngine (searchEngine db)
dbR <- newIORef db
return $ ServerState {
kiwiName = Conf.name conf
, contentDir = contentDir'
, contentSources = sources
, staticDir = staticDir'
, kiwiDir = kiwiDir'
, editorCommand = Conf.editor conf
, uiLang = findInterfaceLang (Conf.uiLang conf)
, template = tpl
, accounts = accounts'
, sessions = sessR
, login = Nothing
, pagesDB = dbR
, tocSetting = Conf.toc conf
}
where
findInterfaceLang :: UI_Lang -> UI_Lang
findInterfaceLang UI_French = UI_French
findInterfaceLang _ = UI_English
True <- return $ SE.invariant se
return ()
compileTemplate :: FP.FilePath -> IO KiwiTemplate
compileTemplate dir =
KiwiTemplate <$>
make "layout" <*>
make "home" <*>
make "login" <*>
make "page" <*>
make "tagged" <*>
make "browse" <*>
make "agenda" <*>
make "search-results" <*>
make "not-found" <*>
make "forbidden"
where
make t = X.compileMustacheDir t dir
serveStatic :: Static.CacheContainer -> [(String, FP.FilePath)] -> WebM ()
serveStatic cache parts =
let opts = Static.defaultOptions { Static.cacheContainer = cache }
basePolicy = mconcat [ Static.noDots
, Static.isNotAbsolute
, Static.predicate (not . isInfixOf "/.")
]
contentPolicies = map (\(url,path) -> contentPolicy url path) parts
policy = basePolicy >-> foldl1 (<|>) contentPolicies
in middleware $
Static.staticPolicyWithOptions opts policy
where
contentPolicy url fspath = mconcat [ Static.policy (stripPrefix url)
, Static.addBase fspath ]
|
5a7f6f976fc6616de0fbdc405fb785fa8afc6130d26f81a4e0d9ae8c03265f74 | agentm/project-m36 | Deriving.hs | # LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
# LANGUAGE KindSignatures #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE UndecidableInstances #
| Newtypes for deriving instances with customization using
-- @DerivingVia@.
--
-- Inspired by
-- [Dhall.Deriving](-1.33.1/docs/Dhall-Deriving.html)
which in turn was inspired by blog post
[ Mirror Mirror : Reflection and Encoding Via]( / mirror_mirror.html ) .
--
-- required extensions:
--
-- * DerivingVia
*
* TypeOperators ( for @('<<<')@ and @('>>>')@ )
-- * DataKinds (for types that take a string argument)
module ProjectM36.Tupleable.Deriving
* DerivingVia Newtype
Codec(..)
-- * Type-level Options
, ModifyOptions(..)
, Field
-- * Type-level 'T.Text' -> 'T.Text' Functions
, ModifyText(..)
, AddPrefix
, DropPrefix
, AddSuffix
, DropSuffix
, UpperCase
, LowerCase
, TitleCase
, CamelCase
, PascalCase
, SnakeCase
, SpinalCase
, TrainCase
-- * Composition
, AsIs
, type (<<<)
, type (>>>)
-- * Re-Exports
, Generic
, module ProjectM36.Tupleable
) where
import Data.Maybe (fromMaybe)
import Data.Proxy
import qualified Data.Text as T
import Data.Text.Manipulate
import GHC.TypeLits
import GHC.Generics (Generic, Rep)
import ProjectM36.Tupleable
| A newtype wrapper to allow for easier deriving of ' ' instances
-- with customization.
--
-- The @tag@ type variable can be used to specify options for converting the
-- datatype to and from a 'RelationTuple'. For example,
--
-- > data Example = Example
-- > { exampleFoo :: Int
-- > , exampleBar :: Int
-- > }
> deriving stock ( Generic )
> deriving ( )
> via Codec ( Field ( DropPrefix " example " > > > CamelCase ) ) Example
--
will derive an instance of ' ' where field names are translated into
-- attribute names by dropping the prefix @"example"@ and then converting the
-- result to camelCase. So @"exampleFoo"@ becomes @"foo"@ and @"exampleBar"@
becomes @"bar"@.
--
Requires the @DerivingGeneric@ and @DerivingVia@ extensions to be enabled .
newtype Codec tag a = Codec { unCodec :: a }
instance (ModifyOptions tag, Generic a, TupleableG (Rep a)) => Tupleable (Codec tag a) where
toTuple v = genericToTuple opts (unCodec v)
where
opts = modifyOptions (Proxy :: Proxy tag) defaultTupleableOptions
fromTuple tup = Codec <$> genericFromTuple opts tup
where
opts = modifyOptions (Proxy :: Proxy tag) defaultTupleableOptions
toAttributes _ = genericToAttributes opts (Proxy :: Proxy a)
where
opts = modifyOptions (Proxy :: Proxy tag) defaultTupleableOptions
-- | Types that can be used as tags for 'Codec'.
class ModifyOptions a where
modifyOptions :: proxy a -> TupleableOptions -> TupleableOptions
-- | Change how record field names are translated into attribute names. For
-- example,
--
-- > Field SnakeCase
--
will translate the field name @fooBar@ into the attribute name @foo_bar@.
data Field a
instance ModifyText a => ModifyOptions (Field a) where
modifyOptions _ opts = opts { fieldModifier = newFieldModifier }
where
newFieldModifier = modifyText (Proxy :: Proxy a) . fieldModifier opts
-- | Types that can be used in options that modify 'T.Text' such as in 'Field'.
class ModifyText a where
modifyText :: proxy a -> T.Text -> T.Text
| Add a prefix . @AddPrefix " foo"@ will transform @"bar"@ into @"foobar"@.
data AddPrefix (prefix :: Symbol)
instance KnownSymbol prefix => ModifyText (AddPrefix prefix) where
modifyText _ oldText = prefixText <> oldText
where
prefixText = T.pack (symbolVal (Proxy :: Proxy prefix))
| Drop a prefix . @DropPrefix " bar"@ will transform @"foobar"@ into @"foo"@.
data DropPrefix (prefix :: Symbol)
instance KnownSymbol prefix => ModifyText (DropPrefix prefix) where
  modifyText _ oldText = fromMaybe oldText (T.stripPrefix prefixText oldText)
    where
      prefixText = T.pack (symbolVal (Proxy :: Proxy prefix))

-- | Add a suffix. @AddSuffix "bar"@ will transform @"foo"@ into @"foobar"@.
data AddSuffix (suffix :: Symbol)

instance KnownSymbol suffix => ModifyText (AddSuffix suffix) where
  modifyText _ oldText = oldText <> suffixText
    where
      suffixText = T.pack (symbolVal (Proxy :: Proxy suffix))

-- | Drop a suffix. @DropSuffix "bar"@ will transform @"foobar"@ into @"foo"@.
data DropSuffix (suffix :: Symbol)

instance KnownSymbol suffix => ModifyText (DropSuffix suffix) where
  modifyText _ oldText = fromMaybe oldText (T.stripSuffix suffixText oldText)
    where
      suffixText = T.pack (symbolVal (Proxy :: Proxy suffix))

-- | Convert to UPPERCASE. Will transform @"foobar"@ into @\"FOOBAR\"@.
data UpperCase

instance ModifyText UpperCase where
  modifyText _ = T.toUpper

-- | Convert to lowercase. Will transform @\"FOOBAR\"@ into @"foobar"@.
data LowerCase

instance ModifyText LowerCase where
  modifyText _ = T.toLower

-- | Convert to Title Case. Will transform @"fooBar"@ into @\"Foo Bar\"@.
data TitleCase

instance ModifyText TitleCase where
  modifyText _ = toTitle

-- | Convert to camelCase. Will transform @"foo_bar"@ into @"fooBar"@.
data CamelCase

instance ModifyText CamelCase where
  modifyText _ = toCamel

-- | Convert to PascalCase. Will transform @"foo_bar"@ into @\"FooBar\"@.
data PascalCase

instance ModifyText PascalCase where
  modifyText _ = toPascal

-- | Convert to snake_case. Will transform @"fooBar"@ into @"foo_bar"@.
data SnakeCase

instance ModifyText SnakeCase where
  modifyText _ = toSnake

-- | Convert to spinal-case. Will transform @"fooBar"@ into @"foo-bar"@.
data SpinalCase

instance ModifyText SpinalCase where
  modifyText _ = toSpinal

-- | Convert to Train-Case. Will transform @"fooBar"@ into @\"Foo-Bar\"@.
data TrainCase

instance ModifyText TrainCase where
  modifyText _ = toTrain

-- | Identity option.
type AsIs = ()

instance ModifyOptions () where
  modifyOptions _ = id

instance ModifyText () where
  modifyText _ = id

-- | Right to left composition.
--
-- Requires the @TypeOperators@ extension to be enabled.
data a <<< b

instance (ModifyOptions a, ModifyOptions b) => ModifyOptions (a <<< b) where
  modifyOptions _ = modifyOptions (Proxy :: Proxy a) . modifyOptions (Proxy :: Proxy b)

instance (ModifyText a, ModifyText b) => ModifyText (a <<< b) where
  modifyText _ = modifyText (Proxy :: Proxy a) . modifyText (Proxy :: Proxy b)

-- | Left to right composition.
--
-- Requires the @TypeOperators@ extension to be enabled.
data a >>> b

instance (ModifyOptions a, ModifyOptions b) => ModifyOptions (a >>> b) where
  modifyOptions _ = modifyOptions (Proxy :: Proxy b) . modifyOptions (Proxy :: Proxy a)

instance (ModifyText a, ModifyText b) => ModifyText (a >>> b) where
  modifyText _ = modifyText (Proxy :: Proxy b) . modifyText (Proxy :: Proxy a)
| null | https://raw.githubusercontent.com/agentm/project-m36/57a75b35e84bebf0945db6dae53350fda83f24b6/src/lib/ProjectM36/Tupleable/Deriving.hs | haskell | @DerivingVia@.
Inspired by
[Dhall.Deriving](-1.33.1/docs/Dhall-Deriving.html)
required extensions:
* DerivingVia
* DataKinds (for types that take a string argument)
* Type-level Options
* Type-level 'T.Text' -> 'T.Text' Functions
* Composition
* Re-Exports
with customization.
The @tag@ type variable can be used to specify options for converting the
datatype to and from a 'RelationTuple'. For example,
> data Example = Example
> { exampleFoo :: Int
> , exampleBar :: Int
> }
attribute names by dropping the prefix @"example"@ and then converting the
result to camelCase. So @"exampleFoo"@ becomes @"foo"@ and @"exampleBar"@
| Types that can be used as tags for 'Codec'.
| Change how record field names are translated into attribute names. For
example,
> Field SnakeCase
| Types that can be used in options that modify 'T.Text' such as in 'Field'.
| Convert to snake_case. Will transform @"fooBar"@ into @"foo_bar"@.
| Convert to Train-Case. Will transform @"fooBar"@ into @\"Foo-Bar\"@.
| Identity option.
| Right to left composition.
| Left to right composition.
| # LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
# LANGUAGE KindSignatures #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE UndecidableInstances #
| Newtypes for deriving instances with customization using
which in turn was inspired by blog post
[ Mirror Mirror : Reflection and Encoding Via]( / mirror_mirror.html ) .
*
* TypeOperators ( for @('<<<')@ and @('>>>')@ )
module ProjectM36.Tupleable.Deriving
* DerivingVia Newtype
Codec(..)
, ModifyOptions(..)
, Field
, ModifyText(..)
, AddPrefix
, DropPrefix
, AddSuffix
, DropSuffix
, UpperCase
, LowerCase
, TitleCase
, CamelCase
, PascalCase
, SnakeCase
, SpinalCase
, TrainCase
, AsIs
, type (<<<)
, type (>>>)
, Generic
, module ProjectM36.Tupleable
) where
import Data.Maybe (fromMaybe)
import Data.Proxy
import qualified Data.Text as T
import Data.Text.Manipulate
import GHC.TypeLits
import GHC.Generics (Generic, Rep)
import ProjectM36.Tupleable
| A newtype wrapper to allow for easier deriving of ' ' instances
> deriving stock ( Generic )
> deriving ( )
> via Codec ( Field ( DropPrefix " example " > > > CamelCase ) ) Example
will derive an instance of ' ' where field names are translated into
becomes @"bar"@.
Requires the @DerivingGeneric@ and @DerivingVia@ extensions to be enabled .
newtype Codec tag a = Codec { unCodec :: a }
instance (ModifyOptions tag, Generic a, TupleableG (Rep a)) => Tupleable (Codec tag a) where
toTuple v = genericToTuple opts (unCodec v)
where
opts = modifyOptions (Proxy :: Proxy tag) defaultTupleableOptions
fromTuple tup = Codec <$> genericFromTuple opts tup
where
opts = modifyOptions (Proxy :: Proxy tag) defaultTupleableOptions
toAttributes _ = genericToAttributes opts (Proxy :: Proxy a)
where
opts = modifyOptions (Proxy :: Proxy tag) defaultTupleableOptions
class ModifyOptions a where
modifyOptions :: proxy a -> TupleableOptions -> TupleableOptions
will translate the field name @fooBar@ into the attribute name @foo_bar@.
data Field a
instance ModifyText a => ModifyOptions (Field a) where
modifyOptions _ opts = opts { fieldModifier = newFieldModifier }
where
newFieldModifier = modifyText (Proxy :: Proxy a) . fieldModifier opts
class ModifyText a where
modifyText :: proxy a -> T.Text -> T.Text
| Add a prefix . @AddPrefix " foo"@ will transform @"bar"@ into @"foobar"@.
data AddPrefix (prefix :: Symbol)
instance KnownSymbol prefix => ModifyText (AddPrefix prefix) where
modifyText _ oldText = prefixText <> oldText
where
prefixText = T.pack (symbolVal (Proxy :: Proxy prefix))
| Drop a prefix . @DropPrefix " bar"@ will transform @"foobar"@ into @"foo"@.
data DropPrefix (prefix :: Symbol)
instance KnownSymbol prefix => ModifyText (DropPrefix prefix) where
modifyText _ oldText = fromMaybe oldText (T.stripPrefix prefixText oldText)
where
prefixText = T.pack (symbolVal (Proxy :: Proxy prefix))
| Add a suffix . @AddSuffix " bar"@ will transform @"foo"@ into @"foobar"@.
data AddSuffix (suffix :: Symbol)
instance KnownSymbol suffix => ModifyText (AddSuffix suffix) where
modifyText _ oldText = oldText <> suffixText
where
suffixText = T.pack (symbolVal (Proxy :: Proxy suffix))
| Drop a suffix . @DropSuffix " bar"@ will transform @"foobar"@ into @"foo"@.
data DropSuffix (suffix :: Symbol)
instance KnownSymbol suffix => ModifyText (DropSuffix suffix) where
modifyText _ oldText = fromMaybe oldText (T.stripSuffix suffixText oldText)
where
suffixText = T.pack (symbolVal (Proxy :: Proxy suffix))
| Convert to UPPERCASE . Will transform @"foobar"@ into @\"FOOBAR\"@.
data UpperCase
instance ModifyText UpperCase where
modifyText _ = T.toUpper
| Convert to lowercase . Will transform @\"FOOBAR\"@ into @"foobar"@.
data LowerCase
instance ModifyText LowerCase where
modifyText _ = T.toLower
| Convert to Title Case . Will transform @"fooBar"@ into Bar\"@.
data TitleCase
instance ModifyText TitleCase where
modifyText _ = toTitle
| Convert to camelCase . Will transform @"foo_bar"@ into @"fooBar"@.
data CamelCase
instance ModifyText CamelCase where
modifyText _ = toCamel
| Convert to PascalCase . Will transform @"foo_bar"@ into @\"FooBar\"@.
data PascalCase
instance ModifyText PascalCase where
modifyText _ = toPascal
data SnakeCase
instance ModifyText SnakeCase where
modifyText _ = toSnake
| Convert to spinal - case . will transform @"fooBar"@ into @"foo - bar"@.
data SpinalCase
instance ModifyText SpinalCase where
modifyText _ = toSpinal
data TrainCase
instance ModifyText TrainCase where
modifyText _ = toTrain
type AsIs = ()
instance ModifyOptions () where
modifyOptions _ = id
instance ModifyText () where
modifyText _ = id
Requires the @TypeOperators@ extension to be enabled .
data a <<< b
instance (ModifyOptions a, ModifyOptions b) => ModifyOptions (a <<< b) where
modifyOptions _ = modifyOptions (Proxy :: Proxy a) . modifyOptions (Proxy :: Proxy b)
instance (ModifyText a, ModifyText b) => ModifyText (a <<< b) where
modifyText _ = modifyText (Proxy :: Proxy a) . modifyText (Proxy :: Proxy b)
Requires the @TypeOperators@ extension to be enabled .
data a >>> b
instance (ModifyOptions a, ModifyOptions b) => ModifyOptions (a >>> b) where
modifyOptions _ = modifyOptions (Proxy :: Proxy b) . modifyOptions (Proxy :: Proxy a)
instance (ModifyText a, ModifyText b) => ModifyText (a >>> b) where
modifyText _ = modifyText (Proxy :: Proxy b) . modifyText (Proxy :: Proxy a)
|
07c7ec2f7356ba8c9a7ea3ed649f1e5a3d9395bc805c9d78ebc2c94e82640a9b | spell-music/csound-expression | AbletonLinkOpcodes.hs | module Csound.Typed.Opcode.AbletonLinkOpcodes (
link_beat_force, link_beat_get, link_beat_request, link_create, ableton_link_enable, link_is_enabled, link_metro, link_peers, link_tempo_get, link_tempo_set) where
import Control.Monad.Trans.Class
import Csound.Dynamic
import Csound.Typed
--
-- | Forces the global network Ableton Link session to adopt a specific beat number and time.
--
-- Forces the global network Ableton Link session to adopt a specific beat number and time, like a conductor stopping an orchestra and immediately starting it again.
--
-- > link_beat_force i_peer, k_beat [, k_at_time_seconds [, k_quantum]]
--
-- csound doc: <http://csound.com/docs/manual/link_beat_force.html>
link_beat_force :: D -> Sig -> SE ()
link_beat_force b1 b2 = SE $ (depT_ =<<) $ lift $ f <$> unD b1 <*> unSig b2
  where f a1 a2 = opcs "link_beat_force" [(Xr,[Ir,Kr,Kr,Kr])] [a1,a2]
-- |
-- Returns the beat, phase with respect to the local quantum, and current time for the session.
--
-- Returns the beat number, phase of the beat with respect to the local quantum of the beat, and current time for the global network Ableton Link session.
--
-- > k_beat_number, k_phase, k_current_time_seconds link_beat_get i_peer [, k_quantum]
--
-- csound doc: <http://csound.com/docs/manual/link_beat_get.html>
link_beat_get :: D -> (Sig,Sig,Sig)
link_beat_get b1 = pureTuple $ f <$> unD b1
  where f a1 = mopcs "link_beat_get" ([Kr,Kr,Kr],[Ir,Kr]) [a1]
-- |
-- Requests the global network Ableton Link session to adopt a specific beat number and time.
--
-- > link_beat_request i_peer, k_beat [, k_at_time_seconds [, k_quantum]]
--
-- csound doc: <http://csound.com/docs/manual/link_beat_request.html>
link_beat_request :: D -> Sig -> SE ()
link_beat_request b1 b2 = SE $ (depT_ =<<) $ lift $ f <$> unD b1 <*> unSig b2
  where f a1 a2 = opcs "link_beat_request" [(Xr,[Ir,Kr,Kr,Kr])] [a1,a2]
-- |
-- Creates a peer in an Ableton Link network session.
--
-- Creates a peer in an Ableton Link network session. The first peer in a session determines the initial tempo of the session. The value returned must be passed as the first parameter to all subsequent Ableton Link opcode calls for this peer.
--
-- > i_peer link_create [i_bpm]
--
-- csound doc: <http://csound.com/docs/manual/link_create.html>
link_create :: D
link_create = D $ return $ f
  where f = opcs "link_create" [(Ir,[Ir])] []
-- |
-- Enable or disable synchronization with the Ableton Link session.
--
-- Enable or disable synchronization with the global network Ableton Link session tempo and beat.
--
-- > ableton_link_enable i_peer [, k_enable]
--
-- csound doc: <http://csound.com/docs/manual/ableton_link_enable.html>
ableton_link_enable :: D -> SE ()
ableton_link_enable b1 = SE $ (depT_ =<<) $ lift $ f <$> unD b1
  where f a1 = opcs "ableton_link_enable" [(Xr,[Ir,Kr])] [a1]
-- |
-- Returns whether or not this peer is synchronized with the global network Ableton Link session.
--
-- Returns whether or not the beat and time of this peer are synchronized with the global network Ableton Link session.
--
-- > k_is_enabled link_is_enabled i_peer
--
-- csound doc: <http://csound.com/docs/manual/link_is_enabled.html>
link_is_enabled :: D -> Sig
link_is_enabled b1 = Sig $ f <$> unD b1
  where f a1 = opcs "link_is_enabled" [(Kr,[Ir])] [a1]
-- |
-- Returns a trigger that is 1 on the beat and 0 otherwise along with beat, phase, and time for this session of Ableton Link.
--
-- Returns a trigger that is 1 on the beat and 0 otherwise along with the beat, phase, and current time of Ableton Link for this session for a given quantum.
--
-- > k_trigger, k_beat, k_phase, k_current_time_seconds link_metro i_peer [, k_quantum]
--
-- csound doc: <http://csound.com/docs/manual/link_metro.html>
link_metro :: D -> (Sig,Sig,Sig,Sig)
link_metro b1 = pureTuple $ f <$> unD b1
  where f a1 = mopcs "link_metro" ([Kr,Kr,Kr,Kr],[Ir,Kr]) [a1]
-- |
-- Returns the number of peers in the session.
--
-- Returns the number of peers in the global network Ableton Link session.
--
-- > k_count link_peers i_peer
--
-- csound doc: <http://csound.com/docs/manual/link_peers.html>
link_peers :: D -> Sig
link_peers b1 = Sig $ f <$> unD b1
  where f a1 = opcs "link_peers" [(Kr,[Ir])] [a1]
-- |
-- Returns the current tempo of the global network Ableton Link session.
--
-- > k_bpm link_tempo_get i_peer
--
-- csound doc: <http://csound.com/docs/manual/link_tempo_get.html>
link_tempo_get :: D -> Sig
link_tempo_get b1 = Sig $ f <$> unD b1
  where f a1 = opcs "link_tempo_get" [(Kr,[Ir])] [a1]
-- |
-- Sets the tempo.
--
-- Sets the local tempo if this peer is not enabled; sets the tempo of the global network Ableton Link session if this peer is enabled.
--
-- > link_tempo_set i_peer, k_bpm [, k_at_time_seconds]
--
-- csound doc: <http://csound.com/docs/manual/link_tempo_set.html>
link_tempo_set :: D -> Sig -> SE ()
link_tempo_set b1 b2 = SE $ (depT_ =<<) $ lift $ f <$> unD b1 <*> unSig b2
  where f a1 a2 = opcs "link_tempo_set" [(Xr,[Ir,Kr,Kr])] [a1,a2]
|
csound doc: <>
|
Returns the beat, phase with respect to the local quantum, and current time for the session.
csound doc: <>
|
csound doc: <>
|
> i_peer link_create [i_bpm]
csound doc: <>
|
> ableton_link_enable i_peer [, k_enable]
csound doc: <>
|
> k_is_enabled link_is_enabled i_peer
csound doc: <>
|
csound doc: <>
|
Returns the number of peers in the session.
csound doc: <>
|
csound doc: <>
|
Sets the tempo.
csound doc: <> | module Csound.Typed.Opcode.AbletonLinkOpcodes (
link_beat_force, link_beat_get, link_beat_request, link_create, ableton_link_enable, link_is_enabled, link_metro, link_peers, link_tempo_get, link_tempo_set) where
import Control.Monad.Trans.Class
import Csound.Dynamic
import Csound.Typed
Forces the global network Ableton Link session to adopt a specific beat number and time .
Forces the global network Ableton Link session to adopt a specific beat number and time , like a conductor stopping an orchestra and immediately starting it again .
> link_beat_force i_peer , k_beat [ , k_at_time_seconds [ , k_quantum ] ]
link_beat_force :: D -> Sig -> SE ()
link_beat_force b1 b2 = SE $ (depT_ =<<) $ lift $ f <$> unD b1 <*> unSig b2
where f a1 a2 = opcs "link_beat_force" [(Xr,[Ir,Kr,Kr,Kr])] [a1,a2]
Returns the beat number , phase of the beat with respect to the local quantum of the beat , and current time for the global network Ableton Link session .
> k_beat_number , k_phase , k_current_time_seconds link_beat_get i_peer [ , k_quantum ]
link_beat_get :: D -> (Sig,Sig,Sig)
link_beat_get b1 = pureTuple $ f <$> unD b1
where f a1 = mopcs "link_beat_get" ([Kr,Kr,Kr],[Ir,Kr]) [a1]
Requests the global network Ableton Link session to adopt a specific beat number and time .
> link_beat_request i_peer , k_beat [ , k_at_time_seconds [ , k_quantum ] ]
link_beat_request :: D -> Sig -> SE ()
link_beat_request b1 b2 = SE $ (depT_ =<<) $ lift $ f <$> unD b1 <*> unSig b2
where f a1 a2 = opcs "link_beat_request" [(Xr,[Ir,Kr,Kr,Kr])] [a1,a2]
Creates a peer in an Ableton Link network session .
Creates a peer in an Ableton Link network session . The first peer in a session determines the initial tempo of the session . The value returned must be passed as the first parameter to all subsequent Ableton Link opcode calls for this peer .
link_create :: D
link_create = D $ return $ f
where f = opcs "link_create" [(Ir,[Ir])] []
Enable or disable synchronization with the Ableton Link session .
Enable or disable synchronization with the global network Ableton Link session tempo and beat .
ableton_link_enable :: D -> SE ()
ableton_link_enable b1 = SE $ (depT_ =<<) $ lift $ f <$> unD b1
where f a1 = opcs "ableton_link_enable" [(Xr,[Ir,Kr])] [a1]
Returns whether or not this peer is synchronized with the global network Ableton Link session .
Returns whether or not the beat and time of his peer are synchronized with the global network Ableton Link session .
link_is_enabled :: D -> Sig
link_is_enabled b1 = Sig $ f <$> unD b1
where f a1 = opcs "link_is_enabled" [(Kr,[Ir])] [a1]
Returns a trigger that is 1 on the beat and 0 otherwise along with beat , phase , and time for this session of Ableton Link .
Returns a trigger that is 1 on the beat and 0 otherwise along with the beat , phase , and current time of Ableton Link for this session for a given quantum .
> k_trigger , k_beat , k_phase , k_current_time_seconds link_metro i_peer [ , k_quantum ]
link_metro :: D -> (Sig,Sig,Sig,Sig)
link_metro b1 = pureTuple $ f <$> unD b1
where f a1 = mopcs "link_metro" ([Kr,Kr,Kr,Kr],[Ir,Kr]) [a1]
Returns the number of peers in the global network Ableton Link session .
> k_count link_peers i_peer
link_peers :: D -> Sig
link_peers b1 = Sig $ f <$> unD b1
where f a1 = opcs "link_peers" [(Kr,[Ir])] [a1]
Returns the current tempo of the global network Ableton Link session .
> i_peer
link_tempo_get :: D -> Sig
link_tempo_get b1 = Sig $ f <$> unD b1
where f a1 = opcs "link_tempo_get" [(Kr,[Ir])] [a1]
Sets the local tempo if this peer is not enabled ; sets the tempo of the global network Ableton Link session if this peer is enabled .
> link_tempo_set i_peer , , k_at_time_seconds ]
link_tempo_set :: D -> Sig -> SE ()
link_tempo_set b1 b2 = SE $ (depT_ =<<) $ lift $ f <$> unD b1 <*> unSig b2
where f a1 a2 = opcs "link_tempo_set" [(Xr,[Ir,Kr,Kr])] [a1,a2] |
1ac3bc97f4a928a2aaa6da2e2452db1e5b54d51612fd3dcc340bb38b0ffd604b | JPMoresmau/dbIDE | Parser.hs | {-# LANGUAGE PatternGuards #-}
-- | Parses the output from GHCi
-- A lot of this work is Copyright 2014 Neil Mitchell. See <https://github.com/ndmitchell/ghcid>
module Language.Haskell.Ghci.Parser
( parseShowModules
, parseLoad
)
where
import System.FilePath
import Data.Char
import Data.List
import Language.Haskell.Ghci.Types
-- | Parse messages from show modules command
--
-- Each input line is expected to look like
-- @Main             ( src/Main.hs, interpreted )@.
-- Lines that do not contain a @\"( \"@ are dropped by the pattern guard.
parseShowModules :: [String] -> [(String, FilePath)]
parseShowModules xs =
  [ (takeWhile (not . isSpace) $ dropWhile isSpace a, takeWhile (/= ',') b)
  | x <- xs, (a,'(':' ':b) <- [break (== '(') x]]
-- | Parse messages given on reload
--
-- First clause: a @[n of m] Compiling ...@ progress line is turned into
-- 'Loading' values by reusing 'parseShowModules' on the text after the
-- closing bracket.  Second clause: a @file:line:col:@ header followed by
-- indented continuation lines becomes one 'Message' (severity 'Warning'
-- when the text after the position starts with @\"Warning:\"@, otherwise
-- 'Error').  Anything else is skipped.
parseLoad :: [String] -> [Load]
parseLoad (('[':xs):rest) =
  map (uncurry Loading) (parseShowModules [drop 11 $ dropWhile (/= ']') xs]) ++
  parseLoad rest
parseLoad (x:xs)
  | not $ " " `isPrefixOf` x
  , (file,':':rest) <- break (== ':') x
  , takeExtension file `elem` [".hs",".lhs"]
  , (pos,rest2) <- span (\c -> c == ':' || isDigit c) rest
  , [p1,p2] <- map read $ words $ map (\c -> if c == ':' then ' ' else c) pos
  , (msg,las) <- span (isPrefixOf " ") xs
  , rest3 <- dropWhile isSpace rest2
  , sev <- if "Warning:" `isPrefixOf` rest3 then Warning else Error
  = Message sev file (p1,p2) (x:msg) : parseLoad las
parseLoad (_:xs) = parseLoad xs
parseLoad [] = []
| Parse messages from show modules command
| Parse messages given on reload | # LANGUAGE PatternGuards #
A lot of this work is Copyright 2014 . See < >
module Language.Haskell.Ghci.Parser
( parseShowModules
, parseLoad
)
where
import System.FilePath
import Data.Char
import Data.List
import Language.Haskell.Ghci.Types
parseShowModules :: [String] -> [(String, FilePath)]
parseShowModules xs =
[ (takeWhile (not . isSpace) $ dropWhile isSpace a, takeWhile (/= ',') b)
| x <- xs, (a,'(':' ':b) <- [break (== '(') x]]
parseLoad :: [String] -> [Load]
parseLoad (('[':xs):rest) =
map (uncurry Loading) (parseShowModules [drop 11 $ dropWhile (/= ']') xs]) ++
parseLoad rest
parseLoad (x:xs)
| not $ " " `isPrefixOf` x
, (file,':':rest) <- break (== ':') x
, takeExtension file `elem` [".hs",".lhs"]
, (pos,rest2) <- span (\c -> c == ':' || isDigit c) rest
, [p1,p2] <- map read $ words $ map (\c -> if c == ':' then ' ' else c) pos
, (msg,las) <- span (isPrefixOf " ") xs
, rest3 <- dropWhile isSpace rest2
, sev <- if "Warning:" `isPrefixOf` rest3 then Warning else Error
= Message sev file (p1,p2) (x:msg) : parseLoad las
parseLoad (_:xs) = parseLoad xs
parseLoad [] = [] |
edbcfe25431bcc9b66d2414c585969ea323b18ba5ed9f822a2de0f1f3df2acbd | aeternity/aeternity | aest_community_fork_SUITE.erl | -module(aest_community_fork_SUITE).
%=== EXPORTS ===================================================================
% Common Test exports
-export([all/0]).
-export([init_per_suite/1]).
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).
-export([end_per_suite/1]).
% Test cases
-export([fork_chain/1, fork_sync/1]).
-import(aest_nodes, [
setup_nodes/2,
start_node/2,
stop_node/3,
wait_for_value/4,
wait_for_startup/3,
get_block/2,
get_top/1,
get_status/1
]).
%=== INCLUDES ==================================================================
-include_lib("stdlib/include/assert.hrl").
%=== MACROS ====================================================================
%% Upper bound (ms) we allow per block when waiting for a chain height.
-define(MINING_TIMEOUT, 10 * 1000).
%% Fork signalling interval: key blocks in [start, end) carry the signal.
-define(SIGNALLING_START_HEIGHT, 5).
-define(SIGNALLING_END_HEIGHT, 15).
%% Number of signalling blocks within the interval required to adopt the fork.
-define(SIGNALLING_BLOCK_COUNT, 9).
%% Key-block info field value used as the fork-support signal.
-define(INFO_FIELD, 1234).
%% Consensus protocol version introduced by the community fork.
-define(VERSION, 3).
%% The new protocol, if adopted, takes effect at the signalling end height.
-define(FORK_HEIGHT, ?SIGNALLING_END_HEIGHT).
%% Fork description handed to each node's fork_management configuration.
-define(FORK,
        #{signalling_start_height => ?SIGNALLING_START_HEIGHT,
          signalling_end_height => ?SIGNALLING_END_HEIGHT,
          signalling_block_count => ?SIGNALLING_BLOCK_COUNT,
          info_field => ?INFO_FIELD,
          version => ?VERSION}).
%% The same fork description with support switched on / off.
-define(FORK_ENABLED, maps:put(enabled, true, ?FORK)).
-define(FORK_DISABLED, maps:put(enabled, false, ?FORK)).
%% node1, node2: mining nodes that signal and support the new protocol.
-define(NODE1,
        #{name => node1,
          peers => [node2, node3],
          mining => #{autostart => true},
          fork_management => #{fork => ?FORK_ENABLED},
          backend => aest_docker,
          source => {pull, "aeternity/aeternity:local"}}).
-define(NODE2,
        #{name => node2,
          peers => [node1, node3],
          mining => #{autostart => true},
          fork_management => #{fork => ?FORK_ENABLED},
          backend => aest_docker,
          source => {pull, "aeternity/aeternity:local"}}).
%% node3: non-mining node that does not support the fork.
-define(NODE3,
        #{name => node3,
          peers => [node1, node2],
          mining => #{autostart => false},
          fork_management => #{fork => ?FORK_DISABLED},
          backend => aest_docker,
          source => {pull, "aeternity/aeternity:local"}}).
%% node4: mining node that does not support the fork (keeps the old chain growing).
-define(NODE4,
        #{name => node4,
          peers => [node3],
          mining => #{autostart => true},
          fork_management => #{fork => ?FORK_DISABLED},
          backend => aest_docker,
          source => {pull, "aeternity/aeternity:local"}}).
%=== COMMON TEST FUNCTIONS =====================================================
%% Common Test callback: the test cases this suite runs.
all() ->
    [fork_sync, fork_chain].
init_per_suite(Config) ->
    %% Some parameters depend on the speed and capacity of the docker containers:
    %% timers must be less than gen_server:call timeout.
    %% NOTE(review): the two timeout tuples below were destroyed by extraction
    %% damage (only their trailing comments survived); values restored to the
    %% ones used by the sibling aest suites -- confirm against VCS history.
    [{blocks_per_second, 1},
     {node_startup_time, 20000},  %% Time may take to get the node to respond to http
     {node_shutdown_time, 20000}  %% Time it may take to stop node cleanly
    | Config].
%% Per-testcase setup: delegated to the aest_nodes test infrastructure.
init_per_testcase(_TC, Config) ->
    aest_nodes:ct_setup(Config).
%% Per-testcase teardown: delegated to the aest_nodes test infrastructure.
end_per_testcase(_TC, Config) ->
    aest_nodes:ct_cleanup(Config).
%% Nothing to clean up at suite level.
end_per_suite(_Config) -> ok.
%=== TEST CASES ================================================================
fork_chain(Cfg) ->
    setup_nodes([?NODE1, ?NODE2, ?NODE3, ?NODE4], Cfg),
    %% Supports new protocol, mining.
    start_node(name(?NODE1), Cfg),
    %% Supports new protocol, mining.
    start_node(name(?NODE2), Cfg),
    %% Doesn't support new protocol, not mining to make sure the blocks in the
    %% signalling interval have the signal supporting the new protocol.
    start_node(name(?NODE3), Cfg),
    %% node4 is started later. It's mining and not supporting the new
    %% protocol. If it was started from the beginning it could mine more blocks
    %% than node1 and node2 and they wouldn't switch to a new protocol.
    %% node3 is not guaranteed to even sync as far as block 1 from nodes running
    %% the new protocol config (and block 1 is the definition of started here)
    %% if the peer offers blocks after the fork for its sync pool.
    %% Assume it's enough for this test case that nodes 3 and 4 get to the right state in the end.
    wait_for_startup([name(?NODE1), name(?NODE2)], 1, Cfg),
    %% Check node picked user config
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE1)),
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE2)),
    %% All the three nodes have the same chain until the last signalling
    %% block. One block after the signalling block the node3 stays with the
    %% current protocol and node1 and node2 switch to the new protocol.
    LastSigBlockHeight = ?SIGNALLING_END_HEIGHT - 1,
    wait_for_value({height, LastSigBlockHeight}, [name(?NODE1), name(?NODE2)],
                   LastSigBlockHeight * ?MINING_TIMEOUT, Cfg),
    #{hash := LastSigBlockHash1, version := LastSigBlockVsn1} = get_block(name(?NODE1), LastSigBlockHeight),
    #{hash := LastSigBlockHash2, version := LastSigBlockVsn2} = get_block(name(?NODE2), LastSigBlockHeight),
    ?assertEqual(LastSigBlockHash1, LastSigBlockHash2),
    ?assertEqual(LastSigBlockVsn1, LastSigBlockVsn2),
    %% node1 and node2 can switch to the new protocol and continue adding blocks
    %% to the chain. Since node3 is not mining and stays with the old protocol,
    %% it cannot produce the blocks with the old protocol, so node4 is started
    %% (which is mining the old protocol blocks) to verify that node3 can still
    %% add old protocol blocks to the chain.
    start_node(name(?NODE4), Cfg),
    wait_for_startup([name(?NODE3), name(?NODE4)], 1, Cfg),
    %% Check node picked user config
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE4)),
    wait_for_value({height, LastSigBlockHeight}, [name(?NODE3), name(?NODE4)],
                   LastSigBlockHeight * ?MINING_TIMEOUT, Cfg),
    %% All the nodes are one block before the fork height. node1 and node2 will
    %% upgrade the protocol, node3 and node4 stay with the old protocol.
    AfterForkHeight = ?FORK_HEIGHT + 1,
    wait_for_value({height, AfterForkHeight}, [name(?NODE1), name(?NODE2), name(?NODE3), name(?NODE4)],
                   (AfterForkHeight - LastSigBlockHeight) * ?MINING_TIMEOUT, Cfg),
    #{hash := ForkBlockHash1, version := ForkBlockVsn1} = get_block(name(?NODE1), ?FORK_HEIGHT),
    #{hash := ForkBlockHash2, version := ForkBlockVsn2} = get_block(name(?NODE2), ?FORK_HEIGHT),
    #{hash := ForkBlockHash3, version := ForkBlockVsn3} = get_block(name(?NODE3), ?FORK_HEIGHT),
    #{hash := ForkBlockHash4, version := ForkBlockVsn4} = get_block(name(?NODE4), ?FORK_HEIGHT),
    ?assertEqual(ForkBlockHash1, ForkBlockHash2),
    ?assertEqual(ForkBlockVsn1, ForkBlockVsn2),
    ?assertEqual(ForkBlockHash3, ForkBlockHash4),
    ?assertEqual(ForkBlockVsn3, ForkBlockVsn4),
    ?assertNotEqual(ForkBlockHash1, ForkBlockHash3),
    ?assertNotEqual(ForkBlockVsn1, ForkBlockVsn3),
    ?assert(ForkBlockVsn1 > ForkBlockVsn3),
    ?assert(has_status_new_protocol(name(?NODE1), {?VERSION, ?FORK_HEIGHT})),
    ?assert(has_status_new_protocol(name(?NODE2), {?VERSION, ?FORK_HEIGHT})),
    ?assertNot(has_status_new_protocol(name(?NODE3), {?VERSION, ?FORK_HEIGHT})),
    ?assertNot(has_status_new_protocol(name(?NODE4), {?VERSION, ?FORK_HEIGHT})),
    stop_node(name(?NODE1), 10000, Cfg),
    stop_node(name(?NODE2), 10000, Cfg),
    stop_node(name(?NODE3), 10000, Cfg),
    stop_node(name(?NODE4), 10000, Cfg),
    ok.
%% Test that a second mining node can sync to a node that has already mined
%% past the new protocol
%% This test is not absolutely deterministic, but node1 only needs to mine 5 more
%% blocks before node2 has started and synced. This makes it many times more
%% likely to pass than many of our other tests :)
fork_sync(Cfg) ->
    Node1 = maps:put(peers, [node2], ?NODE1),
    Node2 = maps:put(peers, [node1], ?NODE2),
    setup_nodes([Node1, Node2], Cfg),
    %% Supports new protocol, mining.
    start_node(name(?NODE1), Cfg),
    %% Started, and mined almost up to the end of the signalling block
    wait_for_startup([name(?NODE1)], 10, Cfg),
    %% Check node picked user config
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE1)),
    %% Supports new protocol, mining. Started
    start_node(name(?NODE2), Cfg),
    wait_for_startup([name(?NODE2)], 1, Cfg),
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE2)),
    LastSigBlockHeight = ?SIGNALLING_END_HEIGHT - 1,
    wait_for_value({height, LastSigBlockHeight}, [name(?NODE1), name(?NODE2)],
                   LastSigBlockHeight * ?MINING_TIMEOUT, Cfg),
    #{hash := LastSigBlockHash1, version := LastSigBlockVsn1} = get_block(name(?NODE1), LastSigBlockHeight),
    #{hash := LastSigBlockHash2, version := LastSigBlockVsn2} = get_block(name(?NODE2), LastSigBlockHeight),
    ?assertEqual(LastSigBlockHash1, LastSigBlockHash2),
    ?assertEqual(LastSigBlockVsn1, LastSigBlockVsn2),
    %% The nodes are at this point at least up to one block before the fork height and either will
    %% soon or already have upgraded the protocol
    AfterForkHeight = ?FORK_HEIGHT + 1,
    wait_for_value({height, AfterForkHeight}, [name(?NODE1), name(?NODE2)],
                   (AfterForkHeight - LastSigBlockHeight) * ?MINING_TIMEOUT, Cfg),
    #{hash := ForkBlockHash1, version := ForkBlockVsn1} = get_block(name(?NODE1), ?FORK_HEIGHT),
    #{hash := ForkBlockHash2, version := ForkBlockVsn2} = get_block(name(?NODE2), ?FORK_HEIGHT),
    ?assertEqual(ForkBlockHash1, ForkBlockHash2),
    ?assertEqual(ForkBlockVsn1, ForkBlockVsn2),
    ?assert(has_status_new_protocol(name(?NODE1), {?VERSION, ?FORK_HEIGHT})),
    ?assert(has_status_new_protocol(name(?NODE2), {?VERSION, ?FORK_HEIGHT})),
    stop_node(name(?NODE1), 10000, Cfg),
    stop_node(name(?NODE2), 10000, Cfg),
    ok.
%% True iff the node's /status endpoint reports the given protocol version
%% becoming effective at exactly the given height.
has_status_new_protocol(Node, {Protocol, Height}) ->
    #{protocols := Protocols} = get_status(Node),
    Expected = #{version => Protocol, <<"effective_at_height">> => Height},
    lists:any(fun(P) -> P =:= Expected end, Protocols).
%% Extract the node name atom from a node specification map.
name(#{name := NodeName}) -> NodeName.
| null | https://raw.githubusercontent.com/aeternity/aeternity/2b193881c42d86ed3ad360bae264cd8e0defa003/system_test/common/aest_community_fork_SUITE.erl | erlang | === EXPORTS ===================================================================
Common Test exports
Test cases
=== INCLUDES ==================================================================
=== COMMON TEST FUNCTIONS =====================================================
Some parameters depend on the speed and capacity of the docker containers:
timers must be less than gen_server:call timeout.
=== TEST CASES ================================================================
Supports new protocol, mining.
Supports new protocol, mining.
Doesn't support new protocol, not mining to make sure the blocks in the
signalling interval have the signal supporting the new protocol.
protocol. If it was started from the beginning it could mine more blocks
if the peer offers blocks after the fork for its sync pool.
Check node picked user config
add old protocol blocks to the chain.
Check node picked user config
past the new protocol
likely to pass than many of our other tests :)
Supports new protocol, mining.
Check node picked user config
Supports new protocol, mining. Started
soon or already have upgraded the protocol | -module(aest_community_fork_SUITE).
-export([all/0]).
-export([init_per_suite/1]).
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).
-export([end_per_suite/1]).
-export([fork_chain/1, fork_sync/1]).
-import(aest_nodes, [
setup_nodes/2,
start_node/2,
stop_node/3,
wait_for_value/4,
wait_for_startup/3,
get_block/2,
get_top/1,
get_status/1
]).
-include_lib("stdlib/include/assert.hrl").
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
-define(MINING_TIMEOUT, 10 * 1000).
-define(SIGNALLING_START_HEIGHT, 5).
-define(SIGNALLING_END_HEIGHT, 15).
-define(SIGNALLING_BLOCK_COUNT, 9).
-define(INFO_FIELD, 1234).
-define(VERSION, 3).
-define(FORK_HEIGHT, ?SIGNALLING_END_HEIGHT).
-define(FORK,
#{signalling_start_height => ?SIGNALLING_START_HEIGHT,
signalling_end_height => ?SIGNALLING_END_HEIGHT,
signalling_block_count => ?SIGNALLING_BLOCK_COUNT,
info_field => ?INFO_FIELD,
version => ?VERSION}).
-define(FORK_ENABLED, maps:put(enabled, true, ?FORK)).
-define(FORK_DISABLED, maps:put(enabled, false, ?FORK)).
-define(NODE1,
#{name => node1,
peers => [node2, node3],
mining => #{autostart => true},
fork_management => #{fork => ?FORK_ENABLED},
backend => aest_docker,
source => {pull, "aeternity/aeternity:local"}}).
-define(NODE2,
#{name => node2,
peers => [node1, node3],
mining => #{autostart => true},
fork_management => #{fork => ?FORK_ENABLED},
backend => aest_docker,
source => {pull, "aeternity/aeternity:local"}}).
-define(NODE3,
#{name => node3,
peers => [node1, node2],
mining => #{autostart => false},
fork_management => #{fork => ?FORK_DISABLED},
backend => aest_docker,
source => {pull, "aeternity/aeternity:local"}}).
-define(NODE4,
#{name => node4,
peers => [node3],
mining => #{autostart => true},
fork_management => #{fork => ?FORK_DISABLED},
backend => aest_docker,
source => {pull, "aeternity/aeternity:local"}}).
all() -> [
fork_sync,
fork_chain
].
init_per_suite(Config) ->
[{blocks_per_second, 1},
Time may take to get the node to respond to http
Time it may take to stop node cleanly
| Config].
init_per_testcase(_TC, Config) ->
aest_nodes:ct_setup(Config).
end_per_testcase(_TC, Config) ->
aest_nodes:ct_cleanup(Config).
end_per_suite(_Config) -> ok.
%% Four nodes mine through a signalling period: node1/node2 support the new
%% protocol, node3/node4 do not. After the fork height the network must split
%% into two chains, one per protocol version.
%% NOTE(review): the %% comments in this body were reconstructed from prose
%% lines whose comment markers were stripped; wording is best-effort.
fork_chain(Cfg) ->
    setup_nodes([?NODE1, ?NODE2, ?NODE3, ?NODE4], Cfg),
    start_node(name(?NODE1), Cfg),
    start_node(name(?NODE2), Cfg),
    start_node(name(?NODE3), Cfg),
    %% node4 is started later. It's mining and not supporting the new
    %% protocol, so if started now it could mine more blocks than node1 and
    %% node2 and they wouldn't switch to a new protocol.
    %% node3 is not guaranteed to even sync as far as block 1 from nodes
    %% running the new protocol config (and block 1 is the definition of
    %% started here). Assume it's enough for this test case that nodes 3 and
    %% 4 get to the right state in the end.
    wait_for_startup([name(?NODE1), name(?NODE2)], 1, Cfg),
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE1)),
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE2)),
    %% All three nodes have the same chain until the last signalling block.
    %% One block after the signalling block node3 stays with the current
    %% protocol and node1 and node2 switch to the new protocol.
    LastSigBlockHeight = ?SIGNALLING_END_HEIGHT - 1,
    wait_for_value({height, LastSigBlockHeight}, [name(?NODE1), name(?NODE2)],
                   LastSigBlockHeight * ?MINING_TIMEOUT, Cfg),
    #{hash := LastSigBlockHash1, version := LastSigBlockVsn1} = get_block(name(?NODE1), LastSigBlockHeight),
    #{hash := LastSigBlockHash2, version := LastSigBlockVsn2} = get_block(name(?NODE2), LastSigBlockHeight),
    ?assertEqual(LastSigBlockHash1, LastSigBlockHash2),
    ?assertEqual(LastSigBlockVsn1, LastSigBlockVsn2),
    %% node1 and node2 can switch to the new protocol and continue adding
    %% blocks to the chain. Since node3 is not mining and stays with the old
    %% protocol, it cannot produce blocks with the old protocol, so node4 is
    %% started (which is mining old-protocol blocks) to verify that node3 can
    %% still extend its chain.
    start_node(name(?NODE4), Cfg),
    wait_for_startup([name(?NODE3), name(?NODE4)], 1, Cfg),
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE4)),
    wait_for_value({height, LastSigBlockHeight}, [name(?NODE3), name(?NODE4)],
                   LastSigBlockHeight * ?MINING_TIMEOUT, Cfg),
    %% All the nodes are one block before the fork height. node1 and node2
    %% will upgrade the protocol; node3 and node4 stay with the old protocol.
    AfterForkHeight = ?FORK_HEIGHT + 1,
    wait_for_value({height, AfterForkHeight}, [name(?NODE1), name(?NODE2), name(?NODE3), name(?NODE4)],
                   (AfterForkHeight - LastSigBlockHeight) * ?MINING_TIMEOUT, Cfg),
    #{hash := ForkBlockHash1, version := ForkBlockVsn1} = get_block(name(?NODE1), ?FORK_HEIGHT),
    #{hash := ForkBlockHash2, version := ForkBlockVsn2} = get_block(name(?NODE2), ?FORK_HEIGHT),
    #{hash := ForkBlockHash3, version := ForkBlockVsn3} = get_block(name(?NODE3), ?FORK_HEIGHT),
    #{hash := ForkBlockHash4, version := ForkBlockVsn4} = get_block(name(?NODE4), ?FORK_HEIGHT),
    %% node1/node2 share the new-protocol fork block; node3/node4 share the
    %% old-protocol one; the two forks differ in both hash and version.
    ?assertEqual(ForkBlockHash1, ForkBlockHash2),
    ?assertEqual(ForkBlockVsn1, ForkBlockVsn2),
    ?assertEqual(ForkBlockHash3, ForkBlockHash4),
    ?assertEqual(ForkBlockVsn3, ForkBlockVsn4),
    ?assertNotEqual(ForkBlockHash1, ForkBlockHash3),
    ?assertNotEqual(ForkBlockVsn1, ForkBlockVsn3),
    ?assert(ForkBlockVsn1 > ForkBlockVsn3),
    ?assert(has_status_new_protocol(name(?NODE1), {?VERSION, ?FORK_HEIGHT})),
    ?assert(has_status_new_protocol(name(?NODE2), {?VERSION, ?FORK_HEIGHT})),
    ?assertNot(has_status_new_protocol(name(?NODE3), {?VERSION, ?FORK_HEIGHT})),
    ?assertNot(has_status_new_protocol(name(?NODE4), {?VERSION, ?FORK_HEIGHT})),
    stop_node(name(?NODE1), 10000, Cfg),
    stop_node(name(?NODE2), 10000, Cfg),
    stop_node(name(?NODE3), 10000, Cfg),
    stop_node(name(?NODE4), 10000, Cfg),
    ok.
%% Test that a second mining node can sync to a node that has already mined
%% some blocks, and that both then signal and switch to the new protocol.
%% This test is not absolutely deterministic, but node1 only needs to mine 5
%% more blocks before node2 has started and synced. This makes it many times
%% more likely to succeed than to fail.
%% NOTE(review): the %% comments in this body were reconstructed from prose
%% lines whose comment markers were stripped; wording is best-effort.
fork_sync(Cfg) ->
    Node1 = maps:put(peers, [node2], ?NODE1),
    Node2 = maps:put(peers, [node1], ?NODE2),
    setup_nodes([Node1, Node2], Cfg),
    start_node(name(?NODE1), Cfg),
    %% node1 started, and has mined almost up to the end of the signalling
    %% interval before node2 comes online.
    wait_for_startup([name(?NODE1)], 10, Cfg),
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE1)),
    start_node(name(?NODE2), Cfg),
    wait_for_startup([name(?NODE2)], 1, Cfg),
    #{network_id := <<"ae_system_test">>} = get_status(name(?NODE2)),
    LastSigBlockHeight = ?SIGNALLING_END_HEIGHT - 1,
    wait_for_value({height, LastSigBlockHeight}, [name(?NODE1), name(?NODE2)],
                   LastSigBlockHeight * ?MINING_TIMEOUT, Cfg),
    #{hash := LastSigBlockHash1, version := LastSigBlockVsn1} = get_block(name(?NODE1), LastSigBlockHeight),
    #{hash := LastSigBlockHash2, version := LastSigBlockVsn2} = get_block(name(?NODE2), LastSigBlockHeight),
    ?assertEqual(LastSigBlockHash1, LastSigBlockHash2),
    ?assertEqual(LastSigBlockVsn1, LastSigBlockVsn2),
    %% The nodes are at this point at least one block before the fork height,
    %% and both will switch to the new protocol at the fork height.
    AfterForkHeight = ?FORK_HEIGHT + 1,
    wait_for_value({height, AfterForkHeight}, [name(?NODE1), name(?NODE2)],
                   (AfterForkHeight - LastSigBlockHeight) * ?MINING_TIMEOUT, Cfg),
    #{hash := ForkBlockHash1, version := ForkBlockVsn1} = get_block(name(?NODE1), ?FORK_HEIGHT),
    #{hash := ForkBlockHash2, version := ForkBlockVsn2} = get_block(name(?NODE2), ?FORK_HEIGHT),
    ?assertEqual(ForkBlockHash1, ForkBlockHash2),
    ?assertEqual(ForkBlockVsn1, ForkBlockVsn2),
    ?assert(has_status_new_protocol(name(?NODE1), {?VERSION, ?FORK_HEIGHT})),
    ?assert(has_status_new_protocol(name(?NODE2), {?VERSION, ?FORK_HEIGHT})),
    stop_node(name(?NODE1), 10000, Cfg),
    stop_node(name(?NODE2), 10000, Cfg),
    ok.
%% True iff the node's status reports a protocol entry with the given
%% version becoming effective at the given height.
%% NOTE(review): the membership map mixes an atom key (version) with a
%% binary key (<<"effective_at_height">>) — this looks inconsistent with the
%% atom-keyed status map matched above; confirm against the decoded status
%% schema before relying on it.
has_status_new_protocol(Node, {Protocol, Height}) ->
    #{protocols := Protocols} = get_status(Node),
    lists:member(#{version => Protocol, <<"effective_at_height">> => Height}, Protocols).
%% Extract the node name from a node spec map (crashes on anything else).
name(#{name := Name}) -> Name.
|
0ac26aae2ec3ab0234519376ac19b4d67a02a15a297001eaa6e37b8c51cb7c89 | hopv/homusat | HFS.mli | (* Hierarchical Function System *)
(* Simple types over propositions: the base type and arrows between them *)
type simple_type =
    | Prop
    | Arrow of simple_type * simple_type
(* Human-readable rendering of a simple type *)
val string_of_simple_type : simple_type -> string
(* Formulas without fixed-point operators and lambda abstractions *)
type formula =
    (* | Var of Id.t *)
    (* Bare variables are expressed as empty applications *)
    | Or of formula list
    | And of formula list
    | Box of LTS.label * formula
    | Diamond of LTS.label * formula
    | App of Id.t * (formula list)
(* Human-readable rendering of a formula *)
val string_of_formula : formula -> string
(* Fixed-point operators *)
type fp = Mu | Nu
val string_of_fp : fp -> string
(* A formal parameter: identifier paired with its simple type *)
type argument = Id.t * simple_type
val string_of_arg : argument -> string
val string_of_args : argument list -> string
(* Functions in HFS *)
(* fixed-point kind, name, type, formal parameters, and body formula *)
type func = fp * Id.t * simple_type * (argument list) * formula
val string_of_func : func -> string
(* HFS *)
(* a whole system is an ordered list of functions *)
type t = func list
val to_string : t -> string
| null | https://raw.githubusercontent.com/hopv/homusat/cc05711a3f9d45b253b83ad09a2d0288115cc4f4/HFS.mli | ocaml | Hierarchical Function System
Formulas without fixed-point operators and lambda abstractions
| Var of Id.t
Bare variables are expressed as empty applications
Fixed-point operators
Functions in HFS
HFS |
type simple_type =
| Prop
| Arrow of simple_type * simple_type
val string_of_simple_type : simple_type -> string
type formula =
| Or of formula list
| And of formula list
| Box of LTS.label * formula
| Diamond of LTS.label * formula
| App of Id.t * (formula list)
val string_of_formula : formula -> string
type fp = Mu | Nu
val string_of_fp : fp -> string
type argument = Id.t * simple_type
val string_of_arg : argument -> string
val string_of_args : argument list -> string
type func = fp * Id.t * simple_type * (argument list) * formula
val string_of_func : func -> string
type t = func list
val to_string : t -> string
|
12729d1dd262045d75f6ac4a37d669b7adcc89b2e3b4880ba3c6d8998c9dcc8b | amuletml/amulet | Let.hs | # LANGUAGE FlexibleContexts , TypeFamilies , TupleSections #
module Syntax.Let where
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Graph
import Control.Lens
import Syntax.Var
import Syntax
import GHC.Exts (IsList(..))
-- | Order a group of let-bindings by dependency, returning strongly
-- connected components with dependencies first (the order produced by
-- 'stronglyConnComp'). Pattern bindings that bind no variables cannot be
-- depended upon, so they are emitted up front as acyclic components.
depOrder :: Ord (Var p)
         => [Binding p]
         -> [SCC (Binding p)]
depOrder binds = extra ++ stronglyConnComp nodes where
  (extra, nodes, mapping) = foldr buildNode (mempty, mempty, mempty) binds
  -- One graph node per binding, keyed by a representative bound variable;
  -- 'mapping' sends every variable a binding introduces to that
  -- representative, so edges land on the right node.
  buildNode it@(Binding var _ ex _ _) (e, n, m) =
    ( e, (it, var, freeInMapped ex):n, Map.insert var var m )
  buildNode it@(Matching p ex _) (e, n, m) =
    case bound p of
      [] -> (AcyclicSCC it:e, n, m)
      vs@(var:_) -> (e, (it, var, freeInMapped ex):n, foldr (`Map.insert`var) m vs)
  buildNode it@(TypedMatching p ex _ _) (e, n, m) =
    case bound p of
      [] -> (AcyclicSCC it:e, n, m)
      vs@(var:_) -> (e, (it, var, freeInMapped ex):n, foldr (`Map.insert`var) m vs)
  -- Free variables of a body, translated through 'mapping'; variables bound
  -- outside this binding group are simply dropped.
  freeInMapped = Set.toList . Set.foldr (maybe id Set.insert . flip Map.lookup mapping) mempty . freeIn
-- | The set of variables occurring free in an expression. Binders ('Let',
-- 'Fun', match arms, comprehension statements, 'MLet') subtract the
-- variables they introduce from the free set of their bodies.
freeIn :: Ord (Var p) => Expr p -> Set.Set (Var p)
freeIn (Ascription e _ _) = freeIn e
freeIn (RecordExt e rs _) = freeIn e <> foldMap (freeIn . view fExpr) rs
freeIn (BinOp a b c _) = freeIn a <> freeIn b <> freeIn c
freeIn (VarRef v _) = Set.singleton v
freeIn (Begin es _) = foldMap freeIn es
freeIn (Let _ vs b _) =
  -- All bindings scope over the body and over each other.
  (freeIn b <> foldMap (freeIn . view bindBody) vs)
  Set.\\ foldMapOf (each . bindVariable) Set.singleton vs
freeIn (App f x _) = freeIn f <> freeIn x
freeIn (Fun p e _) = freeIn e Set.\\ bound (p ^. paramPat)
freeIn (Record rs _) = foldMap (freeIn . view fExpr) rs
freeIn (Access e _ _) = freeIn e
freeIn (Match t ps _ _) = freeIn t <> foldMap freeInBranch ps where
  -- A branch pattern binds in both its guard and its body.
  freeInBranch (Arm p g e _) = (freeIn e <> foldMap freeIn g) Set.\\ bound p
freeIn Literal{} = mempty
freeIn Hole{} = mempty
freeIn (If a b c _) = freeIn a <> freeIn b <> freeIn c
freeIn (Tuple es _) = foldMap freeIn es
freeIn (ExprWrapper w e _) =
  case w of
    -- Unfold composed wrappers; the annotation slots are never inspected
    -- here, hence the 'undefined' placeholders.
    x Syntax.:> y -> freeIn (ExprWrapper x (ExprWrapper y e undefined) undefined)
    ExprApp x -> freeIn x <> freeIn e
    _ -> freeIn e
freeIn (Parens e _) = freeIn e
freeIn (LeftSection a b _) = freeIn a <> freeIn b
freeIn (RightSection a b _) = freeIn a <> freeIn b
freeIn (BothSection b _) = freeIn b
freeIn AccessSection{} = mempty
freeIn (Vta e _ _) = freeIn e
freeIn (Idiom vp va es _) = Set.fromList [vp, va] <> freeIn es
-- List-range forms reference the builder variable 'v' plus their bounds.
freeIn (ListFrom v x _) = Set.insert v (freeIn x)
freeIn (ListFromTo v x y _) = Set.insert v (freeIn x <> freeIn y)
freeIn (ListFromThen v x y _) = Set.insert v (freeIn x <> freeIn y)
freeIn (ListFromThenTo v x y z _) = Set.insert v (freeIn x <> freeIn y <> freeIn z)
freeIn (ListExp e _) = foldMap freeIn e
freeIn (ListComp e qs _) = freeIn e <> freeInStmt qs
freeIn (MLet v p e b _) = Set.insert v (freeIn e <> (freeIn b `Set.difference` bound p))
freeIn (OpenIn _ e _) = freeIn e
-- These constructors are removed by desugaring before this function runs.
freeIn Function{} = error "ds Function freeIn"
freeIn TupleSection{} = error "ds TupleSection freeIn"
freeIn Syntax.Lazy{} = error "ds Lazy freeIn"
-- | Free variables of a chain of list-comprehension statements. Each
-- generator or let-statement binds over everything that follows it.
freeInStmt :: Ord (Var p) => [CompStmt p] -> Set.Set (Var p)
freeInStmt [] = mempty
freeInStmt (stmt:rest) = case stmt of
  CompGuard e   -> freeIn e <> freeInStmt rest
  CompGen p e _ -> (freeIn e <> freeInStmt rest) `Set.difference` bound p
  CompLet bs _  ->
    (foldMap (freeIn . view bindBody) bs <> freeInStmt rest)
      `Set.difference` foldMapOf (each . bindVariable) Set.singleton bs
-- | Collect every variable a pattern binds, into any list-like monoid.
bound :: (IsList m, Item m ~ Var p, Monoid m)
      => Pattern p -> m
bound pat = case pat of
  Wildcard{}          -> mempty
  PLiteral{}          -> mempty
  Capture v _         -> fromList [v]
  PAs p v _           -> fromList [v] <> bound p
  PType p _ _         -> bound p
  POr p q _           -> bound p <> bound q
  PTuple ps _         -> foldMap bound ps
  PList ps _          -> foldMap bound ps
  PRecord fs _        -> foldMap (bound . snd) fs
  Destructure _ ps _  -> foldMap bound ps
  PGadtCon _ _ vs p _ -> fromList (map fst vs) <> foldMap bound p
-- | Like 'bound', but pair every bound variable with the annotation of the
-- pattern node that introduced it.
boundWith :: (IsList m, Item m ~ (Var p, Ann p), Monoid m)
          => Pattern p -> m
boundWith pat = case pat of
  Wildcard{}          -> mempty
  PLiteral{}          -> mempty
  Capture v a         -> fromList [(v, a)]
  PAs p v a           -> fromList [(v, a)] <> boundWith p
  PType p _ _         -> boundWith p
  POr p q _           -> boundWith p <> boundWith q
  PTuple ps _         -> foldMap boundWith ps
  PList ps _          -> foldMap boundWith ps
  PRecord fs _        -> foldMap (boundWith . snd) fs
  Destructure _ ps _  -> foldMap boundWith ps
  PGadtCon _ _ vs p a -> fromList (map ((,a) . fst) vs) <> foldMap boundWith p
-- | Every variable a binding introduces: the bound name of a plain binding,
-- or all pattern-bound variables of a (typed) matching binding.
bindVariables :: (IsList (m (Var p)), Item (m (Var p)) ~ Var p, Monoid (m (Var p)))
              => Binding p -> m (Var p)
bindVariables b = case b of
  Binding       { _bindVariable = v } -> fromList [v]
  Matching      { _bindPattern  = p } -> bound p
  TypedMatching { _bindPattern  = p } -> bound p
| null | https://raw.githubusercontent.com/amuletml/amulet/fcba0b7e198b8d354e95722bbe118bccc8483f4e/src/Syntax/Let.hs | haskell | # LANGUAGE FlexibleContexts , TypeFamilies , TupleSections #
module Syntax.Let where
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Graph
import Control.Lens
import Syntax.Var
import Syntax
import GHC.Exts (IsList(..))
depOrder :: Ord (Var p)
=> [Binding p]
-> [SCC (Binding p)]
depOrder binds = extra ++ stronglyConnComp nodes where
(extra, nodes, mapping) = foldr buildNode (mempty, mempty, mempty) binds
buildNode it@(Binding var _ ex _ _) (e, n, m) =
( e, (it, var, freeInMapped ex):n, Map.insert var var m )
buildNode it@(Matching p ex _) (e, n, m) =
case bound p of
[] -> (AcyclicSCC it:e, n, m)
vs@(var:_) -> (e, (it, var, freeInMapped ex):n, foldr (`Map.insert`var) m vs)
buildNode it@(TypedMatching p ex _ _) (e, n, m) =
case bound p of
[] -> (AcyclicSCC it:e, n, m)
vs@(var:_) -> (e, (it, var, freeInMapped ex):n, foldr (`Map.insert`var) m vs)
freeInMapped = Set.toList . Set.foldr (maybe id Set.insert . flip Map.lookup mapping) mempty . freeIn
freeIn :: Ord (Var p) => Expr p -> Set.Set (Var p)
freeIn (Ascription e _ _) = freeIn e
freeIn (RecordExt e rs _) = freeIn e <> foldMap (freeIn . view fExpr) rs
freeIn (BinOp a b c _) = freeIn a <> freeIn b <> freeIn c
freeIn (VarRef v _) = Set.singleton v
freeIn (Begin es _) = foldMap freeIn es
freeIn (Let _ vs b _) =
(freeIn b <> foldMap (freeIn . view bindBody) vs)
Set.\\ foldMapOf (each . bindVariable) Set.singleton vs
freeIn (App f x _) = freeIn f <> freeIn x
freeIn (Fun p e _) = freeIn e Set.\\ bound (p ^. paramPat)
freeIn (Record rs _) = foldMap (freeIn . view fExpr) rs
freeIn (Access e _ _) = freeIn e
freeIn (Match t ps _ _) = freeIn t <> foldMap freeInBranch ps where
freeInBranch (Arm p g e _) = (freeIn e <> foldMap freeIn g) Set.\\ bound p
freeIn Literal{} = mempty
freeIn Hole{} = mempty
freeIn (If a b c _) = freeIn a <> freeIn b <> freeIn c
freeIn (Tuple es _) = foldMap freeIn es
freeIn (ExprWrapper w e _) =
case w of
x Syntax.:> y -> freeIn (ExprWrapper x (ExprWrapper y e undefined) undefined)
ExprApp x -> freeIn x <> freeIn e
_ -> freeIn e
freeIn (Parens e _) = freeIn e
freeIn (LeftSection a b _) = freeIn a <> freeIn b
freeIn (RightSection a b _) = freeIn a <> freeIn b
freeIn (BothSection b _) = freeIn b
freeIn AccessSection{} = mempty
freeIn (Vta e _ _) = freeIn e
freeIn (Idiom vp va es _) = Set.fromList [vp, va] <> freeIn es
freeIn (ListFrom v x _) = Set.insert v (freeIn x)
freeIn (ListFromTo v x y _) = Set.insert v (freeIn x <> freeIn y)
freeIn (ListFromThen v x y _) = Set.insert v (freeIn x <> freeIn y)
freeIn (ListFromThenTo v x y z _) = Set.insert v (freeIn x <> freeIn y <> freeIn z)
freeIn (ListExp e _) = foldMap freeIn e
freeIn (ListComp e qs _) = freeIn e <> freeInStmt qs
freeIn (MLet v p e b _) = Set.insert v (freeIn e <> (freeIn b `Set.difference` bound p))
freeIn (OpenIn _ e _) = freeIn e
freeIn Function{} = error "ds Function freeIn"
freeIn TupleSection{} = error "ds TupleSection freeIn"
freeIn Syntax.Lazy{} = error "ds Lazy freeIn"
freeInStmt :: Ord (Var p) => [CompStmt p] -> Set.Set (Var p)
freeInStmt (CompGen p e _:qs) = (freeIn e <> freeInStmt qs) `Set.difference` bound p
freeInStmt (CompLet bs _:qs) =
(foldMap (freeIn . view bindBody) bs <> freeInStmt qs)
`Set.difference` foldMapOf (each . bindVariable) Set.singleton bs
freeInStmt (CompGuard e:qs) = freeIn e <> freeInStmt qs
freeInStmt [] = mempty
bound :: (IsList m, Item m ~ Var p, Monoid m)
=> Pattern p -> m
bound (Destructure _ x _) = foldMap bound x
bound (PAs p v _) = fromList [v] <> bound p
bound (PRecord vs _) = foldMap (bound . snd) vs
bound (PTuple ps _) = foldMap bound ps
bound (PList ps _) = foldMap bound ps
bound (POr p q _) = bound p <> bound q
bound (Capture p _) = fromList [p]
bound (PType p _ _) = bound p
bound (PGadtCon _ _ vs p _) = fromList (map fst vs) <> foldMap bound p
bound Wildcard{} = mempty
bound PLiteral{} = mempty
boundWith :: (IsList m, Item m ~ (Var p, Ann p), Monoid m)
=> Pattern p -> m
boundWith (Destructure _ x _) = foldMap boundWith x
boundWith (PRecord vs _) = foldMap (boundWith . snd) vs
boundWith (PTuple ps _) = foldMap boundWith ps
boundWith (PList ps _) = foldMap boundWith ps
boundWith (POr p q _) = boundWith p <> boundWith q
boundWith (Capture p a) = fromList [(p, a)]
boundWith (PAs p v a) = fromList [(v, a)] <> boundWith p
boundWith (PType p _ _) = boundWith p
boundWith (PGadtCon _ _ vs p a) = fromList (map ((,a) . fst) vs) <> foldMap boundWith p
boundWith Wildcard{} = mempty
boundWith PLiteral{} = mempty
bindVariables :: (IsList (m (Var p)), Item (m (Var p)) ~ Var p, Monoid (m (Var p)))
=> Binding p -> m (Var p)
bindVariables Binding { _bindVariable = v } = fromList [v]
bindVariables Matching { _bindPattern = p } = bound p
bindVariables TypedMatching { _bindPattern = p } = bound p
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.