field         type           range
_id           stringlengths  64–64
repository    stringlengths  6–84
name          stringlengths  4–110
content       stringlengths  0–248k
license       null
download_url  stringlengths  89–454
language      stringclasses  7 values
comments      stringlengths  0–74.6k
code          stringlengths  0–248k
0790a3086781f85165cecb8242509331a20566bd06958439e416412d1f844d9a
macchiato-framework/macchiato-core
cors.cljs
(ns ^{:doc "Ring middleware for Cross-Origin Resource Sharing." :author "Mihael Konjević"} macchiato.middleware.cors (:require [clojure.set :as set] [cuerdas.core :as str] [macchiato.util.response :as r :refer [get-header]])) (defn origin "Returns the Origin request header." [request] (get-header request "origin")) (defn preflight? "Returns true if the request is a preflight request" [request] (= (request :request-method) :options)) (defn lower-case-set "Converts strings in a sequence to lower-case, and put them into a set" [s] (->> s (map str/trim) (map str/lower) (set))) (defn parse-headers "Transforms a comma-separated string to a set" [s] (->> (str/split (str s) #",") (remove str/blank?) (lower-case-set))) (defn allow-preflight-headers? "Returns true if the request is a preflight request and all the headers that it's going to use are allowed. Returns false otherwise." [request allowed-headers] (if (nil? allowed-headers) true (set/subset? (parse-headers (get-header request "access-control-request-headers")) (lower-case-set (map name allowed-headers))))) (defn allow-method? "In the case of regular requests it checks if the request-method is allowed. In the case of preflight requests it checks if the access-control-request-method is allowed." [request allowed-methods] (let [preflight-name [:headers "access-control-request-method"] request-method (if (preflight? request) (keyword (str/lower (get-in request preflight-name ""))) (:request-method request))] (contains? allowed-methods request-method))) (defn allow-request? "Returns true if the request's origin matches the access control origin, otherwise false." [request access-control] (let [origin (origin request) allowed-origins (:access-control-allow-origin access-control) allowed-headers (:access-control-allow-headers access-control) allowed-methods (:access-control-allow-methods access-control)] (if (and origin (seq allowed-origins) (seq allowed-methods) (some #(re-matches % origin) allowed-origins) (if (preflight? request) (allow-preflight-headers? request allowed-headers) true) (allow-method? request allowed-methods)) true false))) (defn header-name "Returns the capitalized header name as a string." [header] (if header (->> (str/split (name header) #"-") (map str/capitalize) (str/join "-")))) (defn normalize-headers "Normalize the headers by converting them to capitalized strings." [headers] (let [upcase #(str/join ", " (sort (map (comp str/upper name) %))) to-header-names #(str/join ", " (sort (map (comp header-name name) %)))] (reduce (fn [acc [k v]] (assoc acc (header-name k) (case k :access-control-allow-methods (upcase v) :access-control-allow-headers (to-header-names v) v))) {} headers))) (defn add-headers "Add the access control headers using the request's origin to the response." [request access-control response] (if-let [origin (origin request)] (update-in response [:headers] merge (assoc access-control :access-control-allow-origin origin)) response)) (defn add-allowed-headers "Adds the allowed headers to the request" [request allowed-headers response] (if (preflight? request) (let [request-headers (get-header request "access-control-request-headers") allowed-headers (if (nil? allowed-headers) (parse-headers request-headers) allowed-headers)] (if allowed-headers (update-in response [:headers] merge {:access-control-allow-headers allowed-headers}) response)) response)) (defn add-access-control "Add the access-control headers to the response based on the rules and what came on the header." 
[request access-control response] (let [allowed-headers (:access-control-allow-headers access-control) rest-of-headers (dissoc access-control :access-control-allow-headers) unnormalized-resp (->> response (add-headers request rest-of-headers) (add-allowed-headers request allowed-headers))] (update-in unnormalized-resp [:headers] normalize-headers))) (defn normalize-config [access-control] (-> access-control (update-in [:access-control-allow-methods] set) (update-in [:access-control-allow-headers] #(if (coll? %) (set %) %)) (update-in [:access-control-allow-origin] #(if (sequential? %) % [%])))) (defn wrap-cors "Middleware that adds Cross-Origin Resource Sharing headers. (def handler (-> routes (wrap-cors {:access-control-allow-origin #\"\" :access-control-allow-methods [:get :put :post :delete]))}) " ([handler] (wrap-cors handler {})) ([handler {:keys [message] :as opts}] (let [access-control (normalize-config opts)] (fn [request respond raise] (if (and (preflight? request) (allow-request? request access-control)) (let [blank-response {:status 200 :headers {} :body (or message "preflight complete")}] (respond (add-access-control request access-control blank-response))) (if (origin request) (if (allow-request? request access-control) (handler request #(respond (add-access-control request access-control %)) raise) (handler request respond raise)) (handler request respond raise)))))))
null
https://raw.githubusercontent.com/macchiato-framework/macchiato-core/14eac3dbc561927ee61b6127f30ef0b0269b2af6/src/macchiato/middleware/cors.cljs
clojure
(ns ^{:doc "Ring middleware for Cross-Origin Resource Sharing." :author "Mihael Konjević"} macchiato.middleware.cors (:require [clojure.set :as set] [cuerdas.core :as str] [macchiato.util.response :as r :refer [get-header]])) (defn origin "Returns the Origin request header." [request] (get-header request "origin")) (defn preflight? "Returns true if the request is a preflight request" [request] (= (request :request-method) :options)) (defn lower-case-set "Converts strings in a sequence to lower-case, and put them into a set" [s] (->> s (map str/trim) (map str/lower) (set))) (defn parse-headers "Transforms a comma-separated string to a set" [s] (->> (str/split (str s) #",") (remove str/blank?) (lower-case-set))) (defn allow-preflight-headers? "Returns true if the request is a preflight request and all the headers that it's going to use are allowed. Returns false otherwise." [request allowed-headers] (if (nil? allowed-headers) true (set/subset? (parse-headers (get-header request "access-control-request-headers")) (lower-case-set (map name allowed-headers))))) (defn allow-method? "In the case of regular requests it checks if the request-method is allowed. In the case of preflight requests it checks if the access-control-request-method is allowed." [request allowed-methods] (let [preflight-name [:headers "access-control-request-method"] request-method (if (preflight? request) (keyword (str/lower (get-in request preflight-name ""))) (:request-method request))] (contains? allowed-methods request-method))) (defn allow-request? "Returns true if the request's origin matches the access control origin, otherwise false." [request access-control] (let [origin (origin request) allowed-origins (:access-control-allow-origin access-control) allowed-headers (:access-control-allow-headers access-control) allowed-methods (:access-control-allow-methods access-control)] (if (and origin (seq allowed-origins) (seq allowed-methods) (some #(re-matches % origin) allowed-origins) (if (preflight? request) (allow-preflight-headers? request allowed-headers) true) (allow-method? request allowed-methods)) true false))) (defn header-name "Returns the capitalized header name as a string." [header] (if header (->> (str/split (name header) #"-") (map str/capitalize) (str/join "-")))) (defn normalize-headers "Normalize the headers by converting them to capitalized strings." [headers] (let [upcase #(str/join ", " (sort (map (comp str/upper name) %))) to-header-names #(str/join ", " (sort (map (comp header-name name) %)))] (reduce (fn [acc [k v]] (assoc acc (header-name k) (case k :access-control-allow-methods (upcase v) :access-control-allow-headers (to-header-names v) v))) {} headers))) (defn add-headers "Add the access control headers using the request's origin to the response." [request access-control response] (if-let [origin (origin request)] (update-in response [:headers] merge (assoc access-control :access-control-allow-origin origin)) response)) (defn add-allowed-headers "Adds the allowed headers to the request" [request allowed-headers response] (if (preflight? request) (let [request-headers (get-header request "access-control-request-headers") allowed-headers (if (nil? allowed-headers) (parse-headers request-headers) allowed-headers)] (if allowed-headers (update-in response [:headers] merge {:access-control-allow-headers allowed-headers}) response)) response)) (defn add-access-control "Add the access-control headers to the response based on the rules and what came on the header." 
[request access-control response] (let [allowed-headers (:access-control-allow-headers access-control) rest-of-headers (dissoc access-control :access-control-allow-headers) unnormalized-resp (->> response (add-headers request rest-of-headers) (add-allowed-headers request allowed-headers))] (update-in unnormalized-resp [:headers] normalize-headers))) (defn normalize-config [access-control] (-> access-control (update-in [:access-control-allow-methods] set) (update-in [:access-control-allow-headers] #(if (coll? %) (set %) %)) (update-in [:access-control-allow-origin] #(if (sequential? %) % [%])))) (defn wrap-cors "Middleware that adds Cross-Origin Resource Sharing headers. (def handler (-> routes (wrap-cors {:access-control-allow-origin #\"\" :access-control-allow-methods [:get :put :post :delete]))}) " ([handler] (wrap-cors handler {})) ([handler {:keys [message] :as opts}] (let [access-control (normalize-config opts)] (fn [request respond raise] (if (and (preflight? request) (allow-request? request access-control)) (let [blank-response {:status 200 :headers {} :body (or message "preflight complete")}] (respond (add-access-control request access-control blank-response))) (if (origin request) (if (allow-request? request access-control) (handler request #(respond (add-access-control request access-control %)) raise) (handler request respond raise)) (handler request respond raise)))))))
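The wrap-cors docstring above only hints at its options map; the following is a minimal, hypothetical usage sketch, not part of this dataset row (the example.app namespace, the handler body and the example.com origin are assumptions).

(ns example.app
  (:require [macchiato.middleware.cors :as cors]))

;; A three-arity (request respond raise) async handler, the shape this middleware expects.
(defn handler [_request respond _raise]
  (respond {:status 200
            :headers {"Content-Type" "text/plain"}
            :body "hello"}))

;; Origins are matched with re-matches, so they are given as regular expressions;
;; methods are keywords, mirroring the wrap-cors docstring.
(def app
  (cors/wrap-cors handler
                  {:access-control-allow-origin  [#"https://example\.com"]
                   :access-control-allow-methods [:get :put :post :delete]}))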
f963d547aeb62dd35a23e2167063d63423f5123cee9d8d69f6602f5e8ebe9e54
Verites/verigraph
Morphism.hs
# LANGUAGE TypeFamilies # module Data.Graphs.Morphism ( -- * Types GraphMorphism(..) , compose -- * Construction , Data.Graphs.Morphism.empty , buildGraphMorphism , fromGraphsAndRelations , fromGraphsAndLists -- * Transformation , invertGraphMorphism , updateCodomain , updateDomain , updateNodes , updateNodeRelation , updateEdgeRelation , updateEdges , removeEdgeFromDomain , removeEdgeFromCodomain , removeNodeFromDomain , removeNodeFromDomainForced , removeNodeFromCodomain , createEdgeOnDomain , createEdgeOnCodomain , createNodeOnDomain , createNodeOnCodomain -- * Query , applyNode , applyNodeUnsafe , applyNodeId , applyNodeIdUnsafe , applyEdge , applyEdgeUnsafe , applyEdgeId , applyEdgeIdUnsafe , orphanNodeIds , orphanEdgeIds , orphanEdges ) where import Control.Arrow import Data.Function (on) import qualified Data.List as List import Base.Valid import Data.Graphs as G import Data.Maybe (fromMaybe, isNothing) import qualified Data.Relation as R data GraphMorphism a b = GraphMorphism { domainGraph :: Graph a b , codomainGraph :: Graph a b , nodeRelation :: R.Relation G.NodeId , edgeRelation :: R.Relation G.EdgeId } compose :: GraphMorphism a b -> GraphMorphism a b -> GraphMorphism a b compose m2 m1 = GraphMorphism (domainGraph m1) (codomainGraph m2) (R.compose (nodeRelation m1) (nodeRelation m2)) (R.compose (edgeRelation m1) (edgeRelation m2)) instance Eq (GraphMorphism a b) where m1 == m2 = domainGraph m1 == domainGraph m2 && codomainGraph m1 == codomainGraph m2 && nodeRelation m1 == nodeRelation m2 && edgeRelation m1 == edgeRelation m2 instance Show (GraphMorphism a b) where show m = concat $ "\nNode mappings: \n" : (map showNode . List.sort) (G.nodeIds $ domainGraph m) ++ "\nEdge mappings: \n" : (map showEdge . List.sortBy (compare `on` edgeId)) (G.edges $ domainGraph m) where showNode n = show n ++ " --> " ++ show (applyNodeId m n) ++ "\n" showEdge (Edge e srcId tgtId _) = show e ++ " --> " ++ show (applyEdgeId m e) ++ " (from: " ++ show srcId ++ " to:" ++ show tgtId ++ ")\n" -- | Return the nodes ids of the codomain which are not in the image of the given morphism. orphanNodeIds :: GraphMorphism a b -> [G.NodeId] orphanNodeIds gm = R.orphans (nodeRelation gm) -- | Return the edges of the codomain which are not in the image of the given morphism. orphanEdges :: GraphMorphism a b -> [G.Edge b] orphanEdges gm = map idToEdge (R.orphans (edgeRelation gm)) where idToEdge id = fromMaybe (error "orphanEdges: EdgeId is not in graph") (lookupEdge id (codomainGraph gm)) -- | Return the edge ids of the codomain which are not in the image of the given morphism. orphanEdgeIds :: GraphMorphism a b -> [G.EdgeId] orphanEdgeIds gm = R.orphans (edgeRelation gm) -- | Return the node to which @ln@ gets mapped. applyNode :: GraphMorphism a b -> G.Node a -> Maybe (G.Node a) applyNode m ln = case applyNodeId m (nodeId ln) of Just x -> lookupNode x (codomainGraph m) Nothing -> Nothing -- | Return the nodeId to which @ln@ gets mapped. applyNodeId :: GraphMorphism a b -> G.NodeId -> Maybe G.NodeId applyNodeId m ln = case R.apply (nodeRelation m) ln of (x:_) -> Just x _ -> Nothing -- | Return the edge to which @le@ gets mapped. applyEdge :: GraphMorphism a b -> G.Edge b -> Maybe (G.Edge b) applyEdge m le = case applyEdgeId m (edgeId le) of Just x -> lookupEdge x (codomainGraph m) Nothing -> Nothing -- | Return the edgeId to which @le@ gets mapped. 
applyEdgeId :: GraphMorphism a b -> G.EdgeId -> Maybe G.EdgeId applyEdgeId m le = case R.apply (edgeRelation m) le of (x:_) -> Just x _ -> Nothing -- | Return the node to which @le@ gets mapped or error in the case of undefined applyNodeUnsafe :: GraphMorphism a b -> G.Node a -> G.Node a applyNodeUnsafe morph n = fromMaybe (error "Error, apply nodeId in a non total morphism") $ applyNode morph n -- | Return the nodeId to which @le@ gets mapped or error in the case of undefined applyNodeIdUnsafe :: GraphMorphism a b -> NodeId -> NodeId applyNodeIdUnsafe morph n = fromMaybe (error "Error, apply nodeId in a non total morphism") $ applyNodeId morph n -- | Return the edge to which @le@ gets mapped or error in the case of undefined applyEdgeUnsafe :: GraphMorphism a b -> G.Edge b -> G.Edge b applyEdgeUnsafe morph e = fromMaybe (error "Error, apply edge in a non total morphism") $ applyEdge morph e -- | Return the edgeId to which @le@ gets mapped or error in the case of undefined applyEdgeIdUnsafe :: GraphMorphism a b -> EdgeId -> EdgeId applyEdgeIdUnsafe morph e = fromMaybe (error "Error, apply edgeId in a non total morphism") $ applyEdgeId morph e | An empty morphism between two graphs . empty :: Graph a b -> Graph a b -> GraphMorphism a b empty gA gB = GraphMorphism gA gB (R.empty (nodeIds gA) (nodeIds gB)) (R.empty (edgeIds gA) (edgeIds gB)) -- | Construct a graph morphism buildGraphMorphism :: Graph a b -> Graph a b -> [(Int,Int)] -> [(Int,Int)] -> GraphMorphism a b buildGraphMorphism gA gB n = foldr (uncurry updateEdges . (EdgeId *** EdgeId)) g where g = foldr (uncurry updateNodes . (NodeId *** NodeId)) (Data.Graphs.Morphism.empty gA gB) n | Constructs a @GraphMorphism@ from two Graphs , a node relation and a edge relation . fromGraphsAndRelations :: Graph a b -> Graph a b -> R.Relation NodeId -> R.Relation EdgeId -> GraphMorphism a b fromGraphsAndRelations = GraphMorphism | Constructs a @GraphMorphism@ from two Graphs , and lists describing the node and edge mappings . fromGraphsAndLists :: Graph a b -> Graph a b -> [(NodeId, NodeId)] -> [(EdgeId, EdgeId)] -> GraphMorphism a b fromGraphsAndLists dom cod nodes edges = GraphMorphism dom cod nodeRelation edgeRelation where nodeRelation = R.fromLists (nodeIds dom) (nodeIds cod) nodes edgeRelation = R.fromLists (edgeIds dom) (edgeIds cod) edges -- | The inverse graph morphism. invertGraphMorphism :: GraphMorphism a b -> GraphMorphism a b invertGraphMorphism (GraphMorphism dom cod nm em) = GraphMorphism cod dom (R.inverseRelation nm) (R.inverseRelation em) -- | Set a new codomain. updateCodomain :: Graph a b -> GraphMorphism a b -> GraphMorphism a b updateCodomain g gm = gm { codomainGraph = g } -- | Set a new domain. updateDomain :: Graph a b -> GraphMorphism a b -> GraphMorphism a b updateDomain g gm = gm { domainGraph = g } -- | Add a mapping between both nodes into the morphism. If @ln@ is already -- mapped, or neither nodes are in their respective graphs, return the original -- morphism. updateNodes :: NodeId -> NodeId -> GraphMorphism a b -> GraphMorphism a b updateNodes ln gn morphism@(GraphMorphism l g nm em) | G.isNodeOf l ln && G.isNodeOf g gn && notMapped morphism ln = GraphMorphism l g (R.updateRelation ln gn nm) em | otherwise = morphism where notMapped m = isNothing . applyNodeId m -- | Add a mapping between both edges into the morphism. If @le@ is already -- mapped, or neither edges are in their respective graphs, return the original -- morphism. 
updateEdges :: EdgeId -> EdgeId -> GraphMorphism a b -> GraphMorphism a b updateEdges le ge morphism@(GraphMorphism l g nm em) | G.isEdgeOf l le && G.isEdgeOf g ge && notMapped morphism le = GraphMorphism l g nm (R.updateRelation le ge em) | otherwise = morphism where notMapped m = isNothing . applyEdgeId m -- | Remove an edge from the domain of the morphism removeEdgeFromDomain :: G.EdgeId -> GraphMorphism a b -> GraphMorphism a b removeEdgeFromDomain e gm = gm { domainGraph = removeEdge e (domainGraph gm) , edgeRelation = R.removeFromDomain e (edgeRelation gm) } -- | Remove an edge from the codomain of the morphism removeEdgeFromCodomain :: G.EdgeId -> GraphMorphism a b -> GraphMorphism a b removeEdgeFromCodomain e gm = gm { codomainGraph = G.removeEdge e (codomainGraph gm) , edgeRelation = R.removeFromCodomain e (edgeRelation gm) } -- | Remove a node from the domain of the morphism. -- Don't change the morphism if there were edges incident to the node. removeNodeFromDomain :: G.NodeId -> GraphMorphism a b -> GraphMorphism a b removeNodeFromDomain n gm = if currentDomain == updatedDomain then gm else updatedGM where currentDomain = domainGraph gm updatedDomain = removeNode n currentDomain updatedGM = gm { domainGraph = updatedDomain , nodeRelation = R.removeFromDomain n $ nodeRelation gm } -- | Remove a node from the domain of the morphism -- It does not verify if the node has incident edges, thus it may generate invalid graph morphisms. removeNodeFromDomainForced :: G.NodeId -> GraphMorphism a b -> GraphMorphism a b removeNodeFromDomainForced n gm = gm { domainGraph = removeNodeForced n (domainGraph gm) , nodeRelation = R.removeFromDomain n (nodeRelation gm) } -- | Remove a node from the codomain of the morphism -- Don't change the morphism if there were edges incident to the node. removeNodeFromCodomain :: G.NodeId -> GraphMorphism a b -> GraphMorphism a b removeNodeFromCodomain n gm = if currentCodomain == updatedCodomain then gm else updatedGM where currentCodomain = codomainGraph gm updatedCodomain = removeNode n currentCodomain updatedGM = gm { codomainGraph = updatedCodomain , nodeRelation = R.removeFromCodomain n $ nodeRelation gm } -- | Inserts nodes in a graph morphism, if the nodes do not exist, they are created updateNodeRelation :: G.NodeId -> G.NodeId -> GraphMorphism (Maybe a) b -> GraphMorphism (Maybe a) b updateNodeRelation n1 n2 gm = gm { domainGraph = G.insertNode n1 (domainGraph gm) , codomainGraph = G.insertNode n2 (codomainGraph gm) , nodeRelation = R.updateRelation n1 n2 (nodeRelation gm) } -- | Modifies a graph morphism, mapping edge e1 to edge e2. It assumes both edges already exist. updateEdgeRelation :: G.EdgeId -> G.EdgeId -> GraphMorphism a b -> GraphMorphism a b updateEdgeRelation e1 e2 gm = gm { edgeRelation = R.updateRelation e1 e2 (edgeRelation gm) } -- | This function adds an edge e1 (with source s1 and target t1) to the domain of the morphism, and associate it to e2 -- It assumes s1, t1, e2 already exist, and that e1 does not exist. createEdgeOnDomain :: G.EdgeId -> G.NodeId -> G.NodeId -> G.EdgeId -> GraphMorphism a (Maybe b) -> GraphMorphism a (Maybe b) createEdgeOnDomain e1 s1 t1 e2 gm = gm { domainGraph = G.insertEdge e1 s1 t1 (domainGraph gm) , edgeRelation = R.updateRelation e1 e2 (edgeRelation gm) } -- | This function adds an edge e2 (with source s2 and target t2) to the codomain of the morphism. 
It assumes that s2,t2 exist , and that e2 does not exist createEdgeOnCodomain :: G.EdgeId -> G.NodeId -> G.NodeId -> GraphMorphism a (Maybe b) -> GraphMorphism a (Maybe b) createEdgeOnCodomain e2 s2 t2 gm = gm { codomainGraph = G.insertEdge e2 s2 t2 (codomainGraph gm) , edgeRelation = R.insertOnCodomain e2 (edgeRelation gm) } -- | This function adds an edge e1 (with source s1 and target t1) to the domain of the morphism, and associate it to e2 -- It assumes s1, t1, e2 already exist, and that e1 does not exist. createNodeOnDomain :: G.NodeId -> G.NodeId -> GraphMorphism (Maybe a) b -> GraphMorphism (Maybe a) b createNodeOnDomain n1 n2 gm = gm { domainGraph = G.insertNode n1 (domainGraph gm) , nodeRelation = R.updateRelation n1 n2 (nodeRelation gm) } -- | This function adds an edge e2 (with source s2 and target t2) to the codomain of the morphism. It assumes that s2,t2 exist , and that e2 does not exist createNodeOnCodomain :: G.NodeId -> GraphMorphism (Maybe a) b -> GraphMorphism (Maybe a) b createNodeOnCodomain n2 gm = gm { codomainGraph = G.insertNode n2 (codomainGraph gm) , nodeRelation = R.insertOnCodomain n2 (nodeRelation gm) } instance Valid (GraphMorphism a b) where validate morphism@(GraphMorphism dom cod nodeMap edgeMap) = mconcat [ withContext "domain" (validate dom) , withContext "codomain" (validate cod) , ensure (R.isFunctional nodeMap) "The relation of nodes is not functional" , ensure (R.isTotal nodeMap) "The function of nodes is not total on its domain" , ensure (R.isFunctional edgeMap) "The relation of edges is not functional" , ensure (R.isTotal edgeMap) "The function of edges is not total on its domain" , ensure incidencePreserved "The morphism doesn't preserve incidence/adjacency" ] where incidencePreserved = all (\e@(Edge _ domSrc domTgt _) -> (Just . sourceId =<< applyEdge morphism e) == applyNodeId morphism domSrc && (Just . targetId =<< applyEdge morphism e) == applyNodeId morphism domTgt) (G.edges dom)
null
https://raw.githubusercontent.com/Verites/verigraph/754ec08bf4a55ea7402d8cd0705e58b1d2c9cd67/src/library/Data/Graphs/Morphism.hs
haskell
* Types
* Construction
* Transformation
* Query
| Return the nodes ids of the codomain which are not in the image of the given morphism.
| Return the edges of the codomain which are not in the image of the given morphism.
| Return the edge ids of the codomain which are not in the image of the given morphism.
| Return the node to which @ln@ gets mapped.
| Return the nodeId to which @ln@ gets mapped.
| Return the edge to which @le@ gets mapped.
| Return the edgeId to which @le@ gets mapped.
| Return the node to which @le@ gets mapped or error in the case of undefined
| Return the nodeId to which @le@ gets mapped or error in the case of undefined
| Return the edge to which @le@ gets mapped or error in the case of undefined
| Return the edgeId to which @le@ gets mapped or error in the case of undefined
| Construct a graph morphism
| The inverse graph morphism.
| Set a new codomain.
| Set a new domain.
| Add a mapping between both nodes into the morphism. If @ln@ is already mapped, or neither nodes are in their respective graphs, return the original morphism.
| Add a mapping between both edges into the morphism. If @le@ is already mapped, or neither edges are in their respective graphs, return the original morphism.
| Remove an edge from the domain of the morphism
| Remove an edge from the codomain of the morphism
| Remove a node from the domain of the morphism. Don't change the morphism if there were edges incident to the node.
| Remove a node from the domain of the morphism It does not verify if the node has incident edges, thus it may generate invalid graph morphisms.
| Remove a node from the codomain of the morphism Don't change the morphism if there were edges incident to the node.
| Inserts nodes in a graph morphism, if the nodes do not exist, they are created
| Modifies a graph morphism, mapping edge e1 to edge e2. It assumes both edges already exist.
| This function adds an edge e1 (with source s1 and target t1) to the domain of the morphism, and associate it to e2 It assumes s1, t1, e2 already exist, and that e1 does not exist.
| This function adds an edge e2 (with source s2 and target t2) to the codomain of the morphism.
| This function adds an edge e1 (with source s1 and target t1) to the domain of the morphism, and associate it to e2 It assumes s1, t1, e2 already exist, and that e1 does not exist.
| This function adds an edge e2 (with source s2 and target t2) to the codomain of the morphism.
# LANGUAGE TypeFamilies # module Data.Graphs.Morphism ( GraphMorphism(..) , compose , Data.Graphs.Morphism.empty , buildGraphMorphism , fromGraphsAndRelations , fromGraphsAndLists , invertGraphMorphism , updateCodomain , updateDomain , updateNodes , updateNodeRelation , updateEdgeRelation , updateEdges , removeEdgeFromDomain , removeEdgeFromCodomain , removeNodeFromDomain , removeNodeFromDomainForced , removeNodeFromCodomain , createEdgeOnDomain , createEdgeOnCodomain , createNodeOnDomain , createNodeOnCodomain , applyNode , applyNodeUnsafe , applyNodeId , applyNodeIdUnsafe , applyEdge , applyEdgeUnsafe , applyEdgeId , applyEdgeIdUnsafe , orphanNodeIds , orphanEdgeIds , orphanEdges ) where import Control.Arrow import Data.Function (on) import qualified Data.List as List import Base.Valid import Data.Graphs as G import Data.Maybe (fromMaybe, isNothing) import qualified Data.Relation as R data GraphMorphism a b = GraphMorphism { domainGraph :: Graph a b , codomainGraph :: Graph a b , nodeRelation :: R.Relation G.NodeId , edgeRelation :: R.Relation G.EdgeId } compose :: GraphMorphism a b -> GraphMorphism a b -> GraphMorphism a b compose m2 m1 = GraphMorphism (domainGraph m1) (codomainGraph m2) (R.compose (nodeRelation m1) (nodeRelation m2)) (R.compose (edgeRelation m1) (edgeRelation m2)) instance Eq (GraphMorphism a b) where m1 == m2 = domainGraph m1 == domainGraph m2 && codomainGraph m1 == codomainGraph m2 && nodeRelation m1 == nodeRelation m2 && edgeRelation m1 == edgeRelation m2 instance Show (GraphMorphism a b) where show m = concat $ "\nNode mappings: \n" : (map showNode . List.sort) (G.nodeIds $ domainGraph m) ++ "\nEdge mappings: \n" : (map showEdge . List.sortBy (compare `on` edgeId)) (G.edges $ domainGraph m) where showNode n = show n ++ " --> " ++ show (applyNodeId m n) ++ "\n" showEdge (Edge e srcId tgtId _) = show e ++ " --> " ++ show (applyEdgeId m e) ++ " (from: " ++ show srcId ++ " to:" ++ show tgtId ++ ")\n" orphanNodeIds :: GraphMorphism a b -> [G.NodeId] orphanNodeIds gm = R.orphans (nodeRelation gm) orphanEdges :: GraphMorphism a b -> [G.Edge b] orphanEdges gm = map idToEdge (R.orphans (edgeRelation gm)) where idToEdge id = fromMaybe (error "orphanEdges: EdgeId is not in graph") (lookupEdge id (codomainGraph gm)) orphanEdgeIds :: GraphMorphism a b -> [G.EdgeId] orphanEdgeIds gm = R.orphans (edgeRelation gm) applyNode :: GraphMorphism a b -> G.Node a -> Maybe (G.Node a) applyNode m ln = case applyNodeId m (nodeId ln) of Just x -> lookupNode x (codomainGraph m) Nothing -> Nothing applyNodeId :: GraphMorphism a b -> G.NodeId -> Maybe G.NodeId applyNodeId m ln = case R.apply (nodeRelation m) ln of (x:_) -> Just x _ -> Nothing applyEdge :: GraphMorphism a b -> G.Edge b -> Maybe (G.Edge b) applyEdge m le = case applyEdgeId m (edgeId le) of Just x -> lookupEdge x (codomainGraph m) Nothing -> Nothing applyEdgeId :: GraphMorphism a b -> G.EdgeId -> Maybe G.EdgeId applyEdgeId m le = case R.apply (edgeRelation m) le of (x:_) -> Just x _ -> Nothing applyNodeUnsafe :: GraphMorphism a b -> G.Node a -> G.Node a applyNodeUnsafe morph n = fromMaybe (error "Error, apply nodeId in a non total morphism") $ applyNode morph n applyNodeIdUnsafe :: GraphMorphism a b -> NodeId -> NodeId applyNodeIdUnsafe morph n = fromMaybe (error "Error, apply nodeId in a non total morphism") $ applyNodeId morph n applyEdgeUnsafe :: GraphMorphism a b -> G.Edge b -> G.Edge b applyEdgeUnsafe morph e = fromMaybe (error "Error, apply edge in a non total morphism") $ applyEdge morph e applyEdgeIdUnsafe :: GraphMorphism 
a b -> EdgeId -> EdgeId applyEdgeIdUnsafe morph e = fromMaybe (error "Error, apply edgeId in a non total morphism") $ applyEdgeId morph e | An empty morphism between two graphs . empty :: Graph a b -> Graph a b -> GraphMorphism a b empty gA gB = GraphMorphism gA gB (R.empty (nodeIds gA) (nodeIds gB)) (R.empty (edgeIds gA) (edgeIds gB)) buildGraphMorphism :: Graph a b -> Graph a b -> [(Int,Int)] -> [(Int,Int)] -> GraphMorphism a b buildGraphMorphism gA gB n = foldr (uncurry updateEdges . (EdgeId *** EdgeId)) g where g = foldr (uncurry updateNodes . (NodeId *** NodeId)) (Data.Graphs.Morphism.empty gA gB) n | Constructs a @GraphMorphism@ from two Graphs , a node relation and a edge relation . fromGraphsAndRelations :: Graph a b -> Graph a b -> R.Relation NodeId -> R.Relation EdgeId -> GraphMorphism a b fromGraphsAndRelations = GraphMorphism | Constructs a @GraphMorphism@ from two Graphs , and lists describing the node and edge mappings . fromGraphsAndLists :: Graph a b -> Graph a b -> [(NodeId, NodeId)] -> [(EdgeId, EdgeId)] -> GraphMorphism a b fromGraphsAndLists dom cod nodes edges = GraphMorphism dom cod nodeRelation edgeRelation where nodeRelation = R.fromLists (nodeIds dom) (nodeIds cod) nodes edgeRelation = R.fromLists (edgeIds dom) (edgeIds cod) edges invertGraphMorphism :: GraphMorphism a b -> GraphMorphism a b invertGraphMorphism (GraphMorphism dom cod nm em) = GraphMorphism cod dom (R.inverseRelation nm) (R.inverseRelation em) updateCodomain :: Graph a b -> GraphMorphism a b -> GraphMorphism a b updateCodomain g gm = gm { codomainGraph = g } updateDomain :: Graph a b -> GraphMorphism a b -> GraphMorphism a b updateDomain g gm = gm { domainGraph = g } updateNodes :: NodeId -> NodeId -> GraphMorphism a b -> GraphMorphism a b updateNodes ln gn morphism@(GraphMorphism l g nm em) | G.isNodeOf l ln && G.isNodeOf g gn && notMapped morphism ln = GraphMorphism l g (R.updateRelation ln gn nm) em | otherwise = morphism where notMapped m = isNothing . applyNodeId m updateEdges :: EdgeId -> EdgeId -> GraphMorphism a b -> GraphMorphism a b updateEdges le ge morphism@(GraphMorphism l g nm em) | G.isEdgeOf l le && G.isEdgeOf g ge && notMapped morphism le = GraphMorphism l g nm (R.updateRelation le ge em) | otherwise = morphism where notMapped m = isNothing . 
applyEdgeId m removeEdgeFromDomain :: G.EdgeId -> GraphMorphism a b -> GraphMorphism a b removeEdgeFromDomain e gm = gm { domainGraph = removeEdge e (domainGraph gm) , edgeRelation = R.removeFromDomain e (edgeRelation gm) } removeEdgeFromCodomain :: G.EdgeId -> GraphMorphism a b -> GraphMorphism a b removeEdgeFromCodomain e gm = gm { codomainGraph = G.removeEdge e (codomainGraph gm) , edgeRelation = R.removeFromCodomain e (edgeRelation gm) } removeNodeFromDomain :: G.NodeId -> GraphMorphism a b -> GraphMorphism a b removeNodeFromDomain n gm = if currentDomain == updatedDomain then gm else updatedGM where currentDomain = domainGraph gm updatedDomain = removeNode n currentDomain updatedGM = gm { domainGraph = updatedDomain , nodeRelation = R.removeFromDomain n $ nodeRelation gm } removeNodeFromDomainForced :: G.NodeId -> GraphMorphism a b -> GraphMorphism a b removeNodeFromDomainForced n gm = gm { domainGraph = removeNodeForced n (domainGraph gm) , nodeRelation = R.removeFromDomain n (nodeRelation gm) } removeNodeFromCodomain :: G.NodeId -> GraphMorphism a b -> GraphMorphism a b removeNodeFromCodomain n gm = if currentCodomain == updatedCodomain then gm else updatedGM where currentCodomain = codomainGraph gm updatedCodomain = removeNode n currentCodomain updatedGM = gm { codomainGraph = updatedCodomain , nodeRelation = R.removeFromCodomain n $ nodeRelation gm } updateNodeRelation :: G.NodeId -> G.NodeId -> GraphMorphism (Maybe a) b -> GraphMorphism (Maybe a) b updateNodeRelation n1 n2 gm = gm { domainGraph = G.insertNode n1 (domainGraph gm) , codomainGraph = G.insertNode n2 (codomainGraph gm) , nodeRelation = R.updateRelation n1 n2 (nodeRelation gm) } updateEdgeRelation :: G.EdgeId -> G.EdgeId -> GraphMorphism a b -> GraphMorphism a b updateEdgeRelation e1 e2 gm = gm { edgeRelation = R.updateRelation e1 e2 (edgeRelation gm) } createEdgeOnDomain :: G.EdgeId -> G.NodeId -> G.NodeId -> G.EdgeId -> GraphMorphism a (Maybe b) -> GraphMorphism a (Maybe b) createEdgeOnDomain e1 s1 t1 e2 gm = gm { domainGraph = G.insertEdge e1 s1 t1 (domainGraph gm) , edgeRelation = R.updateRelation e1 e2 (edgeRelation gm) } It assumes that s2,t2 exist , and that e2 does not exist createEdgeOnCodomain :: G.EdgeId -> G.NodeId -> G.NodeId -> GraphMorphism a (Maybe b) -> GraphMorphism a (Maybe b) createEdgeOnCodomain e2 s2 t2 gm = gm { codomainGraph = G.insertEdge e2 s2 t2 (codomainGraph gm) , edgeRelation = R.insertOnCodomain e2 (edgeRelation gm) } createNodeOnDomain :: G.NodeId -> G.NodeId -> GraphMorphism (Maybe a) b -> GraphMorphism (Maybe a) b createNodeOnDomain n1 n2 gm = gm { domainGraph = G.insertNode n1 (domainGraph gm) , nodeRelation = R.updateRelation n1 n2 (nodeRelation gm) } It assumes that s2,t2 exist , and that e2 does not exist createNodeOnCodomain :: G.NodeId -> GraphMorphism (Maybe a) b -> GraphMorphism (Maybe a) b createNodeOnCodomain n2 gm = gm { codomainGraph = G.insertNode n2 (codomainGraph gm) , nodeRelation = R.insertOnCodomain n2 (nodeRelation gm) } instance Valid (GraphMorphism a b) where validate morphism@(GraphMorphism dom cod nodeMap edgeMap) = mconcat [ withContext "domain" (validate dom) , withContext "codomain" (validate cod) , ensure (R.isFunctional nodeMap) "The relation of nodes is not functional" , ensure (R.isTotal nodeMap) "The function of nodes is not total on its domain" , ensure (R.isFunctional edgeMap) "The relation of edges is not functional" , ensure (R.isTotal edgeMap) "The function of edges is not total on its domain" , ensure incidencePreserved "The morphism doesn't 
preserve incidence/adjacency" ] where incidencePreserved = all (\e@(Edge _ domSrc domTgt _) -> (Just . sourceId =<< applyEdge morphism e) == applyNodeId morphism domSrc && (Just . targetId =<< applyEdge morphism e) == applyNodeId morphism domTgt) (G.edges dom)
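The Morphism.hs module above is a pure API; here is a small, hypothetical sketch of driving it, not part of this dataset row. It assumes Data.Graphs exports empty, insertNode and insertEdge with the shapes Morphism.hs itself relies on, plus the NodeId/EdgeId wrappers used by buildGraphMorphism.

module Main where

import Data.Graphs as G
import qualified Data.Graphs.Morphism as GM

-- Two copies of the same two-node, one-edge graph: 1 --e1--> 2.
g1, g2 :: Graph (Maybe ()) (Maybe ())
g1 = insertEdge (EdgeId 1) (NodeId 1) (NodeId 2)
   $ insertNode (NodeId 2)
   $ insertNode (NodeId 1) G.empty
g2 = g1

-- An identity-like morphism built from Int pairs, as buildGraphMorphism expects.
m :: GM.GraphMorphism (Maybe ()) (Maybe ())
m = GM.buildGraphMorphism g1 g2 [(1, 1), (2, 2)] [(1, 1)]

main :: IO ()
main = do
  print (GM.applyNodeId m (NodeId 1)) -- expected: Just (NodeId 1)
  print (GM.orphanEdgeIds m)          -- expected: []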
9efc305f2e36c658c15a67694ce00224dfde90d086dda3dd18849fbe9e1dfcb0
programaker-project/Programaker-Core
automate_rest_api_templates_root.erl
%%% @doc %%% REST endpoint to manage knowledge collections. %%% @end -module(automate_rest_api_templates_root). -export([init/2]). -export([ allowed_methods/2 , options/2 , is_authorized/2 , content_types_provided/2 , content_types_accepted/2 , resource_exists/2 ]). -export([ accept_json_create_template/2 , to_json/2 ]). -define(UTILS, automate_rest_api_utils). -include("./records.hrl"). -include("../../automate_template_engine/src/records.hrl"). -record(state, { user_id :: binary() }). -spec init(_,_) -> {'cowboy_rest',_,_}. init(Req, _Opts) -> UserId = cowboy_req:binding(user_id, Req), {cowboy_rest, Req , #state{ user_id=UserId }}. resource_exists(Req, State) -> case cowboy_req:method(Req) of <<"POST">> -> { false, Req, State }; _ -> { true, Req, State} end. %% CORS options(Req, State) -> Req1 = automate_rest_api_cors:set_headers(Req), {ok, Req1, State}. %% Authentication -spec allowed_methods(cowboy_req:req(),_) -> {[binary()], cowboy_req:req(),_}. allowed_methods(Req, State) -> {[<<"GET">>, <<"POST">>, <<"OPTIONS">>], Req, State}. is_authorized(Req, State) -> Req1 = automate_rest_api_cors:set_headers(Req), case cowboy_req:method(Req1) of %% Don't do authentication if it's just asking for options <<"OPTIONS">> -> { true, Req1, State }; Method -> case cowboy_req:header(<<"authorization">>, Req, undefined) of undefined -> { {false, <<"Authorization header not found">>} , Req1, State }; X -> Scope = case Method of <<"GET">> -> list_templates; <<"POST">> -> create_templates end, #state{user_id=UserId} = State, case automate_rest_api_backend:is_valid_token_uid(X, Scope) of {true, UserId} -> { true, Req1, State }; {true, _} -> %% Non matching user id { { false, <<"Unauthorized to create a template here">>}, Req1, State }; false -> { { false, <<"Authorization not correct">>}, Req1, State } end end end. %% POST handler content_types_accepted(Req, State) -> {[{{<<"application">>, <<"json">>, []}, accept_json_create_template}], Req, State}. -spec accept_json_create_template(cowboy_req:req(), #state{}) -> {'true',cowboy_req:req(), #state{}}. accept_json_create_template(Req, State) -> #state{user_id=UserId} = State, {ok, Body, Req1} = ?UTILS:read_body(Req), Template = jiffy:decode(Body, [return_maps]), #{ <<"name">> := TemplateName, <<"content">> := TemplateContent } = Template, case automate_rest_api_backend:create_template({user, UserId}, TemplateName, TemplateContent) of { ok, TemplateId } -> Output = jiffy:encode(#{ <<"id">> => TemplateId }), Res1 = cowboy_req:set_resp_body(Output, Req1), Res2 = cowboy_req:delete_resp_header(<<"content-type">>, Res1), Res3 = cowboy_req:set_resp_header(<<"content-type">>, <<"application/json">>, Res2), { true, Res3, State } end. %% GET handler content_types_provided(Req, State) -> {[{{<<"application">>, <<"json">>, []}, to_json}], Req, State}. -spec to_json(cowboy_req:req(), #state{}) -> {binary(),cowboy_req:req(), #state{}}. to_json(Req, State=#state{user_id=UserId}) -> case automate_template_engine:list_templates({user, UserId}) of { ok, Templates } -> Output = jiffy:encode(lists:map(fun template_to_map/1, Templates)), Res1 = cowboy_req:delete_resp_header(<<"content-type">>, Req), Res2 = cowboy_req:set_resp_header(<<"content-type">>, <<"application/json">>, Res1), { Output, Res2, State } end. template_to_map(#template_entry{ id=Id , name=Name , owner={OwnerType, OwnerId} , content=_Content }) -> #{ id => Id , name => Name , owner => OwnerId , owner_full => #{ type => OwnerType, id => OwnerId } }.
null
https://raw.githubusercontent.com/programaker-project/Programaker-Core/ef10fc6d2a228b2096b121170c421f5c29f9f270/backend/apps/automate_rest_api/src/automate_rest_api_templates_root.erl
erlang
@doc REST endpoint to manage knowledge collections. @end
CORS
Authentication
Don't do authentication if it's just asking for options
Non matching user id
POST handler
GET handler
-module(automate_rest_api_templates_root). -export([init/2]). -export([ allowed_methods/2 , options/2 , is_authorized/2 , content_types_provided/2 , content_types_accepted/2 , resource_exists/2 ]). -export([ accept_json_create_template/2 , to_json/2 ]). -define(UTILS, automate_rest_api_utils). -include("./records.hrl"). -include("../../automate_template_engine/src/records.hrl"). -record(state, { user_id :: binary() }). -spec init(_,_) -> {'cowboy_rest',_,_}. init(Req, _Opts) -> UserId = cowboy_req:binding(user_id, Req), {cowboy_rest, Req , #state{ user_id=UserId }}. resource_exists(Req, State) -> case cowboy_req:method(Req) of <<"POST">> -> { false, Req, State }; _ -> { true, Req, State} end. options(Req, State) -> Req1 = automate_rest_api_cors:set_headers(Req), {ok, Req1, State}. -spec allowed_methods(cowboy_req:req(),_) -> {[binary()], cowboy_req:req(),_}. allowed_methods(Req, State) -> {[<<"GET">>, <<"POST">>, <<"OPTIONS">>], Req, State}. is_authorized(Req, State) -> Req1 = automate_rest_api_cors:set_headers(Req), case cowboy_req:method(Req1) of <<"OPTIONS">> -> { true, Req1, State }; Method -> case cowboy_req:header(<<"authorization">>, Req, undefined) of undefined -> { {false, <<"Authorization header not found">>} , Req1, State }; X -> Scope = case Method of <<"GET">> -> list_templates; <<"POST">> -> create_templates end, #state{user_id=UserId} = State, case automate_rest_api_backend:is_valid_token_uid(X, Scope) of {true, UserId} -> { true, Req1, State }; {true, _} -> { { false, <<"Unauthorized to create a template here">>}, Req1, State }; false -> { { false, <<"Authorization not correct">>}, Req1, State } end end end. content_types_accepted(Req, State) -> {[{{<<"application">>, <<"json">>, []}, accept_json_create_template}], Req, State}. -spec accept_json_create_template(cowboy_req:req(), #state{}) -> {'true',cowboy_req:req(), #state{}}. accept_json_create_template(Req, State) -> #state{user_id=UserId} = State, {ok, Body, Req1} = ?UTILS:read_body(Req), Template = jiffy:decode(Body, [return_maps]), #{ <<"name">> := TemplateName, <<"content">> := TemplateContent } = Template, case automate_rest_api_backend:create_template({user, UserId}, TemplateName, TemplateContent) of { ok, TemplateId } -> Output = jiffy:encode(#{ <<"id">> => TemplateId }), Res1 = cowboy_req:set_resp_body(Output, Req1), Res2 = cowboy_req:delete_resp_header(<<"content-type">>, Res1), Res3 = cowboy_req:set_resp_header(<<"content-type">>, <<"application/json">>, Res2), { true, Res3, State } end. content_types_provided(Req, State) -> {[{{<<"application">>, <<"json">>, []}, to_json}], Req, State}. -spec to_json(cowboy_req:req(), #state{}) -> {binary(),cowboy_req:req(), #state{}}. to_json(Req, State=#state{user_id=UserId}) -> case automate_template_engine:list_templates({user, UserId}) of { ok, Templates } -> Output = jiffy:encode(lists:map(fun template_to_map/1, Templates)), Res1 = cowboy_req:delete_resp_header(<<"content-type">>, Req), Res2 = cowboy_req:set_resp_header(<<"content-type">>, <<"application/json">>, Res1), { Output, Res2, State } end. template_to_map(#template_entry{ id=Id , name=Name , owner={OwnerType, OwnerId} , content=_Content }) -> #{ id => Id , name => Name , owner => OwnerId , owner_full => #{ type => OwnerType, id => OwnerId } }.
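A minimal, hypothetical sketch (not taken from this repository) of how such a cowboy REST handler gets mounted; the module name, route path and port are assumptions, the only real requirement being the :user_id binding that init/2 reads.

-module(example_http).
-export([start/0]).

start() ->
    %% Route with a :user_id binding, dispatched to the handler above.
    Dispatch = cowboy_router:compile(
        [{'_', [{"/api/v0/users/:user_id/templates",
                 automate_rest_api_templates_root, []}]}]),
    {ok, _} = cowboy:start_clear(example_http_listener,
                                 [{port, 8080}],
                                 #{env => #{dispatch => Dispatch}}).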
9020c3b4f1cdd1af1fdac0960a57faf76fc7d419a3446f24ba68fc973778d64b
NorfairKing/tickler
ItemUUID.hs
module Tickler.Data.ItemUUID
  ( ItemUUID,
    module Data.UUID.Typed,
  )
where

import Data.UUID.Typed
import Tickler.Data.UUID ()

type ItemUUID = UUID Item

data Item
null
https://raw.githubusercontent.com/NorfairKing/tickler/8f0d984c2f4e57a76eed95cad9ed7615433dc39d/tickler-data/src/Tickler/Data/ItemUUID.hs
haskell
module Tickler.Data.ItemUUID
  ( ItemUUID,
    module Data.UUID.Typed,
  )
where

import Data.UUID.Typed
import Tickler.Data.UUID ()

type ItemUUID = UUID Item

data Item
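A short, hypothetical sketch of why the phantom parameter exists, not part of this dataset row: a UUID Item cannot be confused with a UUID tagged for another entity, so passing the wrong identifier fails to type-check. It assumes nextRandomUUID from Data.UUID.Typed (re-exported above) as the generator.

module Example where

import Tickler.Data.ItemUUID

-- Assumed generator from Data.UUID.Typed, specialised to item identifiers.
newItemUUID :: IO ItemUUID
newItemUUID = nextRandomUUID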
2564c9218402fbdb355ee78f89ce066a5c3355c49e2ee9c12efd4d167b996d20
avsm/platform
opamSwitchCommand.ml
(**************************************************************************) (* *) Copyright 2012 - 2015 OCamlPro Copyright 2012 INRIA (* *) (* All rights reserved. This file is distributed under the terms of the *) GNU Lesser General Public License version 2.1 , with the special (* exception on linking described in the file LICENSE. *) (* *) (**************************************************************************) open OpamTypes open OpamStateTypes open OpamPackage.Set.Op open OpamStd.Op module S = OpamFile.SwitchSelections let log fmt = OpamConsole.log "SWITCH" fmt let slog = OpamConsole.slog let list gt ~print_short = log "list"; let gt = OpamGlobalState.fix_switch_list gt in if print_short then List.iter (OpamConsole.msg "%s\n" @* OpamSwitch.to_string) (List.sort compare (OpamFile.Config.installed_switches gt.config)) else let installed_switches = OpamGlobalState.fold_switches (fun sw sel acc -> let opams = OpamPackage.Set.fold (fun nv acc -> match OpamFile.OPAM.read_opt (OpamPath.Switch.installed_opam gt.root sw nv) with | Some opam -> OpamPackage.Map.add nv opam acc | None -> acc) sel.sel_compiler OpamPackage.Map.empty in let ifempty default m = if OpamPackage.Map.is_empty m then default else m in let comp = OpamPackage.Map.filter (fun nv _ -> OpamPackage.Set.mem nv sel.sel_roots) opams |> ifempty opams in let comp = OpamPackage.Map.filter (fun _ opam -> OpamFile.OPAM.has_flag Pkgflag_Compiler opam) comp |> ifempty comp in let conf = OpamFile.Switch_config.read_opt (OpamPath.Switch.switch_config gt.root sw) in let descr = match conf with | Some c -> c.OpamFile.Switch_config.synopsis | None -> OpamConsole.colorise `red "Missing config file" in OpamSwitch.Map.add sw (OpamPackage.keys comp, descr) acc) gt OpamSwitch.Map.empty in let list = OpamSwitch.Map.bindings installed_switches in let table = List.map (OpamConsole.colorise `blue) ["#"; "switch"; "compiler"; "description" ] :: List.map (fun (switch, (packages, descr)) -> let current = Some switch = OpamStateConfig.get_switch_opt () in List.map (if current then OpamConsole.colorise `bold else fun s -> s) [ if current then OpamConsole.(utf8_symbol Symbols.rightwards_arrow "->") else ""; OpamSwitch.to_string switch; OpamStd.List.concat_map "," (OpamConsole.colorise `yellow @* OpamPackage.to_string) (OpamPackage.Set.elements packages); descr ]) list in OpamConsole.print_table stdout ~sep:" " (OpamStd.Format.align_table table); match OpamStateConfig.get_switch_opt (), OpamStateConfig.(!r.switch_from) with | None, _ when OpamFile.Config.installed_switches gt.config <> [] -> OpamConsole.note "No switch is currently set, you should use 'opam switch <switch>' \ to set an active switch" | Some switch, `Env -> let sys = OpamFile.Config.switch gt.config in if not (OpamGlobalState.switch_exists gt switch) then (OpamConsole.msg "\n"; OpamConsole.warning "The OPAMSWITCH variable does not point to a valid switch: %S" (OpamSwitch.to_string switch)) else if sys <> Some switch then (OpamConsole.msg "\n"; OpamConsole.note "Current switch is set locally through the OPAMSWITCH variable.\n\ The current global system switch is %s." (OpamStd.Option.to_string ~none:"unset" (fun s -> OpamConsole.colorise `bold (OpamSwitch.to_string s)) sys)) else (match OpamStateConfig.get_current_switch_from_cwd gt.root with | None -> () | Some sw -> OpamConsole.msg "\n"; OpamConsole.note "Current switch is set globally and through the OPAMSWITCH variable.\n\ Thus, the local switch found at %s was ignored." 
(OpamConsole.colorise `bold (OpamSwitch.to_string sw))) | Some switch, `Default when not (OpamGlobalState.switch_exists gt switch) -> OpamConsole.msg "\n"; OpamConsole.warning "The currently selected switch (%S) is invalid.\n%s" (OpamSwitch.to_string switch) (if OpamSwitch.is_external switch then "Stale '_opam' directory or link ?" else "Fix the selection with 'opam switch set SWITCH'.") | Some switch, `Default when OpamSwitch.is_external switch -> OpamConsole.msg "\n"; OpamConsole.note "Current switch has been selected based on the current directory.\n\ The current global system switch is %s." (OpamStd.Option.to_string ~none:"unset" (fun s -> OpamConsole.colorise `bold (OpamSwitch.to_string s)) (OpamFile.Config.switch gt.config)); if not (OpamEnv.is_up_to_date_switch gt.root switch) then OpamConsole.warning "The environment is not in sync with the current switch.\n\ You should run: %s" (OpamEnv.eval_string gt (Some switch)) | Some switch, `Default -> if not (OpamEnv.is_up_to_date_switch gt.root switch) then (OpamConsole.msg "\n"; OpamConsole.warning "The environment is not in sync with the current switch.\n\ You should run: %s" (OpamEnv.eval_string gt (Some switch))) | _ -> () let clear_switch ?(keep_debug=false) gt switch = let module C = OpamFile.Config in let config = gt.config in let config = C.with_installed_switches (List.filter ((<>) switch) (C.installed_switches config)) config in let config = if C.switch config = Some switch then C.with_switch_opt None config else config in let gt = { gt with config } in OpamGlobalState.write gt; let comp_dir = OpamPath.Switch.root gt.root switch in if keep_debug && (OpamClientConfig.(!r.keep_build_dir) || (OpamConsole.debug ())) then (OpamConsole.note "Keeping %s despite errors (debug mode), \ you may want to remove it by hand" (OpamFilename.Dir.to_string comp_dir); gt) else try OpamFilename.rmdir comp_dir; gt with OpamSystem.Internal_error _ -> gt let remove gt ?(confirm = true) switch = log "remove switch=%a" (slog OpamSwitch.to_string) switch; if not (OpamGlobalState.switch_exists gt switch) then ( OpamConsole.msg "The compiler switch %s does not exist.\n" (OpamSwitch.to_string switch); OpamStd.Sys.exit_because `Not_found; ); if not confirm || OpamConsole.confirm "Switch %s and all its packages will be wiped. Are you sure?" (OpamSwitch.to_string switch) then clear_switch gt switch else gt let install_compiler_packages t atoms = (* install the compiler packages *) if atoms = [] then t else let roots = OpamPackage.Name.Set.of_list (List.map fst atoms) in let not_found = OpamPackage.Name.Set.diff roots @@ OpamPackage.names_of_packages @@ OpamPackage.packages_of_names t.packages roots in if not (OpamPackage.Name.Set.is_empty not_found) then OpamConsole.error_and_exit `Not_found "No packages %s found." 
(OpamPackage.Name.Set.to_string not_found); let solution = OpamSolution.resolve t Switch ~orphans:OpamPackage.Set.empty ~requested:roots { wish_install = []; wish_remove = []; wish_upgrade = atoms; criteria = `Default; extra_attributes = []; } in let solution = match solution with | Success s -> s | Conflicts cs -> OpamConsole.error_and_exit `No_solution "Could not resolve set of base packages:\n%s" (OpamCudf.string_of_conflict t.packages (OpamSwitchState.unavailable_reason t) cs); in let () = match OpamSolver.stats solution with | { s_install = _; s_reinstall = 0; s_upgrade = 0; s_downgrade=0; s_remove = 0 } -> () | stats -> OpamConsole.error_and_exit `No_solution "Inconsistent resolution of base package installs:\n%s" (OpamSolver.string_of_stats stats) in let to_install_pkgs = OpamSolver.new_packages solution in let base_comp = OpamPackage.packages_of_names to_install_pkgs roots in let non_comp = OpamPackage.Set.filter (fun nv -> not (OpamFile.OPAM.has_flag Pkgflag_Compiler (OpamSwitchState.opam t nv))) base_comp in if not (OpamPackage.Set.is_empty non_comp) && not (OpamConsole.confirm ~default:false "Packages %s don't have the 'compiler' flag set. Are you sure \ you want to set them as the compiler base for this switch?" (OpamPackage.Set.to_string non_comp)) then OpamConsole.error_and_exit `Aborted "Aborted installation of non-compiler packages \ as switch base."; let t = if t.switch_config.OpamFile.Switch_config.synopsis = "" then let synopsis = match OpamPackage.Set.elements base_comp with | [] -> OpamSwitch.to_string t.switch | [pkg] -> let open OpamStd.Option.Op in (OpamSwitchState.opam_opt t pkg >>= OpamFile.OPAM.synopsis) +! OpamPackage.to_string pkg | pkgs -> OpamStd.List.concat_map " " OpamPackage.to_string pkgs in let switch_config = { t.switch_config with OpamFile.Switch_config.synopsis } in if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then OpamSwitchAction.install_switch_config t.switch_global.root t.switch switch_config; { t with switch_config } else t in let t = { t with compiler_packages = to_install_pkgs } in let t, result = OpamSolution.apply ~ask:OpamClientConfig.(!r.show) t Switch ~requested:roots solution in OpamSolution.check_solution ~quiet:OpamClientConfig.(not !r.show) t result; t let install gt ?rt ?synopsis ?repos ~update_config ~packages ?(local_compiler=false) switch = let update_config = update_config && not (OpamSwitch.is_external switch) in let old_switch_opt = OpamFile.Config.switch gt.config in let comp_dir = OpamPath.Switch.root gt.root switch in if OpamGlobalState.switch_exists gt switch then OpamConsole.error_and_exit `Bad_arguments "There already is an installed switch named %s" (OpamSwitch.to_string switch); if Sys.file_exists (OpamFilename.Dir.to_string comp_dir) then OpamConsole.error_and_exit `Bad_arguments "Directory %S already exists, please choose a different name" (OpamFilename.Dir.to_string comp_dir); let gt, st = if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then let gt = OpamSwitchAction.create_empty_switch gt ?synopsis ?repos switch in if update_config then gt, OpamSwitchAction.set_current_switch `Lock_write gt ?rt switch else let rt = match rt with | None -> OpamRepositoryState.load `Lock_none gt | Some rt -> ({ rt with repos_global = (gt :> unlocked global_state) } :> unlocked repos_state) in gt, OpamSwitchState.load `Lock_write gt rt switch else gt, let rt = match rt with | None -> OpamRepositoryState.load `Lock_none gt | Some rt -> (rt :> unlocked repos_state) in let st = 
OpamSwitchState.load_virtual ?repos_list:repos gt rt in let available_packages = lazy (OpamSwitchState.compute_available_packages gt switch (OpamSwitchAction.gen_switch_config gt.root ?repos switch) ~pinned:OpamPackage.Set.empty ~opams:st.opams) in { st with switch; available_packages } in let st = if OpamSwitch.is_external switch && local_compiler then OpamAuxCommands.autopin st ~quiet:true [`Dirname (OpamFilename.Dir.of_string (OpamSwitch.to_string switch))] |> fst else st in let packages = try OpamSolution.sanitize_atom_list st packages with e -> OpamStd.Exn.finalise e @@ fun () -> if update_config then (OpamEnv.clear_dynamic_init_scripts gt; OpamStd.Option.iter (ignore @* OpamSwitchAction.set_current_switch `Lock_write gt) old_switch_opt); ignore (OpamSwitchState.unlock st); ignore (clear_switch gt switch) in let gt = OpamGlobalState.unlock gt in try gt, install_compiler_packages st packages with e -> if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then ((try OpamStd.Exn.fatal e with e -> OpamConsole.warning "Switch %s left partially installed" (OpamSwitch.to_string switch); raise e); if OpamConsole.confirm "Switch initialisation failed: clean up? \ ('n' will leave the switch partially installed)" then begin ignore (OpamSwitchState.unlock st); ignore (clear_switch gt switch) end); raise e let switch lock gt switch = log "switch switch=%a" (slog OpamSwitch.to_string) switch; if OpamGlobalState.switch_exists gt switch then let st = if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then OpamSwitchAction.set_current_switch lock gt switch else let rt = OpamRepositoryState.load `Lock_none gt in OpamSwitchState.load lock gt rt switch in OpamEnv.check_and_print_env_warning st; st else let installed_switches = OpamFile.Config.installed_switches gt.config in OpamConsole.error_and_exit `Not_found "No switch %s is currently installed. 
Did you mean \ 'opam switch create %s'?\n\ Installed switches are:\n%s" (OpamSwitch.to_string switch) (OpamSwitch.to_string switch) (OpamStd.Format.itemize OpamSwitch.to_string installed_switches) let import_t ?ask importfile t = log "import switch"; let import_sel = importfile.OpamFile.SwitchExport.selections in let import_opams = importfile.OpamFile.SwitchExport.overlays in let opams = OpamPackage.Name.Map.fold (fun name opam opams -> let nv = OpamPackage.create name (OpamFile.OPAM.version opam) in OpamPackage.Map.add nv opam opams) import_opams t.opams in let packages = t.packages ++ OpamPackage.keys opams in let pinned = let names = OpamPackage.names_of_packages import_sel.sel_pinned in OpamPackage.Set.filter (fun nv -> not (OpamPackage.Name.Set.mem nv.name names)) t.pinned ++ import_sel.sel_pinned in let available = OpamSwitchState.compute_available_packages t.switch_global t.switch t.switch_config ~pinned ~opams in let compiler_packages, to_install = if OpamPackage.Set.is_empty t.compiler_packages then import_sel.sel_compiler %% available, import_sel.sel_installed else t.compiler_packages, import_sel.sel_installed -- import_sel.sel_compiler in let t = { t with available_packages = lazy available; packages; compiler_packages; pinned; opams; } in let unavailable_version, unavailable = let available_names = OpamPackage.names_of_packages available in OpamPackage.Set.partition (fun nv -> OpamPackage.Name.Set.mem nv.name available_names) (to_install -- available) in if not (OpamPackage.Set.is_empty unavailable_version) then OpamConsole.warning "These packages aren't available at the specified versions, \ version constraints have been discarded:\n%s" (OpamStd.Format.itemize OpamPackage.to_string (OpamPackage.Set.elements unavailable_version)); if not (OpamPackage.Set.is_empty unavailable) then OpamConsole.warning "These packages are unavailable, they have been ignored from \ the import file:\n%s" (OpamStd.Format.itemize OpamPackage.to_string (OpamPackage.Set.elements unavailable)); let t, solution = let to_import = OpamSolution.eq_atoms_of_packages (to_install %% available) @ OpamSolution.atoms_of_packages unavailable_version in let add_roots = OpamPackage.names_of_packages import_sel.sel_roots in OpamSolution.resolve_and_apply ?ask t Import ~requested:(OpamPackage.Name.Set.of_list @@ List.map fst to_import) ~add_roots ~orphans:OpamPackage.Set.empty { wish_install = to_import; wish_remove = []; wish_upgrade = []; criteria = `Default; extra_attributes = []; } in OpamSolution.check_solution t solution; if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then begin (* Put imported overlays in place *) OpamPackage.Set.iter (fun nv -> match OpamPackage.Name.Map.find_opt nv.name import_opams with | None -> () | Some opam -> OpamFilename.rmdir (OpamPath.Switch.Overlay.package t.switch_global.root t.switch nv.name); OpamFile.OPAM.write (OpamPath.Switch.Overlay.opam t.switch_global.root t.switch nv.name) opam) pinned; (* Save new pinnings *) let sel = OpamSwitchState.load_selections t.switch_global t.switch in S.write (OpamPath.Switch.selections t.switch_global.root t.switch) { sel with sel_pinned = pinned } end; t let read_overlays (read: package -> OpamFile.OPAM.t option) packages = OpamPackage.Set.fold (fun nv acc -> match read nv with | Some opam -> if OpamFile.OPAM.extra_files opam <> None then (OpamConsole.warning "Metadata of package %s uses a files/ subdirectory, it may not be \ re-imported correctly (skipping definition)" (OpamPackage.to_string nv); acc) else 
OpamPackage.Name.Map.add nv.name opam acc | None -> acc) packages OpamPackage.Name.Map.empty let export ?(full=false) filename = let switch = OpamStateConfig.get_switch () in let root = OpamStateConfig.(!r.root_dir) in let export = OpamFilename.with_flock `Lock_none (OpamPath.Switch.lock root switch) @@ fun _ -> let selections = S.safe_read (OpamPath.Switch.selections root switch) in let overlays = read_overlays (fun nv -> OpamFileTools.read_opam (OpamPath.Switch.Overlay.package root switch nv.name)) selections.sel_pinned in let overlays = if full then OpamPackage.Name.Map.union (fun a _ -> a) overlays @@ read_overlays (fun nv -> OpamFile.OPAM.read_opt (OpamPath.Switch.installed_opam root switch nv)) (selections.sel_installed -- selections.sel_pinned) else overlays in { OpamFile.SwitchExport.selections; overlays } in match filename with | None -> OpamFile.SwitchExport.write_to_channel stdout export | Some f -> OpamFile.SwitchExport.write f export let show () = OpamConsole.msg "%s\n" (OpamSwitch.to_string (OpamStateConfig.get_switch ())) let reinstall init_st = let switch = init_st.switch in log "reinstall switch=%a" (slog OpamSwitch.to_string) switch; let gt = init_st.switch_global in let switch_root = OpamPath.Switch.root gt.root switch in let opam_subdir = OpamPath.Switch.meta gt.root switch in let pkg_dirs = List.filter ((<>) opam_subdir) (OpamFilename.dirs switch_root) in List.iter OpamFilename.cleandir pkg_dirs; List.iter OpamFilename.remove (OpamFilename.files switch_root); OpamFilename.cleandir (OpamPath.Switch.config_dir gt.root switch); OpamFilename.cleandir (OpamPath.Switch.installed_opams gt.root switch); let st = { init_st with installed = OpamPackage.Set.empty; installed_roots = OpamPackage.Set.empty; reinstall = OpamPackage.Set.empty; } in import_t { OpamFile.SwitchExport. 
selections = OpamSwitchState.selections init_st; overlays = OpamPackage.Name.Map.empty; } st let import st filename = let import_str = match filename with | None -> OpamSystem.string_of_channel stdin | Some f -> OpamFilename.read (OpamFile.filename f) in let importfile = try OpamFile.SwitchExport.read_from_string ?filename import_str with OpamPp.Bad_format _ as e -> log "Error loading export file, trying the old file format"; try let selections = OpamFile.LegacyState.read_from_string import_str in { OpamFile.SwitchExport.selections; overlays = OpamPackage.Name.Map.empty } with e1 -> OpamStd.Exn.fatal e1; raise e in import_t importfile st let set_compiler st namesv = let name_unknown = List.filter (fun (name,_) -> not (OpamPackage.has_name st.packages name)) namesv in if name_unknown <> [] then OpamConsole.error_and_exit `Not_found "No packages by these names found: %s" (OpamStd.List.concat_map ", " (OpamPackage.Name.to_string @* fst) name_unknown); let packages = List.map (function | name, Some v -> OpamPackage.create name v | name, None -> OpamSwitchState.get_package st name) namesv in let uninstalled = List.filter (fun nv -> not (OpamPackage.Set.mem nv st.installed)) packages in if uninstalled <> [] then (OpamConsole.warning "These packages are not installed:\n%s" (OpamStd.List.concat_map ", " OpamPackage.to_string uninstalled); if not (OpamConsole.confirm "Set them as compilers at the proposed versions regardless?") then OpamStd.Sys.exit_because `Aborted); let st = { st with compiler_packages = OpamPackage.Set.of_list packages } in OpamSwitchAction.write_selections st; st let get_compiler_packages ?repos rt = let repos = match repos with | None -> OpamGlobalState.repos_list rt.repos_global | Some r -> r in let package_index = OpamRepositoryState.build_index rt repos in OpamPackage.Map.filter (fun _ opam -> OpamFile.OPAM.has_flag Pkgflag_Compiler opam && OpamFilter.eval_to_bool ~default:false (OpamPackageVar.resolve_global rt.repos_global) (OpamFile.OPAM.available opam)) package_index |> OpamPackage.keys let advise_compiler_dependencies rt opams compilers name atoms = let packages = OpamFormula.packages_of_atoms (OpamPackage.keys opams) atoms in let deps = List.map (fun nv -> let opam = OpamPackage.Map.find nv opams in OpamPackageVar.filter_depends_formula ~default:false ~env:(OpamPackageVar.resolve_switch_raw ~package:nv rt.repos_global (OpamSwitch.of_string name) (OpamFile.Switch_config.empty)) (OpamFile.OPAM.depends opam)) (OpamPackage.Set.elements packages) in let comp_deps = List.fold_left (fun acc f -> OpamPackage.Set.union acc (OpamFormula.packages compilers f)) OpamPackage.Set.empty deps in if not (OpamPackage.Set.is_empty comp_deps) then OpamConsole.formatted_msg "Package%s %s do%sn't have the 'compiler' flag set, and may not be \ suitable to set as switch base. 
You probably meant to choose among \ the following compiler implementations, which they depend \ upon:\n%s" (match atoms with [_] -> "" | _ -> "s") (OpamStd.List.concat_map ", " OpamFormula.short_string_of_atom atoms) (match atoms with [_] -> "es" | _ -> "") (OpamStd.Format.itemize OpamPackage.Name.to_string (OpamPackage.Name.Set.elements (OpamPackage.names_of_packages comp_deps))) let guess_compiler_package ?repos rt name = let repos = match repos with | None -> OpamGlobalState.repos_list rt.repos_global | Some r -> r in let opams = OpamRepositoryState.build_index rt repos |> OpamPackage.Map.filter (fun _ opam -> OpamFilter.eval_to_bool ~default:false (OpamPackageVar.resolve_global rt.repos_global) (OpamFile.OPAM.available opam)) in let compiler_packages = OpamPackage.Map.filter (fun _ -> OpamFile.OPAM.has_flag Pkgflag_Compiler) opams |> OpamPackage.keys in let no_compiler_error () = OpamConsole.error_and_exit `Not_found "No compiler matching '%s' found, use 'opam switch list-available' \ to see what is available, or use '--packages' to select packages \ explicitly." name in match OpamPackage.of_string_opt name with | Some nv when OpamPackage.Set.mem nv compiler_packages -> [OpamSolution.eq_atom_of_package nv] | Some nv when OpamRepositoryState.find_package_opt rt repos nv <> None -> advise_compiler_dependencies rt opams compiler_packages name [OpamSolution.eq_atom_of_package nv]; no_compiler_error () | _ -> let pkgname = try Some (OpamPackage.Name.of_string name) with Failure _ -> None in match pkgname with | Some pkgname when OpamPackage.has_name compiler_packages pkgname -> [pkgname, None] | Some pkgname when OpamPackage.Map.exists (fun nv _ -> OpamPackage.name nv = pkgname) opams -> advise_compiler_dependencies rt opams compiler_packages name [pkgname, None]; no_compiler_error () | _ -> let version = OpamPackage.Version.of_string name in let has_version = OpamPackage.Set.filter (fun nv -> nv.version = version) compiler_packages in try [OpamSolution.eq_atom_of_package (OpamPackage.Set.choose_one has_version)] with | Not_found -> no_compiler_error () | Failure _ -> OpamConsole.error_and_exit `Bad_arguments "Compiler selection '%s' is ambiguous. matching packages: %s" name (OpamPackage.Set.to_string has_version)
null
https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/opam-client.2.0.5%2Bdune/src/client/opamSwitchCommand.ml
ocaml
************************************************************************ All rights reserved. This file is distributed under the terms of the exception on linking described in the file LICENSE. ************************************************************************ install the compiler packages Put imported overlays in place Save new pinnings
Copyright 2012 - 2015 OCamlPro Copyright 2012 INRIA GNU Lesser General Public License version 2.1 , with the special open OpamTypes open OpamStateTypes open OpamPackage.Set.Op open OpamStd.Op module S = OpamFile.SwitchSelections let log fmt = OpamConsole.log "SWITCH" fmt let slog = OpamConsole.slog let list gt ~print_short = log "list"; let gt = OpamGlobalState.fix_switch_list gt in if print_short then List.iter (OpamConsole.msg "%s\n" @* OpamSwitch.to_string) (List.sort compare (OpamFile.Config.installed_switches gt.config)) else let installed_switches = OpamGlobalState.fold_switches (fun sw sel acc -> let opams = OpamPackage.Set.fold (fun nv acc -> match OpamFile.OPAM.read_opt (OpamPath.Switch.installed_opam gt.root sw nv) with | Some opam -> OpamPackage.Map.add nv opam acc | None -> acc) sel.sel_compiler OpamPackage.Map.empty in let ifempty default m = if OpamPackage.Map.is_empty m then default else m in let comp = OpamPackage.Map.filter (fun nv _ -> OpamPackage.Set.mem nv sel.sel_roots) opams |> ifempty opams in let comp = OpamPackage.Map.filter (fun _ opam -> OpamFile.OPAM.has_flag Pkgflag_Compiler opam) comp |> ifempty comp in let conf = OpamFile.Switch_config.read_opt (OpamPath.Switch.switch_config gt.root sw) in let descr = match conf with | Some c -> c.OpamFile.Switch_config.synopsis | None -> OpamConsole.colorise `red "Missing config file" in OpamSwitch.Map.add sw (OpamPackage.keys comp, descr) acc) gt OpamSwitch.Map.empty in let list = OpamSwitch.Map.bindings installed_switches in let table = List.map (OpamConsole.colorise `blue) ["#"; "switch"; "compiler"; "description" ] :: List.map (fun (switch, (packages, descr)) -> let current = Some switch = OpamStateConfig.get_switch_opt () in List.map (if current then OpamConsole.colorise `bold else fun s -> s) [ if current then OpamConsole.(utf8_symbol Symbols.rightwards_arrow "->") else ""; OpamSwitch.to_string switch; OpamStd.List.concat_map "," (OpamConsole.colorise `yellow @* OpamPackage.to_string) (OpamPackage.Set.elements packages); descr ]) list in OpamConsole.print_table stdout ~sep:" " (OpamStd.Format.align_table table); match OpamStateConfig.get_switch_opt (), OpamStateConfig.(!r.switch_from) with | None, _ when OpamFile.Config.installed_switches gt.config <> [] -> OpamConsole.note "No switch is currently set, you should use 'opam switch <switch>' \ to set an active switch" | Some switch, `Env -> let sys = OpamFile.Config.switch gt.config in if not (OpamGlobalState.switch_exists gt switch) then (OpamConsole.msg "\n"; OpamConsole.warning "The OPAMSWITCH variable does not point to a valid switch: %S" (OpamSwitch.to_string switch)) else if sys <> Some switch then (OpamConsole.msg "\n"; OpamConsole.note "Current switch is set locally through the OPAMSWITCH variable.\n\ The current global system switch is %s." (OpamStd.Option.to_string ~none:"unset" (fun s -> OpamConsole.colorise `bold (OpamSwitch.to_string s)) sys)) else (match OpamStateConfig.get_current_switch_from_cwd gt.root with | None -> () | Some sw -> OpamConsole.msg "\n"; OpamConsole.note "Current switch is set globally and through the OPAMSWITCH variable.\n\ Thus, the local switch found at %s was ignored." (OpamConsole.colorise `bold (OpamSwitch.to_string sw))) | Some switch, `Default when not (OpamGlobalState.switch_exists gt switch) -> OpamConsole.msg "\n"; OpamConsole.warning "The currently selected switch (%S) is invalid.\n%s" (OpamSwitch.to_string switch) (if OpamSwitch.is_external switch then "Stale '_opam' directory or link ?" 
else "Fix the selection with 'opam switch set SWITCH'.") | Some switch, `Default when OpamSwitch.is_external switch -> OpamConsole.msg "\n"; OpamConsole.note "Current switch has been selected based on the current directory.\n\ The current global system switch is %s." (OpamStd.Option.to_string ~none:"unset" (fun s -> OpamConsole.colorise `bold (OpamSwitch.to_string s)) (OpamFile.Config.switch gt.config)); if not (OpamEnv.is_up_to_date_switch gt.root switch) then OpamConsole.warning "The environment is not in sync with the current switch.\n\ You should run: %s" (OpamEnv.eval_string gt (Some switch)) | Some switch, `Default -> if not (OpamEnv.is_up_to_date_switch gt.root switch) then (OpamConsole.msg "\n"; OpamConsole.warning "The environment is not in sync with the current switch.\n\ You should run: %s" (OpamEnv.eval_string gt (Some switch))) | _ -> () let clear_switch ?(keep_debug=false) gt switch = let module C = OpamFile.Config in let config = gt.config in let config = C.with_installed_switches (List.filter ((<>) switch) (C.installed_switches config)) config in let config = if C.switch config = Some switch then C.with_switch_opt None config else config in let gt = { gt with config } in OpamGlobalState.write gt; let comp_dir = OpamPath.Switch.root gt.root switch in if keep_debug && (OpamClientConfig.(!r.keep_build_dir) || (OpamConsole.debug ())) then (OpamConsole.note "Keeping %s despite errors (debug mode), \ you may want to remove it by hand" (OpamFilename.Dir.to_string comp_dir); gt) else try OpamFilename.rmdir comp_dir; gt with OpamSystem.Internal_error _ -> gt let remove gt ?(confirm = true) switch = log "remove switch=%a" (slog OpamSwitch.to_string) switch; if not (OpamGlobalState.switch_exists gt switch) then ( OpamConsole.msg "The compiler switch %s does not exist.\n" (OpamSwitch.to_string switch); OpamStd.Sys.exit_because `Not_found; ); if not confirm || OpamConsole.confirm "Switch %s and all its packages will be wiped. Are you sure?" (OpamSwitch.to_string switch) then clear_switch gt switch else gt let install_compiler_packages t atoms = if atoms = [] then t else let roots = OpamPackage.Name.Set.of_list (List.map fst atoms) in let not_found = OpamPackage.Name.Set.diff roots @@ OpamPackage.names_of_packages @@ OpamPackage.packages_of_names t.packages roots in if not (OpamPackage.Name.Set.is_empty not_found) then OpamConsole.error_and_exit `Not_found "No packages %s found." 
(OpamPackage.Name.Set.to_string not_found); let solution = OpamSolution.resolve t Switch ~orphans:OpamPackage.Set.empty ~requested:roots { wish_install = []; wish_remove = []; wish_upgrade = atoms; criteria = `Default; extra_attributes = []; } in let solution = match solution with | Success s -> s | Conflicts cs -> OpamConsole.error_and_exit `No_solution "Could not resolve set of base packages:\n%s" (OpamCudf.string_of_conflict t.packages (OpamSwitchState.unavailable_reason t) cs); in let () = match OpamSolver.stats solution with | { s_install = _; s_reinstall = 0; s_upgrade = 0; s_downgrade=0; s_remove = 0 } -> () | stats -> OpamConsole.error_and_exit `No_solution "Inconsistent resolution of base package installs:\n%s" (OpamSolver.string_of_stats stats) in let to_install_pkgs = OpamSolver.new_packages solution in let base_comp = OpamPackage.packages_of_names to_install_pkgs roots in let non_comp = OpamPackage.Set.filter (fun nv -> not (OpamFile.OPAM.has_flag Pkgflag_Compiler (OpamSwitchState.opam t nv))) base_comp in if not (OpamPackage.Set.is_empty non_comp) && not (OpamConsole.confirm ~default:false "Packages %s don't have the 'compiler' flag set. Are you sure \ you want to set them as the compiler base for this switch?" (OpamPackage.Set.to_string non_comp)) then OpamConsole.error_and_exit `Aborted "Aborted installation of non-compiler packages \ as switch base."; let t = if t.switch_config.OpamFile.Switch_config.synopsis = "" then let synopsis = match OpamPackage.Set.elements base_comp with | [] -> OpamSwitch.to_string t.switch | [pkg] -> let open OpamStd.Option.Op in (OpamSwitchState.opam_opt t pkg >>= OpamFile.OPAM.synopsis) +! OpamPackage.to_string pkg | pkgs -> OpamStd.List.concat_map " " OpamPackage.to_string pkgs in let switch_config = { t.switch_config with OpamFile.Switch_config.synopsis } in if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then OpamSwitchAction.install_switch_config t.switch_global.root t.switch switch_config; { t with switch_config } else t in let t = { t with compiler_packages = to_install_pkgs } in let t, result = OpamSolution.apply ~ask:OpamClientConfig.(!r.show) t Switch ~requested:roots solution in OpamSolution.check_solution ~quiet:OpamClientConfig.(not !r.show) t result; t let install gt ?rt ?synopsis ?repos ~update_config ~packages ?(local_compiler=false) switch = let update_config = update_config && not (OpamSwitch.is_external switch) in let old_switch_opt = OpamFile.Config.switch gt.config in let comp_dir = OpamPath.Switch.root gt.root switch in if OpamGlobalState.switch_exists gt switch then OpamConsole.error_and_exit `Bad_arguments "There already is an installed switch named %s" (OpamSwitch.to_string switch); if Sys.file_exists (OpamFilename.Dir.to_string comp_dir) then OpamConsole.error_and_exit `Bad_arguments "Directory %S already exists, please choose a different name" (OpamFilename.Dir.to_string comp_dir); let gt, st = if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then let gt = OpamSwitchAction.create_empty_switch gt ?synopsis ?repos switch in if update_config then gt, OpamSwitchAction.set_current_switch `Lock_write gt ?rt switch else let rt = match rt with | None -> OpamRepositoryState.load `Lock_none gt | Some rt -> ({ rt with repos_global = (gt :> unlocked global_state) } :> unlocked repos_state) in gt, OpamSwitchState.load `Lock_write gt rt switch else gt, let rt = match rt with | None -> OpamRepositoryState.load `Lock_none gt | Some rt -> (rt :> unlocked repos_state) in let st = 
OpamSwitchState.load_virtual ?repos_list:repos gt rt in let available_packages = lazy (OpamSwitchState.compute_available_packages gt switch (OpamSwitchAction.gen_switch_config gt.root ?repos switch) ~pinned:OpamPackage.Set.empty ~opams:st.opams) in { st with switch; available_packages } in let st = if OpamSwitch.is_external switch && local_compiler then OpamAuxCommands.autopin st ~quiet:true [`Dirname (OpamFilename.Dir.of_string (OpamSwitch.to_string switch))] |> fst else st in let packages = try OpamSolution.sanitize_atom_list st packages with e -> OpamStd.Exn.finalise e @@ fun () -> if update_config then (OpamEnv.clear_dynamic_init_scripts gt; OpamStd.Option.iter (ignore @* OpamSwitchAction.set_current_switch `Lock_write gt) old_switch_opt); ignore (OpamSwitchState.unlock st); ignore (clear_switch gt switch) in let gt = OpamGlobalState.unlock gt in try gt, install_compiler_packages st packages with e -> if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then ((try OpamStd.Exn.fatal e with e -> OpamConsole.warning "Switch %s left partially installed" (OpamSwitch.to_string switch); raise e); if OpamConsole.confirm "Switch initialisation failed: clean up? \ ('n' will leave the switch partially installed)" then begin ignore (OpamSwitchState.unlock st); ignore (clear_switch gt switch) end); raise e let switch lock gt switch = log "switch switch=%a" (slog OpamSwitch.to_string) switch; if OpamGlobalState.switch_exists gt switch then let st = if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then OpamSwitchAction.set_current_switch lock gt switch else let rt = OpamRepositoryState.load `Lock_none gt in OpamSwitchState.load lock gt rt switch in OpamEnv.check_and_print_env_warning st; st else let installed_switches = OpamFile.Config.installed_switches gt.config in OpamConsole.error_and_exit `Not_found "No switch %s is currently installed. 
Did you mean \ 'opam switch create %s'?\n\ Installed switches are:\n%s" (OpamSwitch.to_string switch) (OpamSwitch.to_string switch) (OpamStd.Format.itemize OpamSwitch.to_string installed_switches) let import_t ?ask importfile t = log "import switch"; let import_sel = importfile.OpamFile.SwitchExport.selections in let import_opams = importfile.OpamFile.SwitchExport.overlays in let opams = OpamPackage.Name.Map.fold (fun name opam opams -> let nv = OpamPackage.create name (OpamFile.OPAM.version opam) in OpamPackage.Map.add nv opam opams) import_opams t.opams in let packages = t.packages ++ OpamPackage.keys opams in let pinned = let names = OpamPackage.names_of_packages import_sel.sel_pinned in OpamPackage.Set.filter (fun nv -> not (OpamPackage.Name.Set.mem nv.name names)) t.pinned ++ import_sel.sel_pinned in let available = OpamSwitchState.compute_available_packages t.switch_global t.switch t.switch_config ~pinned ~opams in let compiler_packages, to_install = if OpamPackage.Set.is_empty t.compiler_packages then import_sel.sel_compiler %% available, import_sel.sel_installed else t.compiler_packages, import_sel.sel_installed -- import_sel.sel_compiler in let t = { t with available_packages = lazy available; packages; compiler_packages; pinned; opams; } in let unavailable_version, unavailable = let available_names = OpamPackage.names_of_packages available in OpamPackage.Set.partition (fun nv -> OpamPackage.Name.Set.mem nv.name available_names) (to_install -- available) in if not (OpamPackage.Set.is_empty unavailable_version) then OpamConsole.warning "These packages aren't available at the specified versions, \ version constraints have been discarded:\n%s" (OpamStd.Format.itemize OpamPackage.to_string (OpamPackage.Set.elements unavailable_version)); if not (OpamPackage.Set.is_empty unavailable) then OpamConsole.warning "These packages are unavailable, they have been ignored from \ the import file:\n%s" (OpamStd.Format.itemize OpamPackage.to_string (OpamPackage.Set.elements unavailable)); let t, solution = let to_import = OpamSolution.eq_atoms_of_packages (to_install %% available) @ OpamSolution.atoms_of_packages unavailable_version in let add_roots = OpamPackage.names_of_packages import_sel.sel_roots in OpamSolution.resolve_and_apply ?ask t Import ~requested:(OpamPackage.Name.Set.of_list @@ List.map fst to_import) ~add_roots ~orphans:OpamPackage.Set.empty { wish_install = to_import; wish_remove = []; wish_upgrade = []; criteria = `Default; extra_attributes = []; } in OpamSolution.check_solution t solution; if not (OpamStateConfig.(!r.dryrun) || OpamClientConfig.(!r.show)) then begin OpamPackage.Set.iter (fun nv -> match OpamPackage.Name.Map.find_opt nv.name import_opams with | None -> () | Some opam -> OpamFilename.rmdir (OpamPath.Switch.Overlay.package t.switch_global.root t.switch nv.name); OpamFile.OPAM.write (OpamPath.Switch.Overlay.opam t.switch_global.root t.switch nv.name) opam) pinned; let sel = OpamSwitchState.load_selections t.switch_global t.switch in S.write (OpamPath.Switch.selections t.switch_global.root t.switch) { sel with sel_pinned = pinned } end; t let read_overlays (read: package -> OpamFile.OPAM.t option) packages = OpamPackage.Set.fold (fun nv acc -> match read nv with | Some opam -> if OpamFile.OPAM.extra_files opam <> None then (OpamConsole.warning "Metadata of package %s uses a files/ subdirectory, it may not be \ re-imported correctly (skipping definition)" (OpamPackage.to_string nv); acc) else OpamPackage.Name.Map.add nv.name opam acc | None -> acc) packages 
OpamPackage.Name.Map.empty let export ?(full=false) filename = let switch = OpamStateConfig.get_switch () in let root = OpamStateConfig.(!r.root_dir) in let export = OpamFilename.with_flock `Lock_none (OpamPath.Switch.lock root switch) @@ fun _ -> let selections = S.safe_read (OpamPath.Switch.selections root switch) in let overlays = read_overlays (fun nv -> OpamFileTools.read_opam (OpamPath.Switch.Overlay.package root switch nv.name)) selections.sel_pinned in let overlays = if full then OpamPackage.Name.Map.union (fun a _ -> a) overlays @@ read_overlays (fun nv -> OpamFile.OPAM.read_opt (OpamPath.Switch.installed_opam root switch nv)) (selections.sel_installed -- selections.sel_pinned) else overlays in { OpamFile.SwitchExport.selections; overlays } in match filename with | None -> OpamFile.SwitchExport.write_to_channel stdout export | Some f -> OpamFile.SwitchExport.write f export let show () = OpamConsole.msg "%s\n" (OpamSwitch.to_string (OpamStateConfig.get_switch ())) let reinstall init_st = let switch = init_st.switch in log "reinstall switch=%a" (slog OpamSwitch.to_string) switch; let gt = init_st.switch_global in let switch_root = OpamPath.Switch.root gt.root switch in let opam_subdir = OpamPath.Switch.meta gt.root switch in let pkg_dirs = List.filter ((<>) opam_subdir) (OpamFilename.dirs switch_root) in List.iter OpamFilename.cleandir pkg_dirs; List.iter OpamFilename.remove (OpamFilename.files switch_root); OpamFilename.cleandir (OpamPath.Switch.config_dir gt.root switch); OpamFilename.cleandir (OpamPath.Switch.installed_opams gt.root switch); let st = { init_st with installed = OpamPackage.Set.empty; installed_roots = OpamPackage.Set.empty; reinstall = OpamPackage.Set.empty; } in import_t { OpamFile.SwitchExport. selections = OpamSwitchState.selections init_st; overlays = OpamPackage.Name.Map.empty; } st let import st filename = let import_str = match filename with | None -> OpamSystem.string_of_channel stdin | Some f -> OpamFilename.read (OpamFile.filename f) in let importfile = try OpamFile.SwitchExport.read_from_string ?filename import_str with OpamPp.Bad_format _ as e -> log "Error loading export file, trying the old file format"; try let selections = OpamFile.LegacyState.read_from_string import_str in { OpamFile.SwitchExport.selections; overlays = OpamPackage.Name.Map.empty } with e1 -> OpamStd.Exn.fatal e1; raise e in import_t importfile st let set_compiler st namesv = let name_unknown = List.filter (fun (name,_) -> not (OpamPackage.has_name st.packages name)) namesv in if name_unknown <> [] then OpamConsole.error_and_exit `Not_found "No packages by these names found: %s" (OpamStd.List.concat_map ", " (OpamPackage.Name.to_string @* fst) name_unknown); let packages = List.map (function | name, Some v -> OpamPackage.create name v | name, None -> OpamSwitchState.get_package st name) namesv in let uninstalled = List.filter (fun nv -> not (OpamPackage.Set.mem nv st.installed)) packages in if uninstalled <> [] then (OpamConsole.warning "These packages are not installed:\n%s" (OpamStd.List.concat_map ", " OpamPackage.to_string uninstalled); if not (OpamConsole.confirm "Set them as compilers at the proposed versions regardless?") then OpamStd.Sys.exit_because `Aborted); let st = { st with compiler_packages = OpamPackage.Set.of_list packages } in OpamSwitchAction.write_selections st; st let get_compiler_packages ?repos rt = let repos = match repos with | None -> OpamGlobalState.repos_list rt.repos_global | Some r -> r in let package_index = OpamRepositoryState.build_index rt repos in 
OpamPackage.Map.filter (fun _ opam -> OpamFile.OPAM.has_flag Pkgflag_Compiler opam && OpamFilter.eval_to_bool ~default:false (OpamPackageVar.resolve_global rt.repos_global) (OpamFile.OPAM.available opam)) package_index |> OpamPackage.keys let advise_compiler_dependencies rt opams compilers name atoms = let packages = OpamFormula.packages_of_atoms (OpamPackage.keys opams) atoms in let deps = List.map (fun nv -> let opam = OpamPackage.Map.find nv opams in OpamPackageVar.filter_depends_formula ~default:false ~env:(OpamPackageVar.resolve_switch_raw ~package:nv rt.repos_global (OpamSwitch.of_string name) (OpamFile.Switch_config.empty)) (OpamFile.OPAM.depends opam)) (OpamPackage.Set.elements packages) in let comp_deps = List.fold_left (fun acc f -> OpamPackage.Set.union acc (OpamFormula.packages compilers f)) OpamPackage.Set.empty deps in if not (OpamPackage.Set.is_empty comp_deps) then OpamConsole.formatted_msg "Package%s %s do%sn't have the 'compiler' flag set, and may not be \ suitable to set as switch base. You probably meant to choose among \ the following compiler implementations, which they depend \ upon:\n%s" (match atoms with [_] -> "" | _ -> "s") (OpamStd.List.concat_map ", " OpamFormula.short_string_of_atom atoms) (match atoms with [_] -> "es" | _ -> "") (OpamStd.Format.itemize OpamPackage.Name.to_string (OpamPackage.Name.Set.elements (OpamPackage.names_of_packages comp_deps))) let guess_compiler_package ?repos rt name = let repos = match repos with | None -> OpamGlobalState.repos_list rt.repos_global | Some r -> r in let opams = OpamRepositoryState.build_index rt repos |> OpamPackage.Map.filter (fun _ opam -> OpamFilter.eval_to_bool ~default:false (OpamPackageVar.resolve_global rt.repos_global) (OpamFile.OPAM.available opam)) in let compiler_packages = OpamPackage.Map.filter (fun _ -> OpamFile.OPAM.has_flag Pkgflag_Compiler) opams |> OpamPackage.keys in let no_compiler_error () = OpamConsole.error_and_exit `Not_found "No compiler matching '%s' found, use 'opam switch list-available' \ to see what is available, or use '--packages' to select packages \ explicitly." name in match OpamPackage.of_string_opt name with | Some nv when OpamPackage.Set.mem nv compiler_packages -> [OpamSolution.eq_atom_of_package nv] | Some nv when OpamRepositoryState.find_package_opt rt repos nv <> None -> advise_compiler_dependencies rt opams compiler_packages name [OpamSolution.eq_atom_of_package nv]; no_compiler_error () | _ -> let pkgname = try Some (OpamPackage.Name.of_string name) with Failure _ -> None in match pkgname with | Some pkgname when OpamPackage.has_name compiler_packages pkgname -> [pkgname, None] | Some pkgname when OpamPackage.Map.exists (fun nv _ -> OpamPackage.name nv = pkgname) opams -> advise_compiler_dependencies rt opams compiler_packages name [pkgname, None]; no_compiler_error () | _ -> let version = OpamPackage.Version.of_string name in let has_version = OpamPackage.Set.filter (fun nv -> nv.version = version) compiler_packages in try [OpamSolution.eq_atom_of_package (OpamPackage.Set.choose_one has_version)] with | Not_found -> no_compiler_error () | Failure _ -> OpamConsole.error_and_exit `Bad_arguments "Compiler selection '%s' is ambiguous. matching packages: %s" name (OpamPackage.Set.to_string has_version)
102f169d042f26f94edc891c10dd598b69d935bab147793e467e6a70cb5c10da
dktr0/estuary
Header.hs
{-# LANGUAGE RecursiveDo, OverloadedStrings #-}

module Estuary.Widgets.Header where

import Reflex
import Reflex.Dom hiding (Request,Response)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Map.Strict

import Estuary.Types.Language
import Estuary.Types.Hint
import qualified Estuary.Types.Term as Term
import Estuary.Widgets.Reflex
import Estuary.Widgets.W
import Estuary.Widgets.Reflex
import Estuary.Types.TranslatableText

header :: MonadWidget t m => W t m ()
header = divClass "header primary-color primary-borders" $ mdo
  hv <- headerVisible
  headerButton <- clickableDiv "header-area" $ hideableWidget' hv $ divClass "header-title" $ text "estuary"
  toggleHeaderVisible headerButton
  hideableWidget' hv $ do
    divClass "config-toolbar" $ do
      divClass "config-entry display-inline-block primary-color ui-font" $ do
        term Term.Theme >>= dynText
        let styleMap = fromList [("../css-custom/classic.css", "classic"),("../css-custom/dark.css", "Dark" ),("../css-custom/inverse.css","Inverse"),("../css-custom/grayscale.css","Grayscale"),("../css-custom/bubble.css","Bubble"),("../css-custom/minimalist.css","Minimalist")]
        theme >>= dropdownW styleMap >>= setTheme
      divClass "config-entry display-inline-block primary-color ui-font" $ do
        term Term.Language >>= dynText
        let langMap = fromList $ zip languages (fmap (T.pack . show) languages)
        language >>= dropdownW langMap >>= setLanguage
      sideBarButton <- divClass "config-entry display-inline-block primary-color ui-font" $ dynButton "?"
      toggleSideBarVisible sideBarButton
null
https://raw.githubusercontent.com/dktr0/estuary/c08a4790533c983ba236468e0ae197df50f2109f/client/src/Estuary/Widgets/Header.hs
haskell
{-# LANGUAGE RecursiveDo, OverloadedStrings #-}

module Estuary.Widgets.Header where

import Reflex
import Reflex.Dom hiding (Request,Response)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Map.Strict

import Estuary.Types.Language
import Estuary.Types.Hint
import qualified Estuary.Types.Term as Term
import Estuary.Widgets.Reflex
import Estuary.Widgets.W
import Estuary.Widgets.Reflex
import Estuary.Types.TranslatableText

header :: MonadWidget t m => W t m ()
header = divClass "header primary-color primary-borders" $ mdo
  hv <- headerVisible
  headerButton <- clickableDiv "header-area" $ hideableWidget' hv $ divClass "header-title" $ text "estuary"
  toggleHeaderVisible headerButton
  hideableWidget' hv $ do
    divClass "config-toolbar" $ do
      divClass "config-entry display-inline-block primary-color ui-font" $ do
        term Term.Theme >>= dynText
        let styleMap = fromList [("../css-custom/classic.css", "classic"),("../css-custom/dark.css", "Dark" ),("../css-custom/inverse.css","Inverse"),("../css-custom/grayscale.css","Grayscale"),("../css-custom/bubble.css","Bubble"),("../css-custom/minimalist.css","Minimalist")]
        theme >>= dropdownW styleMap >>= setTheme
      divClass "config-entry display-inline-block primary-color ui-font" $ do
        term Term.Language >>= dynText
        let langMap = fromList $ zip languages (fmap (T.pack . show) languages)
        language >>= dropdownW langMap >>= setLanguage
      sideBarButton <- divClass "config-entry display-inline-block primary-color ui-font" $ dynButton "?"
      toggleSideBarVisible sideBarButton
108b44e7bb858ade22e87a4c2a5c642024b42e944c15b677103b9d9c36ee513d
10Pines/pdepreludat
Library.hs
module Library where

import PdePreludat

doble :: Number -> Number
doble numero = numero + numero
null
https://raw.githubusercontent.com/10Pines/pdepreludat/aa72d7bcd262725d53b920591aa991cecdf36a18/the-template/src/Library.hs
haskell
module Library where

import PdePreludat

doble :: Number -> Number
doble numero = numero + numero
86b407087aa6e344da5d6d5e5500591246df573d41abdd01b4d1db1d478f3c2a
kupl/MicSE
se.ml
(* Se is a symbolic execution module based on Tz.sym_state definition *) exception SeError of string open! Core Set of Tz.sym_state & Set of module SSet = Core.Set.Make (Tz.SymState_cmp) module MciSet = Core.Set.Make (Tz.MichCutInfo_cmp) module MFSet = Core.Set.Make (Tz.MichF_cmp) type se_result = { (* symbolic states *) sr_running : SSet.t; sr_blocked : SSet.t; sr_queries : SSet.t; sr_terminated : SSet.t; (* caches - accumulates which loop/lambdas passed *) sr_entered_loops : MciSet.t; sr_entered_lmbds : MciSet.t; (* caches - count integer to assign sym_state_id (start with 0) *) sr_sid_counter : int; } [@@deriving sexp, compare, equal] let se_result_empty : se_result = { sr_running = SSet.empty; sr_blocked = SSet.empty; sr_queries = SSet.empty; sr_terminated = SSet.empty; sr_entered_loops = MciSet.empty; sr_entered_lmbds = MciSet.empty; sr_sid_counter = 0; } let se_result_pointwise_union : se_result -> se_result -> se_result = fun r1 r2 -> { sr_running = SSet.union r1.sr_running r2.sr_running; sr_blocked = SSet.union r1.sr_blocked r2.sr_blocked; sr_queries = SSet.union r1.sr_queries r2.sr_queries; sr_terminated = SSet.union r1.sr_terminated r2.sr_terminated; sr_entered_loops = MciSet.union r1.sr_entered_loops r2.sr_entered_loops; sr_entered_lmbds = MciSet.union r1.sr_entered_lmbds r2.sr_entered_lmbds; sr_sid_counter = max r1.sr_sid_counter r2.sr_sid_counter; } (******************************************************************************) SymState as Graph (******************************************************************************) module SidMap = Core.Map.Make (Int) let construct_sid_checkmap : SSet.t -> Tz.sym_state SidMap.t = fun sset -> SSet.fold sset ~init:SidMap.empty ~f:(fun accmap ss -> SidMap.add_exn accmap ~key:(List.hd_exn ss.ss_id) ~data:ss ) module SSGraph = struct open Tz module RMCIMap = Core.Map.Make (Tz.RMichCutInfo_cmp) type 'a ps_pair = { pred : 'a; succ : 'a; } [@@deriving sexp, compare, equal] type mci_view = SSet.t ps_pair RMCIMap.t [@@deriving sexp, compare, equal] let construct_mci_view : basic_blocks:SSet.t -> mci_view = let empty_cp = { pred = SSet.empty; succ = SSet.empty } in fun ~basic_blocks -> SSet.fold basic_blocks ~init:RMCIMap.empty ~f:(fun accm ss -> let (start_rmci, block_rmci) = ( TzUtil.get_reduced_mci ss.ss_start_mci, TzUtil.get_reduced_mci ss.ss_block_mci ) in accm 1 . use symstate 's start - rmci - symstate is start - rmci 's successor (fun m -> RMCIMap.update m start_rmci ~f:(function | None -> { empty_cp with succ = SSet.singleton ss } | Some pspr -> { pspr with succ = SSet.add pspr.succ ss } )) 2 . 
use symstate 's block - rmci - symstate is block - rmci 's predecessor fun m -> RMCIMap.update m block_rmci ~f:(function | None -> { empty_cp with pred = SSet.singleton ss } | Some pspr -> { pspr with pred = SSet.add pspr.pred ss } ) ) let ss_view_pred : m_view:mci_view -> sym_state -> SSet.t = fun ~m_view ss -> (RMCIMap.find_exn m_view (TzUtil.get_reduced_mci ss.ss_start_mci)).pred let ss_view_succ : m_view:mci_view -> sym_state -> SSet.t = fun ~m_view ss -> (RMCIMap.find_exn m_view (TzUtil.get_reduced_mci ss.ss_block_mci)).succ end (* module SSGraph end *) (******************************************************************************) Utilities : Constraint (******************************************************************************) let add_constraints : c:Tz.mich_f list -> Tz.sym_state -> Tz.sym_state = (fun ~c ss -> { ss with ss_constraints = c @ ss.ss_constraints }) let mtz_constraint_if_it_is_or_true : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~tv:(t, v) -> if equal_mich_t t.cc_v MT_mutez then MF_mutez_bound (gen_mich_v_ctx v ~ctx) else MF_true let add_mtz_constraint_if_it_is : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~tv ss -> add_constraints ~c:[ mtz_constraint_if_it_is_or_true ~ctx ~tv ] ss let nat_constraint_if_it_is_or_true : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~tv:(t, v) -> if equal_mich_t t.cc_v MT_nat then MF_nat_bound (gen_mich_v_ctx v ~ctx) else MF_true let add_nat_constraint_if_it_is : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~tv ss -> add_constraints ~c:[ nat_constraint_if_it_is_or_true ~ctx ~tv ] ss let map_constraint_if_it_is_or_true : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~tv:(t, v) -> match t.cc_v with | MT_map _ | MT_big_map _ -> MF_map_default_value (gen_mich_v_ctx v ~ctx) | _ -> MF_true let add_map_constraint_if_it_is : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~tv ss -> add_constraints ~c:[ map_constraint_if_it_is_or_true ~ctx ~tv ] ss let set_constraint_if_it_is_or_true : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~tv:(t, v) -> match t.cc_v with | MT_set _ -> MF_set_default_value (gen_mich_v_ctx v ~ctx) | _ -> MF_true let add_set_constraint_if_it_is : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~tv ss -> add_constraints ~c:[ set_constraint_if_it_is_or_true ~ctx ~tv ] ss let michv_typ_constraints : ctx:Tz.mich_sym_ctxt -> v:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~v -> let tv = (typ_of_val v, v) in [ mtz_constraint_if_it_is_or_true ~ctx ~tv; nat_constraint_if_it_is_or_true ~ctx ~tv; map_constraint_if_it_is_or_true ~ctx ~tv; set_constraint_if_it_is_or_true ~ctx ~tv; ] let add_typ_constraints : ctx:Tz.mich_sym_ctxt -> v:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = (fun ~ctx ~v ss -> add_constraints ~c:(michv_typ_constraints ~ctx ~v) ss) let amount_balance_mutez_constraints : ctx:Tz.mich_sym_ctxt -> amount_v:Tz.mich_v Tz.cc -> balance_v:Tz.mich_v Tz.cc -> bc_balance_v:Tz.mich_v Tz.cc -> Tz.mich_f list = fun ~ctx ~amount_v ~balance_v ~bc_balance_v -> let open Tz in 
let open TzUtil in [ 1 . amount , balance , and bc_balance are mutez values MF_mutez_bound (gen_mich_v_ctx ~ctx amount_v); MF_mutez_bound (gen_mich_v_ctx ~ctx balance_v); MF_mutez_bound (gen_mich_v_ctx ~ctx bc_balance_v); 2 . ( balance + bc_balance ) is also mutez value MF_mutez_bound (gen_mich_v_ctx ~ctx (MV_add_mmm (balance_v, bc_balance_v) |> gen_dummy_cc) ); ] let mtz_comes_from_constraint : ctx:Tz.mich_sym_ctxt -> mtz_v:Tz.mich_v Tz.cc -> from_v:Tz.mich_v Tz.cc -> Tz.mich_f = fun ~ctx ~mtz_v ~from_v -> let open Tz in let open TzUtil in MF_is_true (gen_mich_v_ctx ~ctx (MV_leq_ib (mtz_v, from_v) |> gen_dummy_cc)) let lt_2_63_constraint : ctx:Tz.mich_sym_ctxt -> Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx mv -> MF_eq ( gen_mich_v_ctx ~ctx mv, gen_mich_v_ctx ~ctx (MV_lit_mutez (Bigint.of_int64 Int64.max_value) |> gen_dummy_cc) ) let amount_balance_mutez_constraints : amount_v : balance_v : Tz.mich_v Tz.cc - > bc_balance_v : Tz.mich_v Tz.cc - > Tz.mich_f list = fun ~amount_v ~balance_v ~bc_balance_v - > let open Tz in [ ( * 1 . amount , balance , and bc_balance are mutez values amount_v:Tz.mich_v Tz.cc -> balance_v:Tz.mich_v Tz.cc -> bc_balance_v:Tz.mich_v Tz.cc -> Tz.mich_f list = fun ~amount_v ~balance_v ~bc_balance_v -> let open Tz in [ (* 1. amount, balance, and bc_balance are mutez values *) MF_mutez_bound amount_v; MF_mutez_bound balance_v; MF_mutez_bound bc_balance_v; 2 . amount is less - or - equal than bc_balance MF_is_true (MV_leq_ib (amount_v, bc_balance_v) |> gen_dummy_cc); 3 . ( balance + bc_balance ) is also mutez value MF_mutez_bound (MV_add_mmm (balance_v, bc_balance_v) |> gen_dummy_cc); 4 . ( balance + bc_balance ) is equal to total - mutez - amount (let lit_total_mutez_amount = MV_lit_mutez (Bigint.of_int64 Int64.max_value) |> gen_dummy_cc in MF_eq ( MV_add_mmm (balance_v, bc_balance_v) |> gen_dummy_cc, lit_total_mutez_amount ) ); ] *) let ge_balance_amount_in_non_trx_entry_constraint : ctx:Tz.mich_sym_ctxt -> amount_v:Tz.mich_v Tz.cc -> balance_v:Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~amount_v ~balance_v -> MF_is_true (gen_mich_v_ctx ~ctx (MV_geq_ib (balance_v, amount_v) |> gen_dummy_cc)) let sigma_constraint_of_list_nil : ctx:Tz.mich_sym_ctxt -> lst:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~lst -> let (zero : mich_v cc) = MV_lit_nat Bigint.zero |> gen_custom_cc lst in let (zero_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx zero in let (set_of_sigma_lst : mich_v cc list) = sigma_of_cont lst in List.map set_of_sigma_lst ~f:(fun sigma -> let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in MF_eq (sigma_ctx, zero_ctx) :: michv_typ_constraints ~ctx ~v:sigma ) |> List.join (* function sigma_constraint_of_list_nil end *) let sigma_constraint_of_map_empty : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~map -> let (zero : mich_v cc) = MV_lit_nat Bigint.zero |> gen_custom_cc map in let (zero_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx zero in let (set_of_sigma_map : mich_v cc list) = sigma_of_cont map in List.map set_of_sigma_map ~f:(fun sigma -> let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in MF_eq (sigma_ctx, zero_ctx) :: michv_typ_constraints ~ctx ~v:sigma ) |> List.join (* function sigma_constraint_of_map_empty end *) let sigma_constraint_of_map_get : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> key:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~map ~key -> 
(* Design Note: This method for evaluating sigma of map is incomplete. The sum of elements which get from the map should be less than or equal to sigma of map. (i.e., map[A] + map[B] <= ∑map) *) let (value : mich_v cc) = (match (typ_of_val map).cc_v with | MT_map _ -> MV_get_xmoy (key, map) | MT_big_map _ -> MV_get_xbmo (key, map) | _ -> SeError "sigma_constraint_of_map_get : wrong type" |> raise) |> gen_custom_cc map in let (none : mich_v cc) = MV_none (typ_of_val value |> get_innertyp) |> gen_custom_cc value in let (others : mich_v cc) = (match (typ_of_val map).cc_v with | MT_map _ -> MV_update_xomm (key, none, map) | MT_big_map _ -> MV_update_xobmbm (key, none, map) | _ -> SeError "sigma_constraint_of_map_get : wrong type" |> raise) |> gen_custom_cc map in let (set_of_sigma_map : mich_v cc list) = sigma_of_cont map in let (set_of_sigma_others_map : mich_v cc list) = sigma_of_cont others in List.map2 set_of_sigma_map set_of_sigma_others_map ~f:(fun sigma sigma_others -> let ((acc_elem : mich_f list), (value_elem : mich_v cc)) = MV_unlift_option value |> gen_custom_cc map |> acc_of_sigma ~sigma ~ctx in let (get_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx value in let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in let (add_ctx : mich_v_cc_ctx) = (match (typ_of_val value_elem).cc_v with | MT_int -> MV_add_iii (value_elem, sigma_others) | MT_nat -> MV_add_nnn (value_elem, sigma_others) | MT_mutez -> MV_add_mnn (value_elem, sigma_others) | _ -> SeError "sigma_constraint_of_map_get : wrong type" |> raise) |> gen_custom_cc sigma_others |> gen_mich_v_ctx ~ctx in MF_imply ( MF_not (MF_is_none get_ctx), MF_and (MF_eq (sigma_ctx, add_ctx) :: acc_elem) ) :: michv_typ_constraints ~ctx ~v:sigma @ michv_typ_constraints ~ctx ~v:sigma_others ) |> function | Ok fll -> List.join fll | Unequal_lengths -> SeError "sigma_constraint_of_map_get : Unequal_lengths" |> raise (* function sigma_constraint_of_map_get end *) let sigma_constraint_of_list_cons : ctx:Tz.mich_sym_ctxt -> lst:Tz.mich_v Tz.cc -> hd:Tz.mich_v Tz.cc -> tl:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~lst ~hd ~tl -> let (set_of_sigma_lst : mich_v cc list) = sigma_of_cont lst in let (set_of_sigma_tl : mich_v cc list) = sigma_of_cont tl in List.map2 set_of_sigma_lst set_of_sigma_tl ~f:(fun sigma new_sigma -> let ((acc_elem : mich_f list), (value_elem : mich_v cc)) = (acc_of_sigma ~sigma ~ctx) hd in let (addition : mich_v cc) = (match (typ_of_val sigma).cc_v with | MT_int -> MV_add_iii (value_elem, new_sigma) | MT_nat -> MV_add_nnn (value_elem, new_sigma) | MT_mutez -> MV_add_mnn (value_elem, new_sigma) | _ -> SeError "sigma_constraint_of_list_cons : not supported" |> raise) |> gen_custom_cc new_sigma in let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in let (addition_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx addition in (MF_eq (sigma_ctx, addition_ctx) :: acc_elem) @ michv_typ_constraints ~ctx ~v:addition @ michv_typ_constraints ~ctx ~v:sigma @ michv_typ_constraints ~ctx ~v:new_sigma ) |> function | Ok fll -> List.join fll | Unequal_lengths -> SeError "sigma_constraint_of_list_cons : Unequal_lengths" |> raise (* function sigma_constraint_of_list_cons end *) let sigma_constraint_of_map_update : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> key:Tz.mich_v Tz.cc -> value:Tz.mich_v Tz.cc -> updated_map:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~map ~key ~value ~updated_map -> let (old_value : mich_v cc) = (match (typ_of_val map).cc_v with | MT_map _ -> 
MV_get_xmoy (key, map) | MT_big_map _ -> MV_get_xbmo (key, map) | _ -> SeError "sigma_constraint_of_map_update : wrong type" |> raise) |> gen_custom_cc map in let (get_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx old_value in let (update_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx value in let (set_of_sigma_map : mich_v cc list) = sigma_of_cont map in let (set_of_sigma_updated_map : mich_v cc list) = sigma_of_cont updated_map in List.map2 set_of_sigma_map set_of_sigma_updated_map ~f:(fun sigma new_sigma -> let ((acc_old_elem : mich_f list), (value_old_elem : mich_v cc)) = MV_unlift_option old_value |> gen_custom_cc map |> acc_of_sigma ~sigma:new_sigma ~ctx in let ((acc_new_elem : mich_f list), (value_new_elem : mich_v cc)) = MV_unlift_option value |> gen_custom_cc value |> acc_of_sigma ~sigma ~ctx in let (old_addition : mich_v cc) = (match (typ_of_val sigma).cc_v with | MT_mutez -> MV_add_mnn (value_new_elem, sigma) | MT_nat -> MV_add_nnn (value_new_elem, sigma) | MT_int -> MV_add_iii (value_new_elem, sigma) | _ -> SeError "sigma_constraint_of_map_update : not supported" |> raise) |> gen_custom_cc new_sigma in let (new_addition : mich_v cc) = (match (typ_of_val sigma).cc_v with | MT_mutez -> MV_add_mnn (value_old_elem, new_sigma) | MT_nat -> MV_add_nnn (value_old_elem, new_sigma) | MT_int -> MV_add_iii (value_old_elem, new_sigma) | _ -> SeError "sigma_constraint_of_map_update : not supported" |> raise) |> gen_custom_cc new_sigma in let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in let (new_sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx new_sigma in let (old_addition_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx old_addition in let (new_addition_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx new_addition in MF_and [ MF_imply ( MF_and [ MF_is_none get_ctx; MF_is_none update_ctx ], MF_and [ MF_eq (sigma_ctx, new_sigma_ctx) ] ); MF_imply ( MF_and [ MF_is_none get_ctx; MF_not (MF_is_none update_ctx) ], MF_and ([ MF_eq (old_addition_ctx, new_sigma_ctx) ] @ acc_new_elem @ michv_typ_constraints ~ctx ~v:old_addition ) ); MF_imply ( MF_and [ MF_not (MF_is_none get_ctx); MF_is_none update_ctx ], MF_and ([ MF_eq (sigma_ctx, new_addition_ctx) ] @ acc_old_elem @ michv_typ_constraints ~ctx ~v:new_addition ) ); MF_imply ( MF_and [ MF_not (MF_is_none get_ctx); MF_not (MF_is_none update_ctx) ], MF_and ([ MF_eq (old_addition_ctx, new_addition_ctx) ] @ acc_old_elem @ acc_new_elem @ michv_typ_constraints ~ctx ~v:old_addition @ michv_typ_constraints ~ctx ~v:new_addition ) ); ] :: michv_typ_constraints ~ctx ~v:sigma @ michv_typ_constraints ~ctx ~v:new_sigma ) |> function | Ok fll -> List.join fll | Unequal_lengths -> SeError "sigma_constraint_of_map_update : Unequal_lengths" |> raise (* function sigma_constraint_of_map_update end *) let add_sigma_constraint_of_list_nil : ctx:Tz.mich_sym_ctxt -> lst:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~lst ss -> add_constraints ~c:(sigma_constraint_of_list_nil ~ctx ~lst) ss (* function add_sigma_constraint_of_list_nil end *) let add_sigma_constraint_of_map_empty : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~map ss -> add_constraints ~c:(sigma_constraint_of_map_empty ~ctx ~map) ss (* function add_sigma_constraint_of_map_empty end *) let add_sigma_constraint_of_map_get : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> key:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~map ~key ss -> add_constraints ~c:(sigma_constraint_of_map_get ~ctx ~map ~key) ss (* function add_sigma_constraint_of_map_get end *) 
let add_sigma_constraint_of_list_cons : ctx:Tz.mich_sym_ctxt -> lst:Tz.mich_v Tz.cc -> hd:Tz.mich_v Tz.cc -> tl:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = let open Tz in let open TzUtil in fun ~ctx ~lst ~hd ~tl ss -> if not (equal_mich_t (typ_of_val lst).cc_v (typ_of_val tl).cc_v) then SeError "add_sigma_constraint_of_list_cons : wrong type" |> raise else add_constraints ~c:(sigma_constraint_of_list_cons ~ctx ~lst ~hd ~tl) ss (* function add_sigma_constraint_of_list_cons end *) let add_sigma_constraint_of_map_update : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> key:Tz.mich_v Tz.cc -> value:Tz.mich_v Tz.cc -> updated_map:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = let open Tz in let open TzUtil in fun ~ctx ~map ~key ~value ~updated_map ss -> if not (equal_mich_t (typ_of_val map).cc_v (typ_of_val updated_map).cc_v) then SeError "add_sigma_constraint_of_map_update : wrong type" |> raise else add_constraints ~c:(sigma_constraint_of_map_update ~ctx ~map ~key ~value ~updated_map) ss (* function add_sigma_constraint_of_map_update end *) (******************************************************************************) (* Symbolic Run Instruction *) (******************************************************************************) let run_inst_initial_se_result : Tz.mich_t Tz.cc * Tz.mich_t Tz.cc * Tz.mich_i Tz.cc -> se_result * Tz.sym_state = let open Tz in let open TzUtil in fun (param_tcc, strg_tcc, code) -> sid_counter & sym_ctxt let scounter = 0 in let sctxt = [ scounter ] in let ctx = sctxt in mich_t cc values let cur_contract_tcc = MT_contract param_tcc |> gen_dummy_cc and addr_tcc = MT_address |> gen_dummy_cc and mutez_tcc = MT_mutez |> gen_dummy_cc and time_tcc = MT_timestamp |> gen_dummy_cc and paramstrg_tcc = MT_pair (param_tcc, strg_tcc) |> gen_dummy_cc in (* initial mich_cut_info *) let init_mci = { mci_loc = code.cc_loc; mci_cutcat = MCC_trx_entry } in (* beginning trx-image *) let param_v = MV_symbol (param_tcc, MSC_param) |> gen_dummy_cc in let beginning_ti : trx_image = { ti_contract = MV_symbol (cur_contract_tcc, MSC_contract) |> gen_dummy_cc; ti_source = MV_symbol (addr_tcc, MSC_source) |> gen_dummy_cc; ti_sender = MV_symbol (addr_tcc, MSC_sender) |> gen_dummy_cc; ti_param = param_v; ti_amount = MV_symbol (mutez_tcc, MSC_amount) |> gen_dummy_cc; ti_time = MV_symbol (time_tcc, MSC_time) |> gen_dummy_cc; } in (* beginning sym-image *) let beginning_si : sym_image = { si_mich = [ MV_symbol (paramstrg_tcc, MSC_mich_stack 0) |> gen_dummy_cc ]; si_dip = []; si_map_entry = []; si_map_exit = []; si_map_mapkey = []; si_iter = []; si_balance = MV_symbol (mutez_tcc, MSC_balance) |> gen_dummy_cc; si_bc_balance = MV_symbol (mutez_tcc, MSC_bc_balance) |> gen_dummy_cc; si_param = beginning_ti; } in blocking sym - image let blocking_si : sym_image = { beginning_si with si_balance = MV_add_mmm (beginning_si.si_balance, beginning_ti.ti_amount) |> gen_dummy_cc; si_bc_balance = MV_sub_mmm (beginning_si.si_bc_balance, beginning_ti.ti_amount) |> gen_dummy_cc; } in let initial_sym_state : sym_state = { ss_id = sctxt; ss_start_mci = init_mci; ss_block_mci = init_mci; ss_start_si = beginning_si; ss_block_si = blocking_si; ss_constraints = 1 . first stack 's CAR is parameter - value MF_eq ( gen_mich_v_ctx ~ctx beginning_ti.ti_param, gen_mich_v_ctx ~ctx (MV_car (List.hd_exn beginning_si.si_mich) |> gen_dummy_cc) ) 2 . If parameter value is mutez or nat , add constraints michv_typ_constraints ~ctx ~v:param_v @ [ 3 . 
Amount comes from Bc - Balance mtz_comes_from_constraint ~ctx ~mtz_v:beginning_ti.ti_amount ~from_v:beginning_si.si_bc_balance; ] 4 . amount & balance & bc_balance constraints amount_balance_mutez_constraints ~ctx ~amount_v:beginning_ti.ti_amount ~balance_v:beginning_si.si_balance ~bc_balance_v:beginning_si.si_bc_balance; } in let initial_se_result : se_result = { se_result_empty with sr_running = SSet.singleton initial_sym_state; sr_sid_counter = scounter + 1; } in (initial_se_result, initial_sym_state) (* function run_inst_initial_se_result end *) let rec run_inst : Tz.mich_i Tz.cc -> se_result -> se_result = fun inst sr -> SSet.fold sr.sr_running ~init:{ sr with sr_running = SSet.empty } ~f:(fun acc_sr ss -> se_result_pointwise_union (run_inst_i inst (acc_sr, ss)) acc_sr ) and run_inst_i : Tz.mich_i Tz.cc -> se_result * Tz.sym_state -> se_result = let open Tz in let open TzUtil in utilties : : blocked - mich - stack let get_bmstack : sym_state -> mich_v cc list = (fun ss -> ss.ss_block_si.si_mich) in let get_bmstack_1 : sym_state -> mich_v cc = fun ss -> match get_bmstack ss with | h :: _ -> h | _ -> failwith "get_bmstack_1 : unexpected" in let get_bmstack_2 : sym_state -> mich_v cc * mich_v cc = fun ss -> match get_bmstack ss with | h1 :: h2 :: _ -> (h1, h2) | _ -> failwith "get_bmstack_2 : unexpected" in let get_bmstack_3 : sym_state -> mich_v cc * mich_v cc * mich_v cc = fun ss -> match get_bmstack ss with | h1 :: h2 :: h3 :: _ -> (h1, h2, h3) | _ -> failwith "get_bmstack_3 : unexpected" in let set_bmstack : sym_state -> mich_v cc list -> sym_state = fun ss st -> { ss with ss_block_si = { ss.ss_block_si with si_mich = st } } in let update_bmstack : f:(mich_v cc list -> mich_v cc list) -> sym_state -> sym_state = (fun ~f ss -> get_bmstack ss |> f |> set_bmstack ss) in let push_bmstack : v:mich_v cc -> sym_state -> sym_state = (fun ~v ss -> update_bmstack ~f:(List.cons v) ss) in let update_top_1_bmstack : f:(mich_v cc -> mich_v cc list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | hd :: tl -> f hd @ tl |> set_bmstack ss | _ -> failwith "update_top_1_bmstack : unexpected" in let update_top_2_bmstack : f:(mich_v cc * mich_v cc -> mich_v cc list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | h1 :: h2 :: tl -> f (h1, h2) @ tl |> set_bmstack ss | _ -> failwith "update_top_2_bmstack : unexpected" in let update_top_3_bmstack : f:(mich_v cc * mich_v cc * mich_v cc -> mich_v cc list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | h1 :: h2 :: h3 :: tl -> f (h1, h2, h3) @ tl |> set_bmstack ss | _ -> failwith "update_top_e_bmstack : unexpected" in let set_bmstack_and_constraint : sym_state -> mich_v cc list -> mich_f list -> sym_state = fun ss st cs -> { ss with ss_block_si = { ss.ss_block_si with si_mich = st }; ss_constraints = cs; } in let update_top_1_bmstack_and_constraint : f:(mich_v cc -> mich_v cc list * mich_f list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | hd :: tl -> let (st, cs) = f hd in set_bmstack_and_constraint ss (st @ tl) (cs @ ss.ss_constraints) | _ -> failwith "update_top_1_bmstack_and_constraint : unexpected" in let update_top_2_bmstack_and_constraint : f:(mich_v cc * mich_v cc -> mich_v cc list * mich_f list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | h1 :: h2 :: tl -> let (st, cs) = f (h1, h2) in set_bmstack_and_constraint ss (st @ tl) (cs @ ss.ss_constraints) | _ -> failwith "update_top_2_bmstack_and_constraint : unexpected" in let 
update_top_3_bmstack_and_constraint : f:(mich_v cc * mich_v cc * mich_v cc - > mich_v cc list * mich_f list ) - > sym_state - > sym_state = fun ~f ss - > match get_bmstack ss with | h1 : : h2 : : h3 : : tl - > let ( st , cs ) = f ( h1 , h2 , h3 ) in set_bmstack_and_constraint ss ( ) ( cs @ ss.ss_constraints ) | _ - > failwith " update_top_3_bmstack_and_constraint : unexpected " in f:(mich_v cc * mich_v cc * mich_v cc -> mich_v cc list * mich_f list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | h1 :: h2 :: h3 :: tl -> let (st, cs) = f (h1, h2, h3) in set_bmstack_and_constraint ss (st @ tl) (cs @ ss.ss_constraints) | _ -> failwith "update_top_3_bmstack_and_constraint : unexpected" in *) (* utilities : sym_state <-> se_result *) let running_ss_to_sr : se_result -> sym_state -> se_result = (fun ctxt_sr ss -> { ctxt_sr with sr_running = SSet.singleton ss }) in (* utilities : context-se_result update *) let ctxt_sr_update : se_result -> se_result -> se_result = (* (fun ctxt_sr new_sr -> se_result_pointwise_union new_sr ctxt_sr) *) fun ctxt_sr new_sr -> { ctxt_sr with sr_entered_loops = MciSet.union ctxt_sr.sr_entered_loops new_sr.sr_entered_loops; sr_entered_lmbds = MciSet.union ctxt_sr.sr_entered_lmbds new_sr.sr_entered_lmbds; sr_sid_counter = max ctxt_sr.sr_sid_counter new_sr.sr_sid_counter; } in let ctxt_sr_sid_counter_incr : se_result -> se_result = fun ctxt_sr -> { ctxt_sr with sr_sid_counter = ctxt_sr.sr_sid_counter + 1 } in (* utilities : symbolic stack generator *) let generate_symstack : f:(int -> mich_sym_category) -> ctx:mich_sym_ctxt -> ccmaker:('a -> 'a cc) -> mich_v cc list -> mich_v cc list * mich_f list = fun ~f ~ctx ~ccmaker st -> let len = List.length st in let vl = List.mapi ~f:(fun i v -> let sc = f (len - i - 1) in MV_symbol (typ_of_val v, sc) |> ccmaker) st in let ctl = List.fold vl ~init:[] ~f:(fun accl v -> michv_typ_constraints ~ctx ~v @ accl ) in (vl, ctl) in utilities : extract paramter type from sym - state let param_typ_of_ss : sym_state -> mich_t cc = fun ss -> match (typ_of_val ss.ss_start_si.si_param.ti_contract).cc_v with | MT_contract t -> t | _ -> failwith "run_inst_i : param_typ_of_ss : unexpected" in (* FUNCTION BEGIN *) fun inst (ctxt_sr, ss) -> (* VERY VERY NAIVE OPTIMIZATION BEGIN - optimize only block_si.mich_stack's top value *) let ss = let ((*additional_constraints*) _, optimized_stack) = match ss.ss_block_si.si_mich with | [] -> ([], []) | h :: t -> let (cl, v) = opt_mvcc ~ctx:ss.ss_id h in (cl, v :: t) in { ss with ss_block_si = { ss.ss_block_si with si_mich = optimized_stack } (* ss_constraints = additional_constraints @ ss.ss_constraints; *); } in (* VERY VERY NAIVE OPTIMIZATION END *) (* let _ = (* DEBUG *) Utils.Log.debug (fun m -> m "Current MCI: %s\n\tCurrent SID: %d\n\tMichStack Length: %d\n\tStack: \n\t\t[%s]" (inst.cc_loc |> sexp_of_ccp_loc |> Sexp.to_string) ctxt_sr.sr_sid_counter (List.length ss.ss_block_si.si_mich) (List.map ss.ss_block_si.si_mich ~f:(fun v -> Tz.sexp_of_mich_v v.cc_v |> SexpUtil.to_string ) |> String.concat ~sep:"; " ) ) in *) let ctx = ss.ss_id in match inst.cc_v with | MI_seq (i1, i2) -> run_inst_i i1 (ctxt_sr, ss) |> run_inst i2 | MI_drop zn -> if Bigint.equal zn Bigint.zero then running_ss_to_sr ctxt_sr ss else update_bmstack ss ~f:(fun x -> List.split_n x (Bigint.to_int_exn zn) |> snd ) |> running_ss_to_sr ctxt_sr | MI_dup zn -> update_bmstack ss ~f:(fun x -> List.nth_exn x (Bigint.to_int_exn zn - 1) :: x ) |> running_ss_to_sr ctxt_sr | MI_swap -> update_bmstack ss ~f:(function | h1 :: h2 :: 
tl -> h2 :: h1 :: tl | _ -> failwith "run_inst_i : MI_swap : unexpected" ) |> running_ss_to_sr ctxt_sr | MI_dig zn -> update_bmstack ss ~f:(fun x -> match List.split_n x (Bigint.to_int_exn zn) with | (hdlst, tlhd :: tltl) -> (tlhd :: hdlst) @ tltl | _ -> failwith "run_inst_i : MI_dig : unexpected" ) |> running_ss_to_sr ctxt_sr | MI_dug zn -> update_bmstack ss ~f:(fun x -> match List.split_n x (Bigint.to_int_exn zn + 1) with | (hdhd :: hdtl, tl) -> hdtl @ (hdhd :: tl) | _ -> failwith "run_inst_i : MI_dug : unexpected" ) |> running_ss_to_sr ctxt_sr | MI_push (_, v) -> push_bmstack ss ~v |> add_typ_constraints ~ctx ~v |> running_ss_to_sr ctxt_sr | MI_some -> update_top_1_bmstack ~f:(fun x -> [ MV_some x |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_none t -> push_bmstack ss ~v:(MV_none t |> gen_custom_cc inst) |> running_ss_to_sr ctxt_sr | MI_unit -> push_bmstack ss ~v:(MV_unit |> gen_custom_cc inst) |> running_ss_to_sr ctxt_sr | MI_if_none (i1, i2) -> let cond_value : mich_v cc = get_bmstack_1 ss in let tb_cond_constraint : mich_f = MF_is_none (gen_mich_v_ctx ~ctx cond_value) in (* then branch *) let then_br_sr : se_result = update_top_1_bmstack ~f:(fun _ -> []) ss |> add_constraints ~c:[ tb_cond_constraint ] |> (fun ssss -> run_inst_i i1 (ctxt_sr, ssss)) in (* IMPORTANT: ctxt_sr name shadowing *) let ctxt_sr = ctxt_sr_update ctxt_sr then_br_sr in (* else branch *) let else_br_sr : se_result = let unlifted_cond_value = MV_unlift_option cond_value |> gen_custom_cc inst in (* ctx for else-branch *) let ctx = [ ctxt_sr.sr_sid_counter ] in increase sid_counter since takes new let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in let eb_cond_constraint : mich_f = MF_not (MF_is_none (gen_mich_v_ctx ~ctx cond_value)) in let else_br_base_ss = sym_state_symbol_context_swap ~ctx ss in let else_br_ss = else_br_base_ss |> update_top_1_bmstack ~f:(fun _ -> [ unlifted_cond_value ]) |> add_constraints ~c:[ eb_cond_constraint ] |> add_typ_constraints ~ctx ~v:unlifted_cond_value in run_inst_i i2 (ctxt_sr, else_br_ss) in se_result_pointwise_union then_br_sr else_br_sr | MI_pair -> update_top_2_bmstack ~f:(fun (x, y) -> [ MV_pair (x, y) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_car -> update_top_1_bmstack_and_constraint ~f:(fun x -> let nv = MV_car x |> gen_custom_cc inst in ([ nv ], michv_typ_constraints ~ctx ~v:nv)) ss |> running_ss_to_sr ctxt_sr | MI_cdr -> update_top_1_bmstack_and_constraint ~f:(fun x -> let nv = MV_cdr x |> gen_custom_cc inst in ([ nv ], michv_typ_constraints ~ctx ~v:nv)) ss |> running_ss_to_sr ctxt_sr | MI_left t -> update_top_1_bmstack ~f:(fun h - > [ ( t , h ) | > gen_custom_cc inst ] ) ~f:(fun h -> let ty = MT_or (typ_of_val h, t) |> gen_custom_cc inst in [ MV_left (ty, h) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_right t -> update_top_1_bmstack ~f:(fun h - > [ ( t , h ) | > gen_custom_cc inst ] ) ~f:(fun h -> let ty = MT_or (t, typ_of_val h) |> gen_custom_cc inst in [ MV_right (ty, h) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_if_left (i1, i2) -> let cond_value : mich_v cc = get_bmstack_1 ss in let tb_cond_constraint : mich_f = MF_is_left (gen_mich_v_ctx ~ctx cond_value) in (* then branch *) let then_br_sr : se_result = let unlifted_cond_value = MV_unlift_left cond_value |> gen_custom_cc inst in update_top_1_bmstack ~f:(fun _ -> [ unlifted_cond_value ]) ss |> add_constraints ~c:[ tb_cond_constraint ] |> add_typ_constraints ~ctx ~v:unlifted_cond_value |> (fun ssss -> run_inst_i i1 (ctxt_sr, ssss)) in (* IMPORTANT: 
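(* Illustrative sketch, not part of the original module: the MI_dig and MI_dug
   cases above perform pure list surgery with List.split_n. The version below
   uses only the stdlib (split_n is a local stand-in for Core.List.split_n) and
   shows the same index conventions on a plain int stack; it lives inside this
   comment so the surrounding code is left untouched.

   let split_n n l =
     let rec go acc n l =
       if n <= 0 then (List.rev acc, l)
       else
         match l with
         | [] -> (List.rev acc, [])
         | x :: tl -> go (x :: acc) (n - 1) tl
     in
     go [] n l

   (* DIG n : move the element at depth n to the top of the stack *)
   let dig n stack =
     match split_n n stack with
     | hd, x :: tl -> (x :: hd) @ tl
     | _ -> failwith "dig : stack too short"

   (* DUG n : bury the top element below the next n elements *)
   let dug n stack =
     match split_n (n + 1) stack with
     | x :: hd, tl -> hd @ (x :: tl)
     | _ -> failwith "dug : stack too short"

   let () =
     assert (dig 2 [ 0; 1; 2; 3 ] = [ 2; 0; 1; 3 ]);
     assert (dug 2 [ 2; 0; 1; 3 ] = [ 0; 1; 2; 3 ])
*)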
ctxt_sr name shadowing *) let ctxt_sr = ctxt_sr_update ctxt_sr then_br_sr in (* else branch *) let else_br_sr : se_result = let unlifted_cond_value = MV_unlift_right cond_value |> gen_custom_cc inst in (* ctx for else-branch *) let ctx = [ ctxt_sr.sr_sid_counter ] in increase sid_counter since takes new let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in let eb_cond_constraint : mich_f = MF_not (MF_is_left (gen_mich_v_ctx ~ctx cond_value)) in let else_br_base_ss = sym_state_symbol_context_swap ~ctx ss in let else_br_ss = else_br_base_ss |> update_top_1_bmstack ~f:(fun _ -> [ unlifted_cond_value ]) |> add_constraints ~c:[ eb_cond_constraint ] |> add_typ_constraints ~ctx ~v:unlifted_cond_value in run_inst_i i2 (ctxt_sr, else_br_ss) in se_result_pointwise_union then_br_sr else_br_sr | MI_nil t -> let (lst : mich_v cc) = MV_nil t |> gen_custom_cc inst in push_bmstack ~v:lst ss |> add_sigma_constraint_of_list_nil ~ctx ~lst |> running_ss_to_sr ctxt_sr | MI_cons -> let ((hd : mich_v cc), (tl : mich_v cc)) = get_bmstack_2 ss in let (lst : mich_v cc) = MV_cons (hd, tl) |> gen_custom_cc inst in update_top_2_bmstack ~f:(fun _ -> [ lst ]) ss |> add_sigma_constraint_of_list_cons ~ctx ~lst ~hd ~tl |> running_ss_to_sr ctxt_sr | MI_if_cons (i1, i2) -> IF_CONS receives list - container only let cond_value : mich_v cc = get_bmstack_1 ss in let tb_cond_constraint : mich_f = MF_is_cons (gen_mich_v_ctx ~ctx cond_value) in (* then branch *) let then_br_sr : se_result = let unlifted_cond_value_hd = MV_hd_l cond_value |> gen_custom_cc inst in let unlifted_cond_value_tl = MV_tl_l cond_value |> gen_custom_cc inst in update_top_1_bmstack ~f:(fun _ -> [ unlifted_cond_value_hd; unlifted_cond_value_tl ]) ss |> add_constraints ~c:[ tb_cond_constraint ] |> add_typ_constraints ~ctx ~v:unlifted_cond_value_hd |> add_sigma_constraint_of_list_cons ~ctx ~lst:cond_value ~hd:unlifted_cond_value_hd ~tl:unlifted_cond_value_tl It is important to update of else - branch symbolic - state |> (fun ssss -> run_inst_i i1 (ctxt_sr, ssss)) in (* IMPORTANT: ctxt_sr name shadowing *) let ctxt_sr = ctxt_sr_update ctxt_sr then_br_sr in (* else branch *) let else_br_sr : se_result = (* ctx for else-branch *) let ctx = [ ctxt_sr.sr_sid_counter ] in increase sid_counter since takes new let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in let eb_cond_constraint : mich_f = MF_not (MF_is_cons (gen_mich_v_ctx ~ctx cond_value)) in let else_br_base_ss = sym_state_symbol_context_swap ~ctx ss in let else_br_ss = else_br_base_ss |> update_top_1_bmstack ~f:(fun _ -> []) |> add_constraints ~c:[ eb_cond_constraint ] |> add_sigma_constraint_of_list_nil ~ctx ~lst:cond_value in run_inst_i i2 (ctxt_sr, else_br_ss) in se_result_pointwise_union then_br_sr else_br_sr | MI_size -> let size_gen mv : mich_v cc list * mich_f list = let mvcc = gen_custom_cc inst mv in ([ mvcc ], [ MF_nat_bound { ctx_i = ss.ss_id; ctx_v = mvcc } ]) in update_top_1_bmstack_and_constraint ~f:(fun h -> match (typ_of_val h).cc_v with | MT_set _ -> MV_size_s h |> size_gen | MT_map _ -> MV_size_m h |> size_gen | MT_list _ -> MV_size_l h |> size_gen | MT_string -> MV_size_str h |> size_gen | MT_bytes -> MV_size_b h |> size_gen | _ -> failwith "run_inst_i : MI_size : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_empty_set t -> push_bmstack ~v:(MV_empty_set t |> gen_custom_cc inst) ss |> running_ss_to_sr ctxt_sr | MI_empty_map (t1, t2) -> let (map : mich_v cc) = MV_empty_map (t1, t2) |> gen_custom_cc inst in push_bmstack ~v:map ss |> add_sigma_constraint_of_map_empty ~ctx ~map |> running_ss_to_sr 
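(* Illustrative sketch, not part of the original module: every IF-like case in
   this function (MI_if_none, MI_if_left, MI_if_cons, MI_if) follows the same
   skeleton - run the then-branch under the current context with the positive
   condition constraint, merge the counters (ctxt_sr_update keeps the larger
   sid), give the else-branch a fresh context id, bump the counter, and union
   the two results pointwise. A schematic stdlib rendering with invented names
   (branch_result, run_two_branches), kept inside this comment:

   type 'a branch_result = { states : 'a list; next_sid : int }

   let run_two_branches ~(sid0 : int)
       ~(run : ctx:int -> cond:bool -> 'a branch_result) : 'a branch_result =
     (* then-branch keeps the current symbolic context *)
     let then_r = run ~ctx:sid0 ~cond:true in
     (* merging mirrors ctxt_sr_update : keep the larger sid counter *)
     let sid1 = max sid0 then_r.next_sid in
     (* else-branch runs under a fresh context id; the counter is then bumped
        (ctxt_sr_sid_counter_incr) so the id is never reused *)
     let else_r = run ~ctx:sid1 ~cond:false in
     { states = then_r.states @ else_r.states;
       next_sid = max (sid1 + 1) else_r.next_sid }

   let () =
     let run ~ctx ~cond = { states = [ (ctx, cond) ]; next_sid = ctx + 1 } in
     let r = run_two_branches ~sid0:0 ~run in
     assert (List.length r.states = 2)
*)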
ctxt_sr | MI_empty_big_map (t1, t2) -> let (map : mich_v cc) = MV_empty_big_map (t1, t2) |> gen_custom_cc inst in push_bmstack ~v:map ss |> add_sigma_constraint_of_map_empty ~ctx ~map |> running_ss_to_sr ctxt_sr | MI_map i -> let (outer_cutcat, inner_cutcat) = (MCC_ln_map, MCC_lb_map) in let (blocked_mci, thenbr_mci, elsebr_mci) = ( { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = inner_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat } ) in let container_v = get_bmstack_1 ss in let container_t : mich_t cc = typ_of_val container_v in let elem_t : mich_t cc = match container_t.cc_v with | MT_list e -> e | MT_map (kt, vt) -> MT_pair (kt, vt) |> gen_custom_cc container_v | _ -> failwith "run_inst_i : MI_map : elem_t" in let rest_stack = List.tl_exn (get_bmstack ss) in let out_elem_t = List.hd_exn (Te.typ_run_inst ~param_t:(param_typ_of_ss ss) i (elem_t :: List.map ~f:typ_of_val rest_stack) ) in let out_container_t : mich_t cc = match container_t.cc_v with | MT_list _ -> MT_list out_elem_t |> gen_custom_cc inst | MT_map (kt, _) -> MT_map (kt, out_elem_t) |> gen_custom_cc inst | _ -> failwith "run_inst_i : MI_map : out_container_t" in (* 1. Construct blocked-state *) let blocked_state : sym_state = { ss with ss_block_mci = blocked_mci } in (* 2. If this MAP-instruction is the instruction already met before, return only blocked-state. *) if MciSet.mem ctxt_sr.sr_entered_loops blocked_mci then { ctxt_sr with sr_blocked = SSet.singleton blocked_state } else ( (* 2.+. update ctxt_sr - add entered-loop *) let ctxt_sr : se_result = { ctxt_sr with sr_entered_loops = MciSet.add ctxt_sr.sr_entered_loops blocked_mci; } in (* 3. run-instruction inside MAP instruction *) let tb_result : se_result = let tb_ss_id = [ ctxt_sr.sr_sid_counter ] in 3.1 .
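(* Illustrative sketch, not part of the original module: the MI_map case above
   derives the element type handed to the body (a list yields its element, a
   map yields a key-value pair) and the output container type from the body
   output element type (obtained in the real code from Te.typ_run_inst). A
   minimal stdlib rendering with an invented type grammar (mty):

   type mty =
     | TInt
     | TString
     | TPair of mty * mty
     | TList of mty
     | TMap of mty * mty

   (* element type seen by the MAP body *)
   let map_elem_t = function
     | TList e -> e
     | TMap (k, v) -> TPair (k, v)
     | _ -> failwith "map_elem_t : not a mappable container"

   (* output container type, given the body output element type *)
   let map_out_container_t container out_elem =
     match container with
     | TList _ -> TList out_elem
     | TMap (k, _) -> TMap (k, out_elem)
     | _ -> failwith "map_out_container_t : not a mappable container"

   let () =
     assert (map_elem_t (TMap (TString, TInt)) = TPair (TString, TInt));
     assert (
       map_out_container_t (TMap (TString, TInt)) TString
       = TMap (TString, TString))
*)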
construct entry sym - state let tb_entry_ss : sym_state = let bsi = blocked_state.ss_block_si in let tb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let tb_container_v = MV_symbol ( container_t, MSC_map_entry_stack (List.length bsi.si_map_entry) ) |> gen_custom_cc inst in let tb_out_container_v = MV_symbol ( out_container_t, MSC_map_exit_stack (List.length bsi.si_map_exit) ) |> gen_custom_cc inst in let ctx = tb_ss_id in let (tb_entry_si, tb_entry_constraints) : sym_image * mich_f list = let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:tb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:tb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v ~bc_balance_v in let (mapkey_v, mapkey_ct) : mich_v cc list * mich_f list = match container_t.cc_v with | MT_map (kt, _) -> let v = MV_symbol ( kt, MSC_map_mapkey_stack (List.length bsi.si_map_mapkey) ) |> gen_custom_cc inst in ([ v ], michv_typ_constraints ~ctx ~v) | _ -> ([], [ MF_true ]) in ( { si_mich = michst; si_dip = dipst; si_map_entry = tb_container_v :: mapentryst; si_map_exit = tb_out_container_v :: mapexitst; si_map_mapkey = mapkey_v @ mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = tb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ mapkey_ct @ constraints_abp ) in let (tb_block_si, tb_block_constraints) : sym_image * mich_f list = let elem_v = MV_symbol (elem_t, MSC_mich_stack (List.length tb_entry_si.si_mich)) |> gen_custom_cc inst in let elem_ct = michv_typ_constraints ~ctx ~v:elem_v in let (tb_container_blocksi_v, tb_container_blocksi_ct) : mich_v cc * mich_f list = match container_t.cc_v with | MT_map (_, mapelem_t) -> let (key : mich_v cc) = List.hd_exn tb_entry_si.si_map_mapkey in let (value_unopt : mich_v cc) = MV_cdr elem_v |> gen_custom_cc inst in let (updated_map : mich_v cc) = MV_update_xomm ( key, MV_none mapelem_t |> gen_custom_cc inst, tb_container_v ) |> gen_custom_cc inst in let (get : mich_v cc) = MV_get_xmoy (key, tb_container_v) |> gen_custom_cc inst in let (get_unopt : mich_v cc) = MV_unlift_option get |> gen_custom_cc inst in let (mem : mich_v cc) = MV_mem_xmb (key, tb_container_v) |> gen_custom_cc inst in ( updated_map, MF_eq ( gen_mich_v_ctx ~ctx get_unopt, gen_mich_v_ctx ~ctx value_unopt ) :: MF_is_true (gen_mich_v_ctx ~ctx mem) :: michv_typ_constraints ~ctx ~v:key @ sigma_constraint_of_map_update ~ctx ~map:tb_container_v ~key 
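(* Illustrative sketch, not part of the original module: the block-state
   constraints built above model one MAP iteration over a map container as
   picking a key that is a member (MF_is_true mem), equating its value with the
   element handed to the body, and continuing with the container minus that key
   (MV_update_xomm key None container, plus the matching sigma constraints). A
   concrete stdlib analogue of that per-entry consumption, kept in this comment:

   module SM = Map.Make (String)

   let consume_entry (key : string) (m : int SM.t) : (int * int SM.t) option =
     match SM.find_opt key m with
     | None -> None (* the membership constraint would rule this case out *)
     | Some v -> Some (v, SM.remove key m)

   let () =
     let m = SM.(empty |> add "a" 1 |> add "b" 2) in
     match consume_entry "a" m with
     | Some (1, rest) -> assert ((not (SM.mem "a" rest)) && SM.mem "b" rest)
     | _ -> assert false
*)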
~value:(MV_none mapelem_t |> gen_custom_cc inst) ~updated_map ) | MT_list _ -> let (hd : mich_v cc) = MV_hd_l tb_container_v |> gen_custom_cc inst in let (tl : mich_v cc) = MV_tl_l tb_container_v |> gen_custom_cc inst in ( tl, MF_eq (gen_mich_v_ctx ~ctx hd, gen_mich_v_ctx ~ctx elem_v) :: MF_is_cons (gen_mich_v_ctx ~ctx tb_container_v) :: sigma_constraint_of_list_cons ~ctx ~lst:tb_container_v ~hd ~tl ) | _ -> failwith "run_inst_i : MI_map : tb_container_blocksi_v" in ( { tb_entry_si with si_mich = elem_v :: tb_entry_si.si_mich; si_map_entry = tb_container_blocksi_v :: List.tl_exn tb_entry_si.si_map_entry; }, elem_ct @ tb_container_blocksi_ct ) in { ss_id = tb_ss_id; ss_start_mci = thenbr_mci; ss_block_mci = thenbr_mci; ss_start_si = tb_entry_si; ss_block_si = tb_block_si; ss_constraints = tb_entry_constraints @ tb_block_constraints; } in be aware - between " after tb_symstate construction " and " before run - inst " , update ctxt_sr ( increase sid - counter ) - becuase new sym - state constructed before . update ctxt_sr (increase sid-counter) - becuase new sym-state constructed before. *) let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in 3.2 . run_inst_i recursive call let tb_sr_result_raw : se_result = run_inst_i i (ctxt_sr, tb_entry_ss) in 3.3 . transform running states to blocked states let tb_exit_container_block_v : sym_image -> mich_v cc = fun { si_mich; si_map_exit; si_map_mapkey; _ } -> let ec = List.hd_exn si_map_exit in let ev = List.hd_exn si_mich in match (typ_of_val ec).cc_v with | MT_map _ -> MV_update_xomm (List.hd_exn si_map_mapkey, MV_some ev |> gen_custom_cc inst, ec) |> gen_custom_cc inst | MT_list _ -> MV_cons (ev, ec) |> gen_custom_cc inst | _ -> failwith "run_inst_i : MI_map : tb_exit_container_block_v" in { tb_sr_result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map tb_sr_result_raw.sr_running ~f:(fun rss -> let (exit_container : mich_v cc) = tb_exit_container_block_v rss.ss_block_si in let (container_constraints : mich_f list) = michv_typ_constraints ~ctx ~v:exit_container in { rss with ss_block_si = { rss.ss_block_si with si_mich = List.tl_exn rss.ss_block_si.si_mich; si_map_exit = exit_container :: List.tl_exn rss.ss_block_si.si_map_exit; }; ss_block_mci = thenbr_mci; ss_constraints = container_constraints @ rss.ss_constraints; } ) ) tb_sr_result_raw.sr_blocked; } in 3 . + . update ctxt_sr - override it using tb_result - it is okay to override since tb_result uses previous ctxt_sr in recursive call . - it is okay to override since tb_result uses previous ctxt_sr in recursive call. *) let ctxt_sr : se_result = tb_result in 4 . 
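(* Illustrative sketch, not part of the original module:
   tb_exit_container_block_v above folds the body output element back into the
   exit container - for a map container the current map key is bound to Some of
   the new element, for a list container the new element is consed on. A small
   stdlib analogue with an invented variant (out_container), kept in this
   comment:

   module SM = Map.Make (String)

   type out_container = OMap of string SM.t | OList of string list

   let fold_back ~key ~elem = function
     | OMap m -> OMap (SM.add key elem m)
     | OList l -> OList (elem :: l)

   let () =
     match fold_back ~key:"a" ~elem:"x" (OMap SM.empty) with
     | OMap m -> assert (SM.find "a" m = "x")
     | OList _ -> assert false
*)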
construct MAP instruction escaping sym - state ( else - branch ) let eb_symstate : sym_state = let eb_ss_id = [ ctxt_sr.sr_sid_counter ] in let eb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let ctx = eb_ss_id in let (eb_entry_si, eb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let eb_out_container_v = MV_symbol (out_container_t, MSC_mich_stack (List.length michst)) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:eb_trx_image.ti_param @ michv_typ_constraints ~ctx ~v:eb_out_container_v @ [ mtz_comes_from_constraint ~ctx ~mtz_v:eb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v ~bc_balance_v in ( { si_mich = eb_out_container_v :: michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = eb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ constraints_abp ) in { ss_id = eb_ss_id; ss_start_mci = elsebr_mci; ss_block_mci = elsebr_mci; ss_start_si = eb_entry_si; ss_block_si = eb_entry_si; ss_constraints = eb_entry_constraints; } in 4 . + . update ctxt_sr - increase sid - counter - becuase new sym - state constructed before . - becuase new sym-state constructed before. *) let ctxt_sr : se_result = ctxt_sr_sid_counter_incr ctxt_sr in (* RETURN *) { remember - current ctxt_sr contains tb_result in " 3 . + . " ctxt_sr with sr_running = SSet.singleton eb_symstate; sr_blocked = SSet.add ctxt_sr.sr_blocked blocked_state; } ) | MI_iter i -> (* refer MI_map case instead if you want to see the most detailed symbolic execution among loop instructions. *) let (outer_cutcat, inner_cutcat) = (MCC_ln_iter, MCC_lb_iter) in let (blocked_mci, thenbr_mci, elsebr_mci) = ( { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = inner_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat } ) in let container_v = get_bmstack_1 ss in let container_t : mich_t cc = typ_of_val container_v in let elem_t : mich_t cc = match container_t.cc_v with | MT_list e -> e | MT_set e -> e | MT_map (kt, vt) -> MT_pair (kt, vt) |> gen_custom_cc container_v | _ -> failwith "run_inst_i : MI_map : elem_t" in 1 . Construct blocked - state let blocked_state : sym_state = { ss with ss_block_mci = blocked_mci } in 2 . 
If this ITER - instruction is the instruction already met before , return only blocked - state . if MciSet.mem ctxt_sr.sr_entered_loops blocked_mci then { ctxt_sr with sr_blocked = SSet.singleton blocked_state } else ( 2 . + . update ctxt_sr - add entered - loop let ctxt_sr : se_result = { ctxt_sr with sr_entered_loops = MciSet.add ctxt_sr.sr_entered_loops blocked_mci; } in 3 . run - instruction inside ITER instruction let tb_result : se_result = let tb_ss_id = [ ctxt_sr.sr_sid_counter ] in 3.1 . construct entry sym - state let tb_entry_ss : sym_state = let tb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let bsi = blocked_state.ss_block_si in let tb_container_v = MV_symbol (container_t, MSC_iter_stack (List.length bsi.si_map_entry)) |> gen_custom_cc inst in let ctx = tb_ss_id in let (tb_entry_si, tb_entry_constraints) : sym_image * mich_f list = let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:tb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:tb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v ~bc_balance_v in let elem_v = ( elem_t , MSC_mich_stack ( ) ) | > gen_custom_cc inst in MV_symbol (elem_t, MSC_mich_stack (List.length michst)) |> gen_custom_cc inst in *) let = michv_typ_constraints ~v : elem_v in (* let mapkey_ct : mich_f list = (* Precondition : container's type is map *) match container_t.cc_v with | MT_map _ -> let mapkey = MV_car elem_v |> gen_custom_cc inst in [ 1 . 
key is not the key of the container MF_not (MF_is_true (MV_mem_xmb (mapkey, tb_container_v) |> gen_dummy_cc |> gen_mich_v_ctx ~ctx ) ); ] | _ -> [] in *) ( { si_mich = michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = tb_container_v :: iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = tb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ (* @ mapkey_ct *) @ constraints_abp ) in let (tb_block_si, tb_block_constraints) : sym_image * mich_f list = let elem_v = MV_symbol (elem_t, MSC_mich_stack (List.length tb_entry_si.si_mich)) |> gen_custom_cc inst in let elem_ct = michv_typ_constraints ~ctx ~v:elem_v in let (tb_container_blocksi_v, tb_container_blocksi_ct) : mich_v cc * mich_f list = match container_t.cc_v with | MT_map (_, mapelem_t) -> let (key : mich_v cc) = MV_car elem_v |> gen_custom_cc inst in let (value_unopt : mich_v cc) = MV_cdr elem_v |> gen_custom_cc inst in let (updated_map : mich_v cc) = MV_update_xomm ( key, MV_none mapelem_t |> gen_custom_cc inst, tb_container_v ) |> gen_custom_cc inst in let (get : mich_v cc) = MV_get_xmoy (key, tb_container_v) |> gen_custom_cc inst in let (mem : mich_v cc) = MV_mem_xmb (key, tb_container_v) |> gen_custom_cc inst in ( updated_map, MF_eq ( gen_mich_v_ctx ~ctx (MV_unlift_option get |> gen_custom_cc inst), gen_mich_v_ctx ~ctx value_unopt ) :: MF_is_true (gen_mich_v_ctx ~ctx mem) :: MF_not (MF_is_none (gen_mich_v_ctx ~ctx get)) :: michv_typ_constraints ~ctx ~v:key @ sigma_constraint_of_map_update ~ctx ~map:tb_container_v ~key ~value:(MV_none mapelem_t |> gen_custom_cc inst) ~updated_map ) | MT_set _ -> let (mem : mich_v cc) = MV_mem_xsb (elem_v, tb_container_v) |> gen_custom_cc inst in let (updated_set : mich_v cc) = MV_update_xbss ( elem_v, MV_lit_bool true |> gen_custom_cc inst, tb_container_v ) |> gen_custom_cc inst in (updated_set, [ MF_is_true (gen_mich_v_ctx ~ctx mem) ]) | MT_list _ -> let (hd : mich_v cc) = MV_hd_l tb_container_v |> gen_custom_cc inst in let (tl : mich_v cc) = MV_tl_l tb_container_v |> gen_custom_cc inst in ( tl, MF_eq (gen_mich_v_ctx ~ctx hd, gen_mich_v_ctx ~ctx elem_v) :: MF_is_cons (gen_mich_v_ctx ~ctx tb_container_v) :: sigma_constraint_of_list_cons ~ctx ~lst:tb_container_v ~hd ~tl ) | _ -> failwith "run_inst_i : MI_iter : tb_container_blocksi_v" in ( { tb_entry_si with si_mich = elem_v :: tb_entry_si.si_mich; si_iter = tb_container_blocksi_v :: List.tl_exn tb_entry_si.si_iter; }, elem_ct @ tb_container_blocksi_ct ) in { ss_id = tb_ss_id; ss_start_mci = thenbr_mci; ss_block_mci = thenbr_mci; ss_start_si = tb_entry_si; ss_block_si = tb_block_si; ss_constraints = tb_entry_constraints @ tb_block_constraints; } in be aware - between " after tb_symstate construction " and " before run - inst " , update ctxt_sr ( increase sid - counter ) - becuase new sym - state constructed before . update ctxt_sr (increase sid-counter) - becuase new sym-state constructed before. *) let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in 3.2 . run_inst_i recursive call let tb_sr_result_raw : se_result = run_inst_i i (ctxt_sr, tb_entry_ss) in 3.3 . transform running states to blocked states { tb_sr_result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map tb_sr_result_raw.sr_running ~f:(fun rss -> { rss with ss_block_mci = thenbr_mci } ) ) tb_sr_result_raw.sr_blocked; } in 3 . + . update ctxt_sr - override it using tb_result - it is okay to override since tb_result uses previous ctxt_sr in recursive call . 
- it is okay to override since tb_result uses previous ctxt_sr in recursive call. *) let ctxt_sr : se_result = tb_result in 4 . construct MAP instruction escaping sym - state ( else - branch ) let eb_symstate : sym_state = let eb_ss_id = [ ctxt_sr.sr_sid_counter ] in let eb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let ctx = eb_ss_id in let (eb_entry_si, eb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:eb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:eb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v ~bc_balance_v in ( { si_mich = michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = eb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ constraints_abp ) in { ss_id = eb_ss_id; ss_start_mci = elsebr_mci; ss_block_mci = elsebr_mci; ss_start_si = eb_entry_si; ss_block_si = eb_entry_si; ss_constraints = eb_entry_constraints; } in 4 . + . update ctxt_sr - increase sid - counter - becuase new sym - state constructed before . - becuase new sym-state constructed before. *) let ctxt_sr : se_result = ctxt_sr_sid_counter_incr ctxt_sr in (* RETURN *) { remember - current ctxt_sr contains tb_result in " 3 . + . 
" ctxt_sr with sr_running = SSet.singleton eb_symstate; sr_blocked = SSet.add ctxt_sr.sr_blocked blocked_state; } ) | MI_mem -> update_top_2_bmstack ~f:(fun (h, h2) -> let nv = match (typ_of_val h2).cc_v with | MT_set _ -> MV_mem_xsb (h, h2) | MT_map _ -> MV_mem_xmb (h, h2) | MT_big_map _ -> MV_mem_xbmb (h, h2) | _ -> failwith "run_inst_i : MI_mem : unexpected" in [ gen_custom_cc inst nv ]) ss |> running_ss_to_sr ctxt_sr | MI_get -> let ((key : mich_v cc), (cont : mich_v cc)) = get_bmstack_2 ss in update_top_2_bmstack_and_constraint ~f:(fun _ -> match (typ_of_val cont).cc_v with | MT_map _ -> let nv = MV_get_xmoy (key, cont) |> gen_custom_cc inst in ([ nv ], []) | MT_big_map _ -> let nv = MV_get_xbmo (key, cont) |> gen_custom_cc inst in ([ nv ], []) | _ -> failwith "run_inst_i : MI_get : unexpected") ss |> add_sigma_constraint_of_map_get ~ctx ~map:cont ~key |> running_ss_to_sr ctxt_sr | MI_update -> let ((key : mich_v cc), (value : mich_v cc), (cont : mich_v cc)) = get_bmstack_3 ss in let (updated_cont : mich_v cc) = (match (typ_of_val cont).cc_v with | MT_set _ -> MV_update_xbss (key, value, cont) | MT_map _ -> MV_update_xomm (key, value, cont) | MT_big_map _ -> MV_update_xobmbm (key, value, cont) | _ -> failwith "run_inst_i : MI_update : unexpected") |> gen_custom_cc inst in update_top_3_bmstack ~f:(fun _ -> [ updated_cont ]) ss |> (match (typ_of_val cont).cc_v with | MT_set _ -> Fun.id | MT_map _ -> add_sigma_constraint_of_map_update ~ctx ~map:cont ~key ~value ~updated_map:updated_cont | MT_big_map _ -> add_sigma_constraint_of_map_update ~ctx ~map:cont ~key ~value ~updated_map:updated_cont | _ -> failwith "run_inst_i : MI_update : unexpected") |> running_ss_to_sr ctxt_sr | MI_if (i1, i2) -> let cond_value : mich_v cc = List.hd_exn (get_bmstack ss) in let tb_cond_constraint : mich_f = MF_is_true (cond_value |> gen_mich_v_ctx ~ctx) in (* then branch *) let then_br_sr : se_result = update_top_1_bmstack ~f:(fun _ -> []) ss |> add_constraints ~c:[ tb_cond_constraint ] |> (fun ssss -> run_inst_i i1 (ctxt_sr, ssss)) in (* IMPORTANT: ctxt_sr name shadowing *) let ctxt_sr = ctxt_sr_update ctxt_sr then_br_sr in (* else branch *) let else_br_sr : se_result = (* ctx for else-branch *) let ctx = [ ctxt_sr.sr_sid_counter ] in increase sid_counter since takes new let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in let eb_cond_constraint : mich_f = MF_not (MF_is_true (gen_mich_v_ctx ~ctx cond_value)) in let else_br_base_ss = sym_state_symbol_context_swap ~ctx ss in let else_br_ss = else_br_base_ss |> update_top_1_bmstack ~f:(fun _ -> []) |> add_constraints ~c:[ eb_cond_constraint ] in run_inst_i i2 (ctxt_sr, else_br_ss) in se_result_pointwise_union then_br_sr else_br_sr | MI_loop i -> (* refer MI_map case instead if you want to see the most detailed symbolic execution among loop instructions. *) let (outer_cutcat, inner_cutcat) = (MCC_ln_loop, MCC_lb_loop) in let (blocked_mci, thenbr_mci, elsebr_mci) = ( { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = inner_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat } ) in 1 . Construct blocked - state let blocked_state : sym_state = { ss with ss_block_mci = blocked_mci } in 2 . If this LOOP - instruction is the instruction already met before , return only blocked - state . if MciSet.mem ctxt_sr.sr_entered_loops blocked_mci then { ctxt_sr with sr_blocked = SSet.singleton blocked_state } else ( 2 . + . 
update ctxt_sr - add entered - loop let ctxt_sr : se_result = { ctxt_sr with sr_entered_loops = MciSet.add ctxt_sr.sr_entered_loops blocked_mci; } in 3 . run - instruction inside LOOP instruction let tb_result : se_result = let tb_ss_id = [ ctxt_sr.sr_sid_counter ] in 3.1 . construct entry sym - state let tb_entry_ss : sym_state = let tb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let (tb_entry_si, tb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ctx = tb_ss_id in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:tb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:tb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v ~bc_balance_v in ( { si_mich = michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = tb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ constraints_abp ) in { ss_id = tb_ss_id; ss_start_mci = thenbr_mci; ss_block_mci = thenbr_mci; ss_start_si = tb_entry_si; ss_block_si = tb_entry_si; ss_constraints = tb_entry_constraints; } in be aware - between " after tb_symstate construction " and " before run - inst " , update ctxt_sr ( increase sid - counter ) - becuase new sym - state constructed before . update ctxt_sr (increase sid-counter) - becuase new sym-state constructed before. *) let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in 3.2 . run_inst_i recursive call let tb_sr_result_raw : se_result = run_inst_i i (ctxt_sr, tb_entry_ss) in 3.3 . transform running states to blocked states { tb_sr_result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map tb_sr_result_raw.sr_running ~f:(fun rss -> { rss with ss_block_mci = thenbr_mci } ) ) tb_sr_result_raw.sr_blocked; } in 3 . + . update ctxt_sr - override it using tb_result - it is okay to override since tb_result uses previous ctxt_sr in recursive call . - it is okay to override since tb_result uses previous ctxt_sr in recursive call. *) let ctxt_sr : se_result = tb_result in 4 . 
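(* Illustrative sketch, not part of the original module: MI_map, MI_iter and
   MI_loop all summarize a loop the same way - the first time a loop head is
   reached on a path it is recorded as a blocked state, the body is run once
   from a fully symbolic entry state, the resulting running states are blocked
   at the loop-body cut category, and a separate escape state continues after
   the loop; any later encounter of the same head only blocks. A schematic
   stdlib rendering with invented names (summary, summarize_loop), kept in this
   comment:

   module IS = Set.Make (Int)

   type 'st summary = { blocked : 'st list; running : 'st list; seen : IS.t }

   let summarize_loop ~loop_id ~state ~fresh_entry ~run_body ~escape acc =
     if IS.mem loop_id acc.seen then
       { acc with blocked = state :: acc.blocked }
     else
       let acc = { acc with seen = IS.add loop_id acc.seen } in
       let body_states = run_body fresh_entry in
       { acc with
         blocked = state :: (body_states @ acc.blocked);
         running = escape :: acc.running }

   let () =
     let acc = { blocked = []; running = []; seen = IS.empty } in
     let acc =
       summarize_loop ~loop_id:0 ~state:"head" ~fresh_entry:"entry"
         ~run_body:(fun e -> [ e ^ "->body" ]) ~escape:"escape" acc
     in
     assert (List.length acc.blocked = 2 && acc.running = [ "escape" ])
*)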
construct LOOP instruction escaping sym - state ( else - branch ) let eb_symstate : sym_state = let eb_ss_id = [ ctxt_sr.sr_sid_counter ] in let eb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let (eb_entry_si, eb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ctx = eb_ss_id in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:eb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:eb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v ~bc_balance_v in ( { si_mich = michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = eb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ constraints_abp ) in { ss_id = eb_ss_id; ss_start_mci = elsebr_mci; ss_block_mci = elsebr_mci; ss_start_si = eb_entry_si; ss_block_si = eb_entry_si; ss_constraints = eb_entry_constraints; } in 4 . + . update ctxt_sr - increase sid - counter - becuase new sym - state constructed before . - becuase new sym-state constructed before. *) let ctxt_sr : se_result = ctxt_sr_sid_counter_incr ctxt_sr in (* RETURN *) { remember - current ctxt_sr contains tb_result in " 3 . + . " ctxt_sr with sr_running = SSet.singleton eb_symstate; sr_blocked = SSet.add ctxt_sr.sr_blocked blocked_state; } ) | MI_loop_left i -> (* refer MI_map case instead if you want to see the most detailed symbolic execution among loop instructions. 
*) let (outer_cutcat, inner_cutcat) = (MCC_ln_loopleft, MCC_lb_loopleft) in let (blocked_mci, thenbr_mci, elsebr_mci) = ( { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = inner_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat } ) in let branch_v = get_bmstack_1 ss in let branch_t : mich_t cc = typ_of_val branch_v in let (left_elem_t, right_elem_t) : mich_t cc * mich_t cc = match branch_t.cc_v with | MT_or (t1, t2) -> (t1, t2) | _ -> failwith "run_inst_i : MI_loop_left : left_elem_t, right_elem_t" in Convert LOOP_LEFT to LOOP LOOP_LEFT { BODY } = = = PUSH bool True ; LOOP { IF_LEFT { BODY ; PUSH bool True } { RIGHT left_elem_t ; PUSH bool False } } ; IF_LEFT { PUSH bool False ; FAILWITH } { } LOOP_LEFT {BODY} === PUSH bool True; LOOP { IF_LEFT {BODY; PUSH bool True} {RIGHT left_elem_t; PUSH bool False} }; IF_LEFT {PUSH bool False; FAILWITH} { } *) let _ = ignore (blocked_mci, thenbr_mci, elsebr_mci, right_elem_t) in let gcc_inst : mich_i -> mich_i cc = gen_custom_cc inst in let gcc_inst_t : mich_t -> mich_t cc = gen_custom_cc inst in let gcc_inst_v : mich_v -> mich_v cc = gen_custom_cc inst in let typ_bool : mich_t cc = gcc_inst_t MT_bool in let push_bool_inst b = gcc_inst (MI_push (typ_bool, gcc_inst_v (MV_lit_bool b))) in let loop_inst : mich_i cc = gcc_inst (let body_true_seq : mich_i cc = gcc_inst (MI_seq (i, push_bool_inst true)) in let right_false_seq : mich_i cc = gcc_inst (MI_seq (gcc_inst (MI_right left_elem_t), push_bool_inst false)) in let if_left_inst : mich_i cc = gcc_inst (MI_if_left (body_true_seq, right_false_seq)) in MI_loop if_left_inst ) in let last_if_left_inst : mich_i cc = gcc_inst (MI_if_left ( gcc_inst (MI_seq (push_bool_inst false, gcc_inst MI_failwith)), gcc_inst (MI_drop (Bigint.of_int 0)) ) ) in let new_inst = gcc_inst (MI_seq ( push_bool_inst true, gcc_inst (MI_seq (loop_inst, last_if_left_inst)) ) ) in run_inst_i new_inst (ctxt_sr, ss) ( * 1 . Construct blocked - state (* 1. Construct blocked-state *) let blocked_state : sym_state = { ss with ss_block_mci = blocked_mci } in 2 . If this LOOP_LEFT - instruction is the instruction already met before , return only blocked - state . if MciSet.mem ctxt_sr.sr_entered_loops blocked_mci then { ctxt_sr with sr_blocked = SSet.singleton blocked_state } else ( 2 . + . update ctxt_sr - add entered - loop let ctxt_sr : se_result = { ctxt_sr with sr_entered_loops = MciSet.add ctxt_sr.sr_entered_loops blocked_mci; } in 3 . run - instruction inside LOOP_LEFT instruction let tb_result : se_result = let tb_ss_id = [ ctxt_sr.sr_sid_counter ] in 3.1 . 
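(* Illustrative sketch, not part of the original module: the MI_loop_left case
   above rewrites LOOP_LEFT into PUSH True; LOOP { IF_LEFT { BODY; PUSH True }
   { RIGHT t; PUSH False } }; IF_LEFT { PUSH False; FAILWITH } { } and re-runs
   the interpreter on the rewritten program. The value-level check below (using
   the stdlib Either module, OCaml 4.12 or later) shows that the two
   formulations compute the same result; all names are invented for the example
   and the code is kept inside this comment.

   (* reference semantics of LOOP_LEFT on plain values *)
   let rec loop_left (body : 'a -> ('a, 'b) Either.t) (x : 'a) : 'b =
     match body x with
     | Either.Left a -> loop_left body a
     | Either.Right b -> b

   (* the desugared, boolean-driven form *)
   let loop_left_desugared body x =
     let rec loop (flag, v) =
       if not flag then v
       else
         match v with
         | Either.Left a -> loop (true, body a) (* BODY; PUSH True *)
         | Either.Right _ -> loop (false, v) (* RIGHT _; PUSH False *)
     in
     match loop (true, Either.Left x) with
     | Either.Right b -> b
     | Either.Left _ -> failwith "unreachable: trailing IF_LEFT would FAILWITH"

   let () =
     let body n =
       if n < 5 then Either.Left (n + 1) else Either.Right (string_of_int n)
     in
     assert (loop_left body 0 = loop_left_desugared body 0)
*)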
construct entry sym - state let tb_entry_ss : sym_state = let tb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let (tb_entry_si, tb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ctx = tb_ss_id in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:tb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:tb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v ~bc_balance_v in let elem_v = MV_symbol (left_elem_t, MSC_mich_stack (List.length michst)) |> gen_custom_cc inst in let elem_ct = michv_typ_constraints ~ctx ~v:elem_v in ( { si_mich = elem_v :: michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = tb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ elem_ct @ constraints_abp ) in { ss_id = tb_ss_id; ss_start_mci = thenbr_mci; ss_block_mci = thenbr_mci; ss_start_si = tb_entry_si; ss_block_si = tb_entry_si; ss_constraints = tb_entry_constraints; } in be aware - between " after tb_symstate construction " and " before run - inst " , update ctxt_sr ( increase sid - counter ) - becuase new sym - state constructed before . update ctxt_sr (increase sid-counter) - becuase new sym-state constructed before. *) let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in 3.2 . run_inst_i recursive call let tb_sr_result_raw : se_result = run_inst_i i (ctxt_sr, tb_entry_ss) in 3.3 . transform running states to blocked states { tb_sr_result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map tb_sr_result_raw.sr_running ~f:(fun rss -> { rss with ss_block_mci = thenbr_mci } ) ) tb_sr_result_raw.sr_blocked; } in 3 . + . update ctxt_sr - override it using tb_result - it is okay to override since tb_result uses previous ctxt_sr in recursive call . - it is okay to override since tb_result uses previous ctxt_sr in recursive call. *) let ctxt_sr : se_result = tb_result in 4 . 
construct MAP instruction escaping sym - state ( else - branch ) let eb_symstate : sym_state = let eb_ss_id = [ ctxt_sr.sr_sid_counter ] in let eb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let (eb_entry_si, eb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ctx = eb_ss_id in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:eb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:eb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v ~bc_balance_v in let elem_v = MV_symbol (right_elem_t, MSC_mich_stack (List.length michst)) |> gen_custom_cc inst in let elem_ct = michv_typ_constraints ~ctx ~v:elem_v in ( { si_mich = elem_v :: michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = eb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ elem_ct @ constraints_abp ) in { ss_id = eb_ss_id; ss_start_mci = elsebr_mci; ss_block_mci = elsebr_mci; ss_start_si = eb_entry_si; ss_block_si = eb_entry_si; ss_constraints = eb_entry_constraints; } in 4 . + . update ctxt_sr - increase sid - counter - becuase new sym - state constructed before . - becuase new sym-state constructed before. *) let ctxt_sr : se_result = ctxt_sr_sid_counter_incr ctxt_sr in (* RETURN *) { remember - current ctxt_sr contains tb_result in " 3 . + . 
" ctxt_sr with sr_running = SSet.singleton eb_symstate; sr_blocked = SSet.add ctxt_sr.sr_blocked blocked_state; } ) *) | MI_lambda (t1, t2, i) -> push_bmstack ~v:(MV_lit_lambda (t1, t2, i) |> gen_custom_cc inst) ss |> running_ss_to_sr ctxt_sr | MI_exec -> update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> let v = MV_exec (h1, h2) |> gen_custom_cc inst in ([ v ], michv_typ_constraints ~ctx ~v)) ss |> running_ss_to_sr ctxt_sr | MI_dip_n (zn, i) -> let n : int = Bigint.to_int_exn zn in let dipped_ss = let (dip_elems, new_mich) = List.split_n ss.ss_block_si.si_mich n in let new_dip = List.rev_append dip_elems ss.ss_block_si.si_dip in { ss with ss_block_si = { ss.ss_block_si with si_mich = new_mich; si_dip = new_dip }; } in let sr_i = run_inst_i i (ctxt_sr, dipped_ss) in let undip d_ss = let (mich_elems, new_dip) = List.split_n d_ss.ss_block_si.si_dip n in let new_mich = List.rev_append mich_elems d_ss.ss_block_si.si_mich in { d_ss with ss_block_si = { d_ss.ss_block_si with si_mich = new_mich; si_dip = new_dip }; } in { sr_i with sr_running = SSet.map sr_i.sr_running ~f:undip } | MI_failwith -> 1 . set block_mci 2 . enroll this sym_state to sr_terminated 2. enroll this sym_state to sr_terminated *) let bmci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_trx_exit } in { se_result_empty with sr_terminated = SSet.singleton { ss with ss_block_mci = bmci }; } | MI_cast t -> update_top_1_bmstack ~f:(fun x -> if equal_mich_t (typ_of_val x).cc_v t.cc_v then [ x ] else SeError "Not Supported Cast" |> raise) ss |> running_ss_to_sr ctxt_sr | MI_rename -> update_top_1_bmstack ~f:(fun x -> [ { x with cc_anl = inst.cc_anl } ]) ss |> running_ss_to_sr ctxt_sr | MI_concat -> ( let h = get_bmstack_1 ss in match (typ_of_val h).cc_v with | MT_string -> update_top_2_bmstack ~f:(fun (h1, h2) -> [ MV_concat_sss (h1, h2) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MT_bytes -> update_top_2_bmstack ~f:(fun (h1, h2) -> [ MV_concat_bbb (h1, h2) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MT_list { cc_v = MT_string; _ } -> update_top_1_bmstack ~f:(fun _ -> [ MV_concat_list_s h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MT_list { cc_v = MT_bytes; _ } -> update_top_1_bmstack ~f:(fun _ -> [ MV_concat_list_b h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | _ -> failwith "run_inst_i : MI_concat : unexpected" ) | MI_slice -> update_top_3_bmstack ~f:(fun (h1, h2, h3) -> match (typ_of_val h3).cc_v with | MT_string -> [ MV_slice_nnso (h1, h2, h3) |> gen_custom_cc inst ] | MT_bytes -> [ MV_slice_nnbo (h1, h2, h3) |> gen_custom_cc inst ] | _ -> failwith "run_inst_i : MI_slice : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_pack -> update_top_1_bmstack ~f:(fun h -> [ MV_pack h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_unpack t -> update_top_1_bmstack_and_constraint ~f:(fun h -> let mvcc = MV_unpack (t, h) |> gen_custom_cc inst in ([ mvcc ], michv_typ_constraints ~ctx ~v:mvcc)) ss |> running_ss_to_sr ctxt_sr | MI_add -> ( let add_gen_sr : mich_v * mich_f list -> se_result = fun (mv, csl) -> update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ mv |> gen_custom_cc inst ], csl)) ss |> running_ss_to_sr ctxt_sr in let (h, h2) = get_bmstack_2 ss in match ((typ_of_val h).cc_v, (typ_of_val h2).cc_v) with | (MT_nat, MT_int) -> add_gen_sr (MV_add_nii (h, h2), []) | (MT_int, MT_nat) -> add_gen_sr (MV_add_ini (h, h2), []) | (MT_int, MT_int) -> add_gen_sr (MV_add_iii (h, h2), []) | (MT_nat, MT_nat) -> add_gen_sr ( MV_add_nnn (h, h2), [ MF_nat_bound (MV_add_nnn (h, h2) |> 
gen_custom_cc inst |> gen_mich_v_ctx ~ctx); ] ) | (MT_timestamp, MT_int) -> add_gen_sr (MV_add_tit (h, h2), []) | (MT_int, MT_timestamp) -> add_gen_sr (MV_add_itt (h, h2), []) | (MT_mutez, MT_mutez) -> let nv = MV_add_mmm (h, h2) |> gen_custom_cc inst in let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_mutez_add_no_overflow; }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ nv ], [ MF_mutez_bound (nv |> gen_mich_v_ctx ~ctx) ])) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | _ -> failwith "run_inst_i : MI_add : unexpected" ) | MI_sub -> ( let sub_gen_sr : mich_v -> se_result = fun mv -> update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ mv |> gen_custom_cc inst ], [])) ss |> running_ss_to_sr ctxt_sr in let (h, h2) = get_bmstack_2 ss in match ((typ_of_val h).cc_v, (typ_of_val h2).cc_v) with | (MT_nat, MT_nat) -> MV_sub_nni (h, h2) |> sub_gen_sr | (MT_nat, MT_int) -> MV_sub_nii (h, h2) |> sub_gen_sr | (MT_int, MT_nat) -> MV_sub_ini (h, h2) |> sub_gen_sr | (MT_int, MT_int) -> MV_sub_iii (h, h2) |> sub_gen_sr | (MT_timestamp, MT_timestamp) -> MV_sub_tti (h, h2) |> sub_gen_sr | (MT_timestamp, MT_int) -> MV_sub_tit (h, h2) |> sub_gen_sr | (MT_mutez, MT_mutez) -> let nv = MV_sub_mmm (h, h2) |> gen_custom_cc inst in let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_mutez_sub_no_underflow; }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ nv ], [ MF_mutez_bound (nv |> gen_mich_v_ctx ~ctx) ])) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | _ -> failwith "run_inst_i : MI_sub : unexpected" ) | MI_mul -> ( let mul_gen_sr : mich_v * mich_f list -> se_result = fun (mv, csl) -> update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ mv |> gen_custom_cc inst ], csl)) ss |> running_ss_to_sr ctxt_sr in let (h, h2) = get_bmstack_2 ss in match ((typ_of_val h).cc_v, (typ_of_val h2).cc_v) with | (MT_nat, MT_nat) -> mul_gen_sr ( MV_mul_nnn (h, h2), [ MF_mutez_bound (MV_mul_nnn (h, h2) |> gen_custom_cc inst |> gen_mich_v_ctx ~ctx); ] ) | (MT_nat, MT_int) -> mul_gen_sr (MV_mul_nii (h, h2), []) | (MT_int, MT_nat) -> mul_gen_sr (MV_mul_ini (h, h2), []) | (MT_int, MT_int) -> mul_gen_sr (MV_mul_iii (h, h2), []) | (MT_mutez, MT_nat) -> let nv = MV_mul_mnm (h, h2) |> gen_custom_cc inst in let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_mutez_mul_mnm_no_overflow; }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ nv ], [ MF_mutez_bound (nv |> gen_mich_v_ctx ~ctx) ])) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | (MT_nat, MT_mutez) -> let nv = MV_mul_nmm (h, h2) |> gen_custom_cc inst in let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_mutez_mul_nmm_no_overflow; }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ nv ], [ MF_mutez_bound (nv |> gen_mich_v_ctx ~ctx) ])) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | _ -> failwith "run_inst_i : MI_mul : unexpected" ) | MI_ediv -> ( let ediv_gen_sr : mich_v -> se_result = fun mv -> update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ mv |> gen_custom_cc inst ], [])) ss |> running_ss_to_sr ctxt_sr in let (h, h2) = get_bmstack_2 ss in match ((typ_of_val h).cc_v, (typ_of_val h2).cc_v) with | 
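(* Illustrative sketch, not part of the original module: the MI_dip_n case
   above protects the top n stack values by moving them, reversed, onto the dip
   stack before running the body, and moving them back afterwards, so that
   undipping is the exact inverse of dipping. A stdlib rendering (split_n is a
   local stand-in for Core.List.split_n), kept inside this comment:

   let split_n n l =
     let rec go acc n l =
       if n <= 0 then (List.rev acc, l)
       else
         match l with
         | [] -> (List.rev acc, [])
         | x :: tl -> go (x :: acc) (n - 1) tl
     in
     go [] n l

   let dip n (mich, dip_st) =
     let saved, rest = split_n n mich in
     (rest, List.rev_append saved dip_st)

   let undip n (mich, dip_st) =
     let saved, rest = split_n n dip_st in
     (List.rev_append saved mich, rest)

   let () =
     let st = ([ 1; 2; 3; 4 ], []) in
     assert (undip 2 (dip 2 st) = st)
*)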
(MT_nat, MT_nat) -> MV_ediv_nnnn (h, h2) |> ediv_gen_sr | (MT_nat, MT_int) -> MV_ediv_niin (h, h2) |> ediv_gen_sr | (MT_int, MT_nat) -> MV_ediv_inin (h, h2) |> ediv_gen_sr | (MT_int, MT_int) -> MV_ediv_iiin (h, h2) |> ediv_gen_sr | (MT_mutez, MT_nat) -> MV_ediv_mnmm (h, h2) |> ediv_gen_sr | (MT_mutez, MT_mutez) -> MV_ediv_mmnm (h, h2) |> ediv_gen_sr | _ -> failwith "run_inst_i : MI_ediv : unexpected" ) | MI_abs -> update_top_1_bmstack_and_constraint ~f:(fun h -> let mvcc = MV_abs_in h |> gen_custom_cc inst in ([ mvcc ], [ MF_nat_bound { ctx_i = ctx; ctx_v = mvcc } ])) ss |> running_ss_to_sr ctxt_sr | MI_isnat -> update_top_1_bmstack ~f:(fun x -> [ MV_isnat x |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_int -> (* NOTE: no additional nat-num constraints here *) update_top_1_bmstack ~f:(fun x -> [ MV_int_of_nat x |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_neg -> update_top_1_bmstack ~f:(fun h -> match (typ_of_val h).cc_v with | MT_nat -> [ MV_neg_ni h |> gen_custom_cc inst ] | MT_int -> [ MV_neg_ii h |> gen_custom_cc inst ] | _ -> failwith "run_inst_i : MI_neg : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_lsl -> let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_shiftleft_safe }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> let nv = MV_shiftL_nnn (h1, h2) |> gen_custom_cc inst in ( [ nv ], [ MF_shiftL_nnn_rhs_in_256 ({ ctx_i = ctx; ctx_v = h1 }, { ctx_i = ctx; ctx_v = h2 }); ] )) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | MI_lsr -> let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_shiftright_safe }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> let nv = MV_shiftR_nnn (h1, h2) |> gen_custom_cc inst in ( [ nv ], [ MF_shiftR_nnn_rhs_in_256 ({ ctx_i = ctx; ctx_v = h1 }, { ctx_i = ctx; ctx_v = h2 }); ] )) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | MI_or -> update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> match ((typ_of_val h1).cc_v, (typ_of_val h2).cc_v) with | (MT_bool, MT_bool) -> ([ MV_or_bbb (h1, h2) |> gen_custom_cc inst ], []) | (MT_nat, MT_nat) -> let nv = MV_or_nnn (h1, h2) |> gen_custom_cc inst in ([ nv ], [ MF_nat_bound { ctx_i = ctx; ctx_v = nv } ]) | _ -> failwith "run_inst_i : MI_or : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_and -> update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> match ((typ_of_val h1).cc_v, (typ_of_val h2).cc_v) with | (MT_bool, MT_bool) -> ([ MV_and_bbb (h1, h2) |> gen_custom_cc inst ], []) | (MT_nat, MT_nat) -> let nv = MV_and_nnn (h1, h2) |> gen_custom_cc inst in ([ nv ], [ MF_nat_bound { ctx_i = ctx; ctx_v = nv } ]) | (MT_int, MT_nat) -> let nv = MV_and_inn (h1, h2) |> gen_custom_cc inst in ([ nv ], [ MF_nat_bound { ctx_i = ctx; ctx_v = nv } ]) | _ -> failwith "run_inst_i : MI_and : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_xor -> update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> match ((typ_of_val h1).cc_v, (typ_of_val h2).cc_v) with | (MT_bool, MT_bool) -> ([ MV_xor_bbb (h1, h2) |> gen_custom_cc inst ], []) | (MT_nat, MT_nat) -> let nv = MV_xor_nnn (h1, h2) |> gen_custom_cc inst in ([ nv ], [ MF_nat_bound { ctx_i = ctx; ctx_v = nv } ]) | _ -> failwith "run_inst_i : MI_xor : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_not -> update_top_1_bmstack ~f:(fun h -> match (typ_of_val h).cc_v with | MT_bool -> [ MV_not_bb h |> 
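(* Illustrative sketch, not part of the original module: the MT_mutez cases of
   MI_add, MI_sub and MI_mul above each split the state in two - a query state
   whose cut category asks the prover whether the operation can leave the mutez
   range (for example Q_mutez_add_no_overflow), and a running state that
   continues under the assumption MF_mutez_bound. A concrete runtime analogue
   of that check on int64; the bound below is a placeholder chosen for the
   example, not the exact bound encoded by MF_mutez_bound.

   let mutez_bound = Int64.sub (Int64.shift_left 1L 62) 1L

   let add_mutez_checked a b =
     let s = Int64.add a b in
     if Int64.compare s mutez_bound > 0 || Int64.compare s a < 0 then
       Error "overflow: the no-overflow query would flag this state"
     else Ok s

   let () =
     assert (add_mutez_checked 1L 2L = Ok 3L);
     assert (
       match add_mutez_checked mutez_bound 1L with
       | Error _ -> true
       | Ok _ -> false)
*)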
gen_custom_cc inst ] | MT_nat -> [ MV_not_ni h |> gen_custom_cc inst ] | MT_int -> [ MV_not_ii h |> gen_custom_cc inst ] | _ -> failwith "run_inst_i : MI_not : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_compare -> update_top_2_bmstack ~f:(fun (x, y) -> [ MV_compare (x, y) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_eq -> update_top_1_bmstack ~f:(fun x -> [ MV_eq_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_neq -> update_top_1_bmstack ~f:(fun x -> [ MV_neq_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_lt -> update_top_1_bmstack ~f:(fun x -> [ MV_lt_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_gt -> update_top_1_bmstack ~f:(fun x -> [ MV_gt_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_le -> update_top_1_bmstack ~f:(fun x -> [ MV_leq_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_ge -> update_top_1_bmstack ~f:(fun x -> [ MV_geq_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_self -> push_bmstack ~v:ss.ss_block_si.si_param.ti_contract ss |> running_ss_to_sr ctxt_sr | MI_contract t -> update_top_1_bmstack ~f:(fun x -> [ MV_contract_of_address (t, x) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_transfer_tokens -> update_top_3_bmstack ~f:(fun (x, y, z) -> [ MV_transfer_tokens (x, y, z) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_set_delegate -> update_top_1_bmstack ~f:(fun x -> [ MV_set_delegate x |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr (* | MI_create_account -> TODO *) | MI_create_contract (t1, t2, i) -> let (lambda : mich_v cc) = let (t_op_list : mich_t cc) = MT_list (gen_custom_cc inst MT_operation) |> gen_custom_cc inst in let (t_input : mich_t cc) = MT_pair (t1, t2) |> gen_custom_cc inst in let (t_output : mich_t cc) = MT_pair (t_op_list, t2) |> gen_custom_cc inst in MV_lit_lambda (t_input, t_output, i) |> gen_custom_cc inst in let (addr : mich_v cc) = MV_symbol (MT_address |> gen_custom_cc inst, MSC_new_contract) |> gen_custom_cc inst in update_top_3_bmstack ~f:(fun (kh_opt, z, s) -> [ MV_create_contract (t1, t2, lambda, kh_opt, z, s, addr) |> gen_custom_cc inst; addr; ]) ss |> running_ss_to_sr ctxt_sr | MI_implicit_account -> update_top_1_bmstack ~f:(fun h -> [ MV_implicit_account h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_now -> push_bmstack ~v:ss.ss_block_si.si_param.ti_time ss |> running_ss_to_sr ctxt_sr | MI_amount -> let amount_v = ss.ss_block_si.si_param.ti_amount in push_bmstack ss ~v:amount_v |> add_typ_constraints ~ctx ~v:amount_v |> running_ss_to_sr ctxt_sr | MI_balance -> let balance_v = ss.ss_block_si.si_balance in push_bmstack ss ~v:balance_v |> add_typ_constraints ~ctx ~v:balance_v |> running_ss_to_sr ctxt_sr | MI_check_signature -> update_top_3_bmstack ~f:(fun (h1, h2, h3) -> [ MV_check_signature (h1, h2, h3) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_blake2b -> update_top_1_bmstack ~f:(fun h -> [ MV_blake2b h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_sha256 -> update_top_1_bmstack ~f:(fun h -> [ MV_sha256 h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_sha512 -> update_top_1_bmstack ~f:(fun h -> [ MV_sha512 h |> gen_custom_cc 
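(* Illustrative sketch, not part of the original module: MI_eq through MI_ge
   above all consume the int produced by COMPARE and test it against zero, for
   example EQ becomes MV_eq_ib (x, 0). The reference behaviour on plain OCaml
   ints, kept inside this comment:

   let eq c = c = 0
   let neq c = c <> 0
   let lt c = c < 0
   let gt c = c > 0
   let le c = c <= 0
   let ge c = c >= 0

   let () =
     let c = compare 3 5 in (* COMPARE *)
     assert
       (lt c && le c && neq c && (not (eq c)) && (not (gt c)) && (not (ge c)))
*)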
inst ]) ss |> running_ss_to_sr ctxt_sr | MI_hash_key -> update_top_1_bmstack ~f:(fun h -> [ MV_hash_key h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr (* | MI_steps_to_quota -> TODO *) | MI_source -> let source_v = ss.ss_block_si.si_param.ti_source in push_bmstack ss ~v:source_v |> running_ss_to_sr ctxt_sr | MI_sender -> let sender_v = ss.ss_block_si.si_param.ti_sender in push_bmstack ss ~v:sender_v |> running_ss_to_sr ctxt_sr | MI_address -> update_top_1_bmstack ~f:(fun h -> [ MV_address_of_contract h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_chain_id -> push_bmstack ss ~v:(MV_lit_chain_id "TzChain" |> gen_custom_cc inst) |> running_ss_to_sr ctxt_sr | MI_unpair -> (* let _ = (* DEBUG *) print_endline "unpair enter" in *) update_top_1_bmstack_and_constraint ~f:(fun x -> let (a_fl, a_v) = MV_car x |> gen_custom_cc inst |> TzUtil.opt_mvcc ~ctx in let (d_fl, d_v) = MV_cdr x |> gen_custom_cc inst |> TzUtil.opt_mvcc ~ctx in ([ a_v; d_v ], a_fl @ d_fl)) ss |> running_ss_to_sr ctxt_sr | MI_micse_check i -> dealing with micse - check - bring running states from the result of " i " , and convert them to queries . - se_result might be go wrong when any loop - like instructions ( LOOP , LOOP_LEFT , ITER , MAP ) are inserted in micse - check instruction . - bring running states from the result of "i", and convert them to queries. - se_result might be go wrong when any loop-like instructions (LOOP, LOOP_LEFT, ITER, MAP) are inserted in micse-check instruction. *) let micse_check_se_result : se_result = run_inst_i i (ctxt_sr, ss) in (* IMPORTANT: ctxt_sr name shadowing *) let ctxt_sr = ctxt_sr_update ctxt_sr micse_check_se_result in { ctxt_sr with sr_running = SSet.singleton ss; If MI_micse_check allows loop - like instructions , it should be considered to add blocked - states in micse_check_se_result in return value . considered to add blocked-states in micse_check_se_result in return value. 
*) sr_blocked = SSet.empty; sr_queries = SSet.map micse_check_se_result.sr_running ~f:(fun rs -> { rs with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_assertion }; } ); sr_terminated = SSet.empty; } | _ -> failwith ("run_inst_i : wildcard match triggered : " ^ (sexp_of_mich_i inst.cc_v |> SexpUtil.tz_cc_sexp_form |> Sexp.to_string) ) (* function run_inst_i end *) let run_inst_entry : Tz.mich_t Tz.cc * Tz.mich_t Tz.cc * Tz.mich_i Tz.cc -> se_result * Tz.sym_state = let open Tz in let open TzUtil in fun (pt, st, c) -> let final_blocking : sym_state -> sym_state = fun ss -> let ctx = ss.ss_id in let (op_mtz_fl, op_mtz_v) : mich_f list * mich_v cc = MV_car (List.hd_exn ss.ss_block_si.si_mich) |> gen_custom_cc c |> TzUtil.opt_mvcc ~ctx |> fun (fl, mvcc) -> let ((lst_fl : mich_f list), (lst : mich_v cc list), (tl : mich_v cc)) = v_of_list ~ctx mvcc in let ((mtz_fl : mich_f list), (mtz_vl : mich_v cc option list)) = List.fold lst ~init:([], []) ~f:(fun (mtz_fl, mtz_vl) opv -> let ((fl : mich_f list), (v_opt : mich_v cc option)) = mtz_of_op ~ctx opv in (fl @ mtz_fl, v_opt :: mtz_vl) ) in let (mtz_v : mich_v cc) = List.fold mtz_vl ~init:(MV_mtz_of_op_list tl |> gen_custom_cc c) ~f:(fun acc vopt -> if Option.is_some vopt then MV_add_mmm (acc, Option.value_exn vopt) |> gen_custom_cc acc else acc) in (lst_fl @ mtz_fl @ fl, mtz_v) in let new_balance : mich_v cc = MV_sub_mmm (ss.ss_block_si.si_balance, op_mtz_v) |> gen_custom_cc c in let new_bc_balance : mich_v cc = MV_add_mmm (ss.ss_block_si.si_bc_balance, op_mtz_v) |> gen_custom_cc c in { (* If operation-mutez-subtraction policy turned on, we need to add balance-related constraint here. *) ss with ss_block_mci = { mci_loc = c.cc_loc; mci_cutcat = MCC_trx_exit }; ss_block_si = { ss.ss_block_si with si_balance = new_balance; si_bc_balance = new_bc_balance; }; ss_constraints = MF_and op_mtz_fl :: mtz_comes_from_constraint ~ctx ~mtz_v:op_mtz_v ~from_v:ss.ss_block_si.si_balance :: amount_balance_mutez_constraints ~ctx ~amount_v:op_mtz_v ~balance_v:new_balance ~bc_balance_v:new_bc_balance @ ss.ss_constraints; } in let (initial_sr, initial_ss) = run_inst_initial_se_result (pt, st, c) in let result_raw = run_inst c initial_sr in (* let _ = (* DEBUG *) print_endline ("result_raw running = " ^ (SSet.length result_raw.sr_running |> string_of_int) ^ ", blocked = " ^ (SSet.length result_raw.sr_blocked |> string_of_int) ) in *) let result = { result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map result_raw.sr_running ~f:final_blocking) result_raw.sr_blocked; } in let ss_constraint_optimization : sym_state -> sym_state = fun ss -> { ss with ss_constraints = ss.ss_constraints |> List.map ~f:opt_mf |> List.stable_dedup; } in let result_constraint_optimized = { result with sr_blocked = SSet.map result.sr_blocked ~f:ss_constraint_optimization; sr_queries = SSet.map result.sr_queries ~f:ss_constraint_optimization |> SSet.filter ~f:(fun ss -> if Option.is_none !Utils.Argument.query_pick then true else ( let ((picked_lin : int), (picked_col : int)) = Option.value_exn !Utils.Argument.query_pick in match ss.ss_block_mci.mci_loc with | CCLOC_Pos (p1, _) when p1.lin = picked_lin && p1.col = picked_col -> true | _ -> false ) ); } in (* let _ = (* DEBUG *) let increase_depth_str : int -> string -> string = fun d s -> let (tab : string) = String.make d '\t' in tab ^ String.substr_replace_all s ~pattern:"\n" ~with_:("\n" ^ tab) in let string_of_mf : mich_f -> string = (fun mf -> sexp_of_mich_f mf |> SexpUtil.to_string) in let 
string_of_mflst : mich_f list -> string list = (fun mfl -> List.map mfl ~f:string_of_mf) in let string_of_mvcc : mich_v cc -> string = (fun mvcc -> sexp_of_mich_v mvcc.cc_v |> SexpUtil.to_string) in let string_of_mvcclst : mich_v cc list -> string list = (fun mvl -> List.map mvl ~f:string_of_mvcc) in let string_of_sid : sym_state_id -> string = fun sid -> Printf.sprintf "[%s]" (List.map sid ~f:string_of_int |> String.concat ~sep:"; ") in let string_of_mci : mich_cut_info -> string = (fun mci -> sexp_of_mich_cut_info mci |> SexpUtil.to_string) in let string_of_si : sym_image -> string = fun si -> Printf.sprintf "> MICH:\n\t[\n%s\n\t]" (string_of_mvcclst si.si_mich |> String.concat ~sep:" ;\n" |> increase_depth_str 2 ) in let string_of_ss : sym_state -> string = fun ss -> Printf.sprintf "> ID: %s\n\n> START: \n\t> MCI: %s\n\t> SI: \n\t\t[\n%s\n\t\t]\n\n> BLOCK: \n\t> MCI: %s\n\t> SI: \n\t\t[\n%s\n\t\t]\n\n> CONSTRAINT: \n\t[\n%s\n\t]" (string_of_sid ss.ss_id) (string_of_mci ss.ss_start_mci) (string_of_si ss.ss_start_si |> increase_depth_str 3) (string_of_mci ss.ss_block_mci) (string_of_si ss.ss_block_si |> increase_depth_str 3) (string_of_mflst ss.ss_constraints |> String.concat ~sep:" ;\n" |> increase_depth_str 2 ) in SSet.fold result_constraint_optimized.sr_blocked ~init:0 ~f:(fun id ss -> Utils.Log.debug (fun m -> m "BLOCK_STATE [#%d]\n%s" id (string_of_ss ss) ); id + 1 ) |> ignore; SSet.fold result_constraint_optimized.sr_queries ~init:0 ~f:(fun id ss -> Utils.Log.debug (fun m -> m "QUERY_STATE [#%d]\n%s" id (string_of_ss ss) ); id + 1 ) |> ignore; exit 0 in *) (result_constraint_optimized, initial_ss) (* function run_inst_entry end *)
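The entry point above returns both the accumulated se_result and the initial symbolic state. A minimal sketch of a caller, assuming the parameter type, storage type, and contract body have already been parsed elsewhere into Tz.mich_t Tz.cc / Tz.mich_i Tz.cc values (the parsed_* bindings below are placeholders, not part of this module):

(* Hypothetical driver: run symbolic execution once and report how many
   blocked states and queries were collected. The [parsed_pt], [parsed_st],
   and [parsed_code] arguments are assumed to come from a separate parser. *)
let report_se (parsed_pt : Tz.mich_t Tz.cc) (parsed_st : Tz.mich_t Tz.cc)
    (parsed_code : Tz.mich_i Tz.cc) : unit =
  let ((result : se_result), (_initial_ss : Tz.sym_state)) =
    run_inst_entry (parsed_pt, parsed_st, parsed_code)
  in
  Printf.printf "blocked states: %d, queries: %d\n"
    (SSet.length result.sr_blocked)
    (SSet.length result.sr_queries)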
null
https://raw.githubusercontent.com/kupl/MicSE/3e757ce3dff47e1984c5fdcfda6bec29ec3cc6f4/lib/se.ml
ocaml
Se is a symbolic execution module based on Tz.sym_state definition symbolic states caches - accumulates which loop/lambdas passed caches - count integer to assign sym_state_id (start with 0) **************************************************************************** **************************************************************************** module SSGraph end **************************************************************************** **************************************************************************** 1. amount, balance, and bc_balance are mutez values function sigma_constraint_of_list_nil end function sigma_constraint_of_map_empty end Design Note: This method for evaluating sigma of map is incomplete. The sum of elements which get from the map should be less than or equal to sigma of map. (i.e., map[A] + map[B] <= ∑map) function sigma_constraint_of_map_get end function sigma_constraint_of_list_cons end function sigma_constraint_of_map_update end function add_sigma_constraint_of_list_nil end function add_sigma_constraint_of_map_empty end function add_sigma_constraint_of_map_get end function add_sigma_constraint_of_list_cons end function add_sigma_constraint_of_map_update end **************************************************************************** Symbolic Run Instruction **************************************************************************** initial mich_cut_info beginning trx-image beginning sym-image function run_inst_initial_se_result end utilities : sym_state <-> se_result utilities : context-se_result update (fun ctxt_sr new_sr -> se_result_pointwise_union new_sr ctxt_sr) utilities : symbolic stack generator FUNCTION BEGIN VERY VERY NAIVE OPTIMIZATION BEGIN - optimize only block_si.mich_stack's top value additional_constraints ss_constraints = additional_constraints @ ss.ss_constraints; VERY VERY NAIVE OPTIMIZATION END let _ = (* DEBUG then branch IMPORTANT: ctxt_sr name shadowing else branch ctx for else-branch then branch IMPORTANT: ctxt_sr name shadowing else branch ctx for else-branch then branch IMPORTANT: ctxt_sr name shadowing else branch ctx for else-branch RETURN refer MI_map case instead if you want to see the most detailed symbolic execution among loop instructions. let mapkey_ct : mich_f list = (* Precondition : container's type is map @ mapkey_ct RETURN then branch IMPORTANT: ctxt_sr name shadowing else branch ctx for else-branch refer MI_map case instead if you want to see the most detailed symbolic execution among loop instructions. RETURN refer MI_map case instead if you want to see the most detailed symbolic execution among loop instructions. 1. Construct blocked-state RETURN NOTE: no additional nat-num constraints here | MI_create_account -> TODO | MI_steps_to_quota -> TODO let _ = (* DEBUG IMPORTANT: ctxt_sr name shadowing function run_inst_i end If operation-mutez-subtraction policy turned on, we need to add balance-related constraint here. let _ = (* DEBUG let _ = (* DEBUG function run_inst_entry end
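The design note above (sigma of a map is only under-approximated: map[A] + map[B] <= ∑map) can be read concretely as follows. This is a small illustrative sketch over a plain association list standing in for a Michelson map, with non-negative values as for nat/mutez; it is example code only and not part of se.ml:

(* Concrete reading of the map-sigma design note: for distinct keys, the sum
   of the looked-up values never exceeds the map's total ("sigma"), assuming
   non-negative element values as with nat/mutez. Example helpers only,
   written against Core's List module. *)
let sigma_of_assoc (m : (string * int) list) : int =
  List.fold m ~init:0 ~f:(fun acc (_, v) -> acc + v)

let selected_sum_leq_sigma (m : (string * int) list) (keys : string list) : bool =
  let distinct_keys = List.dedup_and_sort keys ~compare:String.compare in
  let selected =
    List.fold distinct_keys ~init:0 ~f:(fun acc k ->
        match List.Assoc.find m k ~equal:String.equal with
        | Some v -> acc + v
        | None -> acc)
  in
  selected <= sigma_of_assoc m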
exception SeError of string open! Core Set of Tz.sym_state & Set of module SSet = Core.Set.Make (Tz.SymState_cmp) module MciSet = Core.Set.Make (Tz.MichCutInfo_cmp) module MFSet = Core.Set.Make (Tz.MichF_cmp) type se_result = { sr_running : SSet.t; sr_blocked : SSet.t; sr_queries : SSet.t; sr_terminated : SSet.t; sr_entered_loops : MciSet.t; sr_entered_lmbds : MciSet.t; sr_sid_counter : int; } [@@deriving sexp, compare, equal] let se_result_empty : se_result = { sr_running = SSet.empty; sr_blocked = SSet.empty; sr_queries = SSet.empty; sr_terminated = SSet.empty; sr_entered_loops = MciSet.empty; sr_entered_lmbds = MciSet.empty; sr_sid_counter = 0; } let se_result_pointwise_union : se_result -> se_result -> se_result = fun r1 r2 -> { sr_running = SSet.union r1.sr_running r2.sr_running; sr_blocked = SSet.union r1.sr_blocked r2.sr_blocked; sr_queries = SSet.union r1.sr_queries r2.sr_queries; sr_terminated = SSet.union r1.sr_terminated r2.sr_terminated; sr_entered_loops = MciSet.union r1.sr_entered_loops r2.sr_entered_loops; sr_entered_lmbds = MciSet.union r1.sr_entered_lmbds r2.sr_entered_lmbds; sr_sid_counter = max r1.sr_sid_counter r2.sr_sid_counter; } SymState as Graph module SidMap = Core.Map.Make (Int) let construct_sid_checkmap : SSet.t -> Tz.sym_state SidMap.t = fun sset -> SSet.fold sset ~init:SidMap.empty ~f:(fun accmap ss -> SidMap.add_exn accmap ~key:(List.hd_exn ss.ss_id) ~data:ss ) module SSGraph = struct open Tz module RMCIMap = Core.Map.Make (Tz.RMichCutInfo_cmp) type 'a ps_pair = { pred : 'a; succ : 'a; } [@@deriving sexp, compare, equal] type mci_view = SSet.t ps_pair RMCIMap.t [@@deriving sexp, compare, equal] let construct_mci_view : basic_blocks:SSet.t -> mci_view = let empty_cp = { pred = SSet.empty; succ = SSet.empty } in fun ~basic_blocks -> SSet.fold basic_blocks ~init:RMCIMap.empty ~f:(fun accm ss -> let (start_rmci, block_rmci) = ( TzUtil.get_reduced_mci ss.ss_start_mci, TzUtil.get_reduced_mci ss.ss_block_mci ) in accm 1 . use symstate 's start - rmci - symstate is start - rmci 's successor (fun m -> RMCIMap.update m start_rmci ~f:(function | None -> { empty_cp with succ = SSet.singleton ss } | Some pspr -> { pspr with succ = SSet.add pspr.succ ss } )) 2 . 
use symstate 's block - rmci - symstate is block - rmci 's predecessor fun m -> RMCIMap.update m block_rmci ~f:(function | None -> { empty_cp with pred = SSet.singleton ss } | Some pspr -> { pspr with pred = SSet.add pspr.pred ss } ) ) let ss_view_pred : m_view:mci_view -> sym_state -> SSet.t = fun ~m_view ss -> (RMCIMap.find_exn m_view (TzUtil.get_reduced_mci ss.ss_start_mci)).pred let ss_view_succ : m_view:mci_view -> sym_state -> SSet.t = fun ~m_view ss -> (RMCIMap.find_exn m_view (TzUtil.get_reduced_mci ss.ss_block_mci)).succ end Utilities : Constraint let add_constraints : c:Tz.mich_f list -> Tz.sym_state -> Tz.sym_state = (fun ~c ss -> { ss with ss_constraints = c @ ss.ss_constraints }) let mtz_constraint_if_it_is_or_true : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~tv:(t, v) -> if equal_mich_t t.cc_v MT_mutez then MF_mutez_bound (gen_mich_v_ctx v ~ctx) else MF_true let add_mtz_constraint_if_it_is : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~tv ss -> add_constraints ~c:[ mtz_constraint_if_it_is_or_true ~ctx ~tv ] ss let nat_constraint_if_it_is_or_true : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~tv:(t, v) -> if equal_mich_t t.cc_v MT_nat then MF_nat_bound (gen_mich_v_ctx v ~ctx) else MF_true let add_nat_constraint_if_it_is : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~tv ss -> add_constraints ~c:[ nat_constraint_if_it_is_or_true ~ctx ~tv ] ss let map_constraint_if_it_is_or_true : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~tv:(t, v) -> match t.cc_v with | MT_map _ | MT_big_map _ -> MF_map_default_value (gen_mich_v_ctx v ~ctx) | _ -> MF_true let add_map_constraint_if_it_is : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~tv ss -> add_constraints ~c:[ map_constraint_if_it_is_or_true ~ctx ~tv ] ss let set_constraint_if_it_is_or_true : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~tv:(t, v) -> match t.cc_v with | MT_set _ -> MF_set_default_value (gen_mich_v_ctx v ~ctx) | _ -> MF_true let add_set_constraint_if_it_is : ctx:Tz.mich_sym_ctxt -> tv:Tz.mich_t Tz.cc * Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~tv ss -> add_constraints ~c:[ set_constraint_if_it_is_or_true ~ctx ~tv ] ss let michv_typ_constraints : ctx:Tz.mich_sym_ctxt -> v:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~v -> let tv = (typ_of_val v, v) in [ mtz_constraint_if_it_is_or_true ~ctx ~tv; nat_constraint_if_it_is_or_true ~ctx ~tv; map_constraint_if_it_is_or_true ~ctx ~tv; set_constraint_if_it_is_or_true ~ctx ~tv; ] let add_typ_constraints : ctx:Tz.mich_sym_ctxt -> v:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = (fun ~ctx ~v ss -> add_constraints ~c:(michv_typ_constraints ~ctx ~v) ss) let amount_balance_mutez_constraints : ctx:Tz.mich_sym_ctxt -> amount_v:Tz.mich_v Tz.cc -> balance_v:Tz.mich_v Tz.cc -> bc_balance_v:Tz.mich_v Tz.cc -> Tz.mich_f list = fun ~ctx ~amount_v ~balance_v ~bc_balance_v -> let open Tz in let open TzUtil in [ 1 . 
amount , balance , and bc_balance are mutez values MF_mutez_bound (gen_mich_v_ctx ~ctx amount_v); MF_mutez_bound (gen_mich_v_ctx ~ctx balance_v); MF_mutez_bound (gen_mich_v_ctx ~ctx bc_balance_v); 2 . ( balance + bc_balance ) is also mutez value MF_mutez_bound (gen_mich_v_ctx ~ctx (MV_add_mmm (balance_v, bc_balance_v) |> gen_dummy_cc) ); ] let mtz_comes_from_constraint : ctx:Tz.mich_sym_ctxt -> mtz_v:Tz.mich_v Tz.cc -> from_v:Tz.mich_v Tz.cc -> Tz.mich_f = fun ~ctx ~mtz_v ~from_v -> let open Tz in let open TzUtil in MF_is_true (gen_mich_v_ctx ~ctx (MV_leq_ib (mtz_v, from_v) |> gen_dummy_cc)) let lt_2_63_constraint : ctx:Tz.mich_sym_ctxt -> Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx mv -> MF_eq ( gen_mich_v_ctx ~ctx mv, gen_mich_v_ctx ~ctx (MV_lit_mutez (Bigint.of_int64 Int64.max_value) |> gen_dummy_cc) ) let amount_balance_mutez_constraints : amount_v : balance_v : Tz.mich_v Tz.cc - > bc_balance_v : Tz.mich_v Tz.cc - > Tz.mich_f list = fun ~amount_v ~balance_v ~bc_balance_v - > let open Tz in [ ( * 1 . amount , balance , and bc_balance are mutez values amount_v:Tz.mich_v Tz.cc -> balance_v:Tz.mich_v Tz.cc -> bc_balance_v:Tz.mich_v Tz.cc -> Tz.mich_f list = fun ~amount_v ~balance_v ~bc_balance_v -> let open Tz in [ MF_mutez_bound amount_v; MF_mutez_bound balance_v; MF_mutez_bound bc_balance_v; 2 . amount is less - or - equal than bc_balance MF_is_true (MV_leq_ib (amount_v, bc_balance_v) |> gen_dummy_cc); 3 . ( balance + bc_balance ) is also mutez value MF_mutez_bound (MV_add_mmm (balance_v, bc_balance_v) |> gen_dummy_cc); 4 . ( balance + bc_balance ) is equal to total - mutez - amount (let lit_total_mutez_amount = MV_lit_mutez (Bigint.of_int64 Int64.max_value) |> gen_dummy_cc in MF_eq ( MV_add_mmm (balance_v, bc_balance_v) |> gen_dummy_cc, lit_total_mutez_amount ) ); ] *) let ge_balance_amount_in_non_trx_entry_constraint : ctx:Tz.mich_sym_ctxt -> amount_v:Tz.mich_v Tz.cc -> balance_v:Tz.mich_v Tz.cc -> Tz.mich_f = let open Tz in let open TzUtil in fun ~ctx ~amount_v ~balance_v -> MF_is_true (gen_mich_v_ctx ~ctx (MV_geq_ib (balance_v, amount_v) |> gen_dummy_cc)) let sigma_constraint_of_list_nil : ctx:Tz.mich_sym_ctxt -> lst:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~lst -> let (zero : mich_v cc) = MV_lit_nat Bigint.zero |> gen_custom_cc lst in let (zero_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx zero in let (set_of_sigma_lst : mich_v cc list) = sigma_of_cont lst in List.map set_of_sigma_lst ~f:(fun sigma -> let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in MF_eq (sigma_ctx, zero_ctx) :: michv_typ_constraints ~ctx ~v:sigma ) |> List.join let sigma_constraint_of_map_empty : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~map -> let (zero : mich_v cc) = MV_lit_nat Bigint.zero |> gen_custom_cc map in let (zero_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx zero in let (set_of_sigma_map : mich_v cc list) = sigma_of_cont map in List.map set_of_sigma_map ~f:(fun sigma -> let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in MF_eq (sigma_ctx, zero_ctx) :: michv_typ_constraints ~ctx ~v:sigma ) |> List.join let sigma_constraint_of_map_get : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> key:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~map ~key -> let (value : mich_v cc) = (match (typ_of_val map).cc_v with | MT_map _ -> MV_get_xmoy (key, map) | MT_big_map _ -> MV_get_xbmo (key, map) | _ -> SeError 
"sigma_constraint_of_map_get : wrong type" |> raise) |> gen_custom_cc map in let (none : mich_v cc) = MV_none (typ_of_val value |> get_innertyp) |> gen_custom_cc value in let (others : mich_v cc) = (match (typ_of_val map).cc_v with | MT_map _ -> MV_update_xomm (key, none, map) | MT_big_map _ -> MV_update_xobmbm (key, none, map) | _ -> SeError "sigma_constraint_of_map_get : wrong type" |> raise) |> gen_custom_cc map in let (set_of_sigma_map : mich_v cc list) = sigma_of_cont map in let (set_of_sigma_others_map : mich_v cc list) = sigma_of_cont others in List.map2 set_of_sigma_map set_of_sigma_others_map ~f:(fun sigma sigma_others -> let ((acc_elem : mich_f list), (value_elem : mich_v cc)) = MV_unlift_option value |> gen_custom_cc map |> acc_of_sigma ~sigma ~ctx in let (get_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx value in let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in let (add_ctx : mich_v_cc_ctx) = (match (typ_of_val value_elem).cc_v with | MT_int -> MV_add_iii (value_elem, sigma_others) | MT_nat -> MV_add_nnn (value_elem, sigma_others) | MT_mutez -> MV_add_mnn (value_elem, sigma_others) | _ -> SeError "sigma_constraint_of_map_get : wrong type" |> raise) |> gen_custom_cc sigma_others |> gen_mich_v_ctx ~ctx in MF_imply ( MF_not (MF_is_none get_ctx), MF_and (MF_eq (sigma_ctx, add_ctx) :: acc_elem) ) :: michv_typ_constraints ~ctx ~v:sigma @ michv_typ_constraints ~ctx ~v:sigma_others ) |> function | Ok fll -> List.join fll | Unequal_lengths -> SeError "sigma_constraint_of_map_get : Unequal_lengths" |> raise let sigma_constraint_of_list_cons : ctx:Tz.mich_sym_ctxt -> lst:Tz.mich_v Tz.cc -> hd:Tz.mich_v Tz.cc -> tl:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~lst ~hd ~tl -> let (set_of_sigma_lst : mich_v cc list) = sigma_of_cont lst in let (set_of_sigma_tl : mich_v cc list) = sigma_of_cont tl in List.map2 set_of_sigma_lst set_of_sigma_tl ~f:(fun sigma new_sigma -> let ((acc_elem : mich_f list), (value_elem : mich_v cc)) = (acc_of_sigma ~sigma ~ctx) hd in let (addition : mich_v cc) = (match (typ_of_val sigma).cc_v with | MT_int -> MV_add_iii (value_elem, new_sigma) | MT_nat -> MV_add_nnn (value_elem, new_sigma) | MT_mutez -> MV_add_mnn (value_elem, new_sigma) | _ -> SeError "sigma_constraint_of_list_cons : not supported" |> raise) |> gen_custom_cc new_sigma in let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in let (addition_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx addition in (MF_eq (sigma_ctx, addition_ctx) :: acc_elem) @ michv_typ_constraints ~ctx ~v:addition @ michv_typ_constraints ~ctx ~v:sigma @ michv_typ_constraints ~ctx ~v:new_sigma ) |> function | Ok fll -> List.join fll | Unequal_lengths -> SeError "sigma_constraint_of_list_cons : Unequal_lengths" |> raise let sigma_constraint_of_map_update : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> key:Tz.mich_v Tz.cc -> value:Tz.mich_v Tz.cc -> updated_map:Tz.mich_v Tz.cc -> Tz.mich_f list = let open Tz in let open TzUtil in fun ~ctx ~map ~key ~value ~updated_map -> let (old_value : mich_v cc) = (match (typ_of_val map).cc_v with | MT_map _ -> MV_get_xmoy (key, map) | MT_big_map _ -> MV_get_xbmo (key, map) | _ -> SeError "sigma_constraint_of_map_update : wrong type" |> raise) |> gen_custom_cc map in let (get_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx old_value in let (update_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx value in let (set_of_sigma_map : mich_v cc list) = sigma_of_cont map in let (set_of_sigma_updated_map : mich_v cc list) = sigma_of_cont updated_map in List.map2 
set_of_sigma_map set_of_sigma_updated_map ~f:(fun sigma new_sigma -> let ((acc_old_elem : mich_f list), (value_old_elem : mich_v cc)) = MV_unlift_option old_value |> gen_custom_cc map |> acc_of_sigma ~sigma:new_sigma ~ctx in let ((acc_new_elem : mich_f list), (value_new_elem : mich_v cc)) = MV_unlift_option value |> gen_custom_cc value |> acc_of_sigma ~sigma ~ctx in let (old_addition : mich_v cc) = (match (typ_of_val sigma).cc_v with | MT_mutez -> MV_add_mnn (value_new_elem, sigma) | MT_nat -> MV_add_nnn (value_new_elem, sigma) | MT_int -> MV_add_iii (value_new_elem, sigma) | _ -> SeError "sigma_constraint_of_map_update : not supported" |> raise) |> gen_custom_cc new_sigma in let (new_addition : mich_v cc) = (match (typ_of_val sigma).cc_v with | MT_mutez -> MV_add_mnn (value_old_elem, new_sigma) | MT_nat -> MV_add_nnn (value_old_elem, new_sigma) | MT_int -> MV_add_iii (value_old_elem, new_sigma) | _ -> SeError "sigma_constraint_of_map_update : not supported" |> raise) |> gen_custom_cc new_sigma in let (sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx sigma in let (new_sigma_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx new_sigma in let (old_addition_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx old_addition in let (new_addition_ctx : mich_v_cc_ctx) = gen_mich_v_ctx ~ctx new_addition in MF_and [ MF_imply ( MF_and [ MF_is_none get_ctx; MF_is_none update_ctx ], MF_and [ MF_eq (sigma_ctx, new_sigma_ctx) ] ); MF_imply ( MF_and [ MF_is_none get_ctx; MF_not (MF_is_none update_ctx) ], MF_and ([ MF_eq (old_addition_ctx, new_sigma_ctx) ] @ acc_new_elem @ michv_typ_constraints ~ctx ~v:old_addition ) ); MF_imply ( MF_and [ MF_not (MF_is_none get_ctx); MF_is_none update_ctx ], MF_and ([ MF_eq (sigma_ctx, new_addition_ctx) ] @ acc_old_elem @ michv_typ_constraints ~ctx ~v:new_addition ) ); MF_imply ( MF_and [ MF_not (MF_is_none get_ctx); MF_not (MF_is_none update_ctx) ], MF_and ([ MF_eq (old_addition_ctx, new_addition_ctx) ] @ acc_old_elem @ acc_new_elem @ michv_typ_constraints ~ctx ~v:old_addition @ michv_typ_constraints ~ctx ~v:new_addition ) ); ] :: michv_typ_constraints ~ctx ~v:sigma @ michv_typ_constraints ~ctx ~v:new_sigma ) |> function | Ok fll -> List.join fll | Unequal_lengths -> SeError "sigma_constraint_of_map_update : Unequal_lengths" |> raise let add_sigma_constraint_of_list_nil : ctx:Tz.mich_sym_ctxt -> lst:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~lst ss -> add_constraints ~c:(sigma_constraint_of_list_nil ~ctx ~lst) ss let add_sigma_constraint_of_map_empty : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~map ss -> add_constraints ~c:(sigma_constraint_of_map_empty ~ctx ~map) ss let add_sigma_constraint_of_map_get : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> key:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = fun ~ctx ~map ~key ss -> add_constraints ~c:(sigma_constraint_of_map_get ~ctx ~map ~key) ss let add_sigma_constraint_of_list_cons : ctx:Tz.mich_sym_ctxt -> lst:Tz.mich_v Tz.cc -> hd:Tz.mich_v Tz.cc -> tl:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = let open Tz in let open TzUtil in fun ~ctx ~lst ~hd ~tl ss -> if not (equal_mich_t (typ_of_val lst).cc_v (typ_of_val tl).cc_v) then SeError "add_sigma_constraint_of_list_cons : wrong type" |> raise else add_constraints ~c:(sigma_constraint_of_list_cons ~ctx ~lst ~hd ~tl) ss let add_sigma_constraint_of_map_update : ctx:Tz.mich_sym_ctxt -> map:Tz.mich_v Tz.cc -> key:Tz.mich_v Tz.cc -> value:Tz.mich_v Tz.cc -> updated_map:Tz.mich_v Tz.cc -> Tz.sym_state -> Tz.sym_state = let open Tz 
in let open TzUtil in fun ~ctx ~map ~key ~value ~updated_map ss -> if not (equal_mich_t (typ_of_val map).cc_v (typ_of_val updated_map).cc_v) then SeError "add_sigma_constraint_of_map_update : wrong type" |> raise else add_constraints ~c:(sigma_constraint_of_map_update ~ctx ~map ~key ~value ~updated_map) ss let run_inst_initial_se_result : Tz.mich_t Tz.cc * Tz.mich_t Tz.cc * Tz.mich_i Tz.cc -> se_result * Tz.sym_state = let open Tz in let open TzUtil in fun (param_tcc, strg_tcc, code) -> sid_counter & sym_ctxt let scounter = 0 in let sctxt = [ scounter ] in let ctx = sctxt in mich_t cc values let cur_contract_tcc = MT_contract param_tcc |> gen_dummy_cc and addr_tcc = MT_address |> gen_dummy_cc and mutez_tcc = MT_mutez |> gen_dummy_cc and time_tcc = MT_timestamp |> gen_dummy_cc and paramstrg_tcc = MT_pair (param_tcc, strg_tcc) |> gen_dummy_cc in let init_mci = { mci_loc = code.cc_loc; mci_cutcat = MCC_trx_entry } in let param_v = MV_symbol (param_tcc, MSC_param) |> gen_dummy_cc in let beginning_ti : trx_image = { ti_contract = MV_symbol (cur_contract_tcc, MSC_contract) |> gen_dummy_cc; ti_source = MV_symbol (addr_tcc, MSC_source) |> gen_dummy_cc; ti_sender = MV_symbol (addr_tcc, MSC_sender) |> gen_dummy_cc; ti_param = param_v; ti_amount = MV_symbol (mutez_tcc, MSC_amount) |> gen_dummy_cc; ti_time = MV_symbol (time_tcc, MSC_time) |> gen_dummy_cc; } in let beginning_si : sym_image = { si_mich = [ MV_symbol (paramstrg_tcc, MSC_mich_stack 0) |> gen_dummy_cc ]; si_dip = []; si_map_entry = []; si_map_exit = []; si_map_mapkey = []; si_iter = []; si_balance = MV_symbol (mutez_tcc, MSC_balance) |> gen_dummy_cc; si_bc_balance = MV_symbol (mutez_tcc, MSC_bc_balance) |> gen_dummy_cc; si_param = beginning_ti; } in blocking sym - image let blocking_si : sym_image = { beginning_si with si_balance = MV_add_mmm (beginning_si.si_balance, beginning_ti.ti_amount) |> gen_dummy_cc; si_bc_balance = MV_sub_mmm (beginning_si.si_bc_balance, beginning_ti.ti_amount) |> gen_dummy_cc; } in let initial_sym_state : sym_state = { ss_id = sctxt; ss_start_mci = init_mci; ss_block_mci = init_mci; ss_start_si = beginning_si; ss_block_si = blocking_si; ss_constraints = 1 . first stack 's CAR is parameter - value MF_eq ( gen_mich_v_ctx ~ctx beginning_ti.ti_param, gen_mich_v_ctx ~ctx (MV_car (List.hd_exn beginning_si.si_mich) |> gen_dummy_cc) ) 2 . If parameter value is mutez or nat , add constraints michv_typ_constraints ~ctx ~v:param_v @ [ 3 . Amount comes from Bc - Balance mtz_comes_from_constraint ~ctx ~mtz_v:beginning_ti.ti_amount ~from_v:beginning_si.si_bc_balance; ] 4 . 
amount & balance & bc_balance constraints amount_balance_mutez_constraints ~ctx ~amount_v:beginning_ti.ti_amount ~balance_v:beginning_si.si_balance ~bc_balance_v:beginning_si.si_bc_balance; } in let initial_se_result : se_result = { se_result_empty with sr_running = SSet.singleton initial_sym_state; sr_sid_counter = scounter + 1; } in (initial_se_result, initial_sym_state) let rec run_inst : Tz.mich_i Tz.cc -> se_result -> se_result = fun inst sr -> SSet.fold sr.sr_running ~init:{ sr with sr_running = SSet.empty } ~f:(fun acc_sr ss -> se_result_pointwise_union (run_inst_i inst (acc_sr, ss)) acc_sr ) and run_inst_i : Tz.mich_i Tz.cc -> se_result * Tz.sym_state -> se_result = let open Tz in let open TzUtil in utilties : : blocked - mich - stack let get_bmstack : sym_state -> mich_v cc list = (fun ss -> ss.ss_block_si.si_mich) in let get_bmstack_1 : sym_state -> mich_v cc = fun ss -> match get_bmstack ss with | h :: _ -> h | _ -> failwith "get_bmstack_1 : unexpected" in let get_bmstack_2 : sym_state -> mich_v cc * mich_v cc = fun ss -> match get_bmstack ss with | h1 :: h2 :: _ -> (h1, h2) | _ -> failwith "get_bmstack_2 : unexpected" in let get_bmstack_3 : sym_state -> mich_v cc * mich_v cc * mich_v cc = fun ss -> match get_bmstack ss with | h1 :: h2 :: h3 :: _ -> (h1, h2, h3) | _ -> failwith "get_bmstack_3 : unexpected" in let set_bmstack : sym_state -> mich_v cc list -> sym_state = fun ss st -> { ss with ss_block_si = { ss.ss_block_si with si_mich = st } } in let update_bmstack : f:(mich_v cc list -> mich_v cc list) -> sym_state -> sym_state = (fun ~f ss -> get_bmstack ss |> f |> set_bmstack ss) in let push_bmstack : v:mich_v cc -> sym_state -> sym_state = (fun ~v ss -> update_bmstack ~f:(List.cons v) ss) in let update_top_1_bmstack : f:(mich_v cc -> mich_v cc list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | hd :: tl -> f hd @ tl |> set_bmstack ss | _ -> failwith "update_top_1_bmstack : unexpected" in let update_top_2_bmstack : f:(mich_v cc * mich_v cc -> mich_v cc list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | h1 :: h2 :: tl -> f (h1, h2) @ tl |> set_bmstack ss | _ -> failwith "update_top_2_bmstack : unexpected" in let update_top_3_bmstack : f:(mich_v cc * mich_v cc * mich_v cc -> mich_v cc list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | h1 :: h2 :: h3 :: tl -> f (h1, h2, h3) @ tl |> set_bmstack ss | _ -> failwith "update_top_e_bmstack : unexpected" in let set_bmstack_and_constraint : sym_state -> mich_v cc list -> mich_f list -> sym_state = fun ss st cs -> { ss with ss_block_si = { ss.ss_block_si with si_mich = st }; ss_constraints = cs; } in let update_top_1_bmstack_and_constraint : f:(mich_v cc -> mich_v cc list * mich_f list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | hd :: tl -> let (st, cs) = f hd in set_bmstack_and_constraint ss (st @ tl) (cs @ ss.ss_constraints) | _ -> failwith "update_top_1_bmstack_and_constraint : unexpected" in let update_top_2_bmstack_and_constraint : f:(mich_v cc * mich_v cc -> mich_v cc list * mich_f list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | h1 :: h2 :: tl -> let (st, cs) = f (h1, h2) in set_bmstack_and_constraint ss (st @ tl) (cs @ ss.ss_constraints) | _ -> failwith "update_top_2_bmstack_and_constraint : unexpected" in let update_top_3_bmstack_and_constraint : f:(mich_v cc * mich_v cc * mich_v cc - > mich_v cc list * mich_f list ) - > sym_state - > sym_state = fun ~f ss - > match get_bmstack ss with | h1 : : h2 : : h3 : : 
tl - > let ( st , cs ) = f ( h1 , h2 , h3 ) in set_bmstack_and_constraint ss ( ) ( cs @ ss.ss_constraints ) | _ - > failwith " update_top_3_bmstack_and_constraint : unexpected " in f:(mich_v cc * mich_v cc * mich_v cc -> mich_v cc list * mich_f list) -> sym_state -> sym_state = fun ~f ss -> match get_bmstack ss with | h1 :: h2 :: h3 :: tl -> let (st, cs) = f (h1, h2, h3) in set_bmstack_and_constraint ss (st @ tl) (cs @ ss.ss_constraints) | _ -> failwith "update_top_3_bmstack_and_constraint : unexpected" in *) let running_ss_to_sr : se_result -> sym_state -> se_result = (fun ctxt_sr ss -> { ctxt_sr with sr_running = SSet.singleton ss }) in let ctxt_sr_update : se_result -> se_result -> se_result = fun ctxt_sr new_sr -> { ctxt_sr with sr_entered_loops = MciSet.union ctxt_sr.sr_entered_loops new_sr.sr_entered_loops; sr_entered_lmbds = MciSet.union ctxt_sr.sr_entered_lmbds new_sr.sr_entered_lmbds; sr_sid_counter = max ctxt_sr.sr_sid_counter new_sr.sr_sid_counter; } in let ctxt_sr_sid_counter_incr : se_result -> se_result = fun ctxt_sr -> { ctxt_sr with sr_sid_counter = ctxt_sr.sr_sid_counter + 1 } in let generate_symstack : f:(int -> mich_sym_category) -> ctx:mich_sym_ctxt -> ccmaker:('a -> 'a cc) -> mich_v cc list -> mich_v cc list * mich_f list = fun ~f ~ctx ~ccmaker st -> let len = List.length st in let vl = List.mapi ~f:(fun i v -> let sc = f (len - i - 1) in MV_symbol (typ_of_val v, sc) |> ccmaker) st in let ctl = List.fold vl ~init:[] ~f:(fun accl v -> michv_typ_constraints ~ctx ~v @ accl ) in (vl, ctl) in utilities : extract paramter type from sym - state let param_typ_of_ss : sym_state -> mich_t cc = fun ss -> match (typ_of_val ss.ss_start_si.si_param.ti_contract).cc_v with | MT_contract t -> t | _ -> failwith "run_inst_i : param_typ_of_ss : unexpected" in fun inst (ctxt_sr, ss) -> let ss = match ss.ss_block_si.si_mich with | [] -> ([], []) | h :: t -> let (cl, v) = opt_mvcc ~ctx:ss.ss_id h in (cl, v :: t) in { ss with ss_block_si = { ss.ss_block_si with si_mich = optimized_stack } } in Utils.Log.debug (fun m -> m "Current MCI: %s\n\tCurrent SID: %d\n\tMichStack Length: %d\n\tStack: \n\t\t[%s]" (inst.cc_loc |> sexp_of_ccp_loc |> Sexp.to_string) ctxt_sr.sr_sid_counter (List.length ss.ss_block_si.si_mich) (List.map ss.ss_block_si.si_mich ~f:(fun v -> Tz.sexp_of_mich_v v.cc_v |> SexpUtil.to_string ) |> String.concat ~sep:"; " ) ) in *) let ctx = ss.ss_id in match inst.cc_v with | MI_seq (i1, i2) -> run_inst_i i1 (ctxt_sr, ss) |> run_inst i2 | MI_drop zn -> if Bigint.equal zn Bigint.zero then running_ss_to_sr ctxt_sr ss else update_bmstack ss ~f:(fun x -> List.split_n x (Bigint.to_int_exn zn) |> snd ) |> running_ss_to_sr ctxt_sr | MI_dup zn -> update_bmstack ss ~f:(fun x -> List.nth_exn x (Bigint.to_int_exn zn - 1) :: x ) |> running_ss_to_sr ctxt_sr | MI_swap -> update_bmstack ss ~f:(function | h1 :: h2 :: tl -> h2 :: h1 :: tl | _ -> failwith "run_inst_i : MI_swap : unexpected" ) |> running_ss_to_sr ctxt_sr | MI_dig zn -> update_bmstack ss ~f:(fun x -> match List.split_n x (Bigint.to_int_exn zn) with | (hdlst, tlhd :: tltl) -> (tlhd :: hdlst) @ tltl | _ -> failwith "run_inst_i : MI_dig : unexpected" ) |> running_ss_to_sr ctxt_sr | MI_dug zn -> update_bmstack ss ~f:(fun x -> match List.split_n x (Bigint.to_int_exn zn + 1) with | (hdhd :: hdtl, tl) -> hdtl @ (hdhd :: tl) | _ -> failwith "run_inst_i : MI_dug : unexpected" ) |> running_ss_to_sr ctxt_sr | MI_push (_, v) -> push_bmstack ss ~v |> add_typ_constraints ~ctx ~v |> running_ss_to_sr ctxt_sr | MI_some -> update_top_1_bmstack ~f:(fun x 
-> [ MV_some x |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_none t -> push_bmstack ss ~v:(MV_none t |> gen_custom_cc inst) |> running_ss_to_sr ctxt_sr | MI_unit -> push_bmstack ss ~v:(MV_unit |> gen_custom_cc inst) |> running_ss_to_sr ctxt_sr | MI_if_none (i1, i2) -> let cond_value : mich_v cc = get_bmstack_1 ss in let tb_cond_constraint : mich_f = MF_is_none (gen_mich_v_ctx ~ctx cond_value) in let then_br_sr : se_result = update_top_1_bmstack ~f:(fun _ -> []) ss |> add_constraints ~c:[ tb_cond_constraint ] |> (fun ssss -> run_inst_i i1 (ctxt_sr, ssss)) in let ctxt_sr = ctxt_sr_update ctxt_sr then_br_sr in let else_br_sr : se_result = let unlifted_cond_value = MV_unlift_option cond_value |> gen_custom_cc inst in let ctx = [ ctxt_sr.sr_sid_counter ] in increase sid_counter since takes new let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in let eb_cond_constraint : mich_f = MF_not (MF_is_none (gen_mich_v_ctx ~ctx cond_value)) in let else_br_base_ss = sym_state_symbol_context_swap ~ctx ss in let else_br_ss = else_br_base_ss |> update_top_1_bmstack ~f:(fun _ -> [ unlifted_cond_value ]) |> add_constraints ~c:[ eb_cond_constraint ] |> add_typ_constraints ~ctx ~v:unlifted_cond_value in run_inst_i i2 (ctxt_sr, else_br_ss) in se_result_pointwise_union then_br_sr else_br_sr | MI_pair -> update_top_2_bmstack ~f:(fun (x, y) -> [ MV_pair (x, y) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_car -> update_top_1_bmstack_and_constraint ~f:(fun x -> let nv = MV_car x |> gen_custom_cc inst in ([ nv ], michv_typ_constraints ~ctx ~v:nv)) ss |> running_ss_to_sr ctxt_sr | MI_cdr -> update_top_1_bmstack_and_constraint ~f:(fun x -> let nv = MV_cdr x |> gen_custom_cc inst in ([ nv ], michv_typ_constraints ~ctx ~v:nv)) ss |> running_ss_to_sr ctxt_sr | MI_left t -> update_top_1_bmstack ~f:(fun h - > [ ( t , h ) | > gen_custom_cc inst ] ) ~f:(fun h -> let ty = MT_or (typ_of_val h, t) |> gen_custom_cc inst in [ MV_left (ty, h) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_right t -> update_top_1_bmstack ~f:(fun h - > [ ( t , h ) | > gen_custom_cc inst ] ) ~f:(fun h -> let ty = MT_or (t, typ_of_val h) |> gen_custom_cc inst in [ MV_right (ty, h) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_if_left (i1, i2) -> let cond_value : mich_v cc = get_bmstack_1 ss in let tb_cond_constraint : mich_f = MF_is_left (gen_mich_v_ctx ~ctx cond_value) in let then_br_sr : se_result = let unlifted_cond_value = MV_unlift_left cond_value |> gen_custom_cc inst in update_top_1_bmstack ~f:(fun _ -> [ unlifted_cond_value ]) ss |> add_constraints ~c:[ tb_cond_constraint ] |> add_typ_constraints ~ctx ~v:unlifted_cond_value |> (fun ssss -> run_inst_i i1 (ctxt_sr, ssss)) in let ctxt_sr = ctxt_sr_update ctxt_sr then_br_sr in let else_br_sr : se_result = let unlifted_cond_value = MV_unlift_right cond_value |> gen_custom_cc inst in let ctx = [ ctxt_sr.sr_sid_counter ] in increase sid_counter since takes new let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in let eb_cond_constraint : mich_f = MF_not (MF_is_left (gen_mich_v_ctx ~ctx cond_value)) in let else_br_base_ss = sym_state_symbol_context_swap ~ctx ss in let else_br_ss = else_br_base_ss |> update_top_1_bmstack ~f:(fun _ -> [ unlifted_cond_value ]) |> add_constraints ~c:[ eb_cond_constraint ] |> add_typ_constraints ~ctx ~v:unlifted_cond_value in run_inst_i i2 (ctxt_sr, else_br_ss) in se_result_pointwise_union then_br_sr else_br_sr | MI_nil t -> let (lst : mich_v cc) = MV_nil t |> gen_custom_cc inst in push_bmstack ~v:lst ss |> 
add_sigma_constraint_of_list_nil ~ctx ~lst |> running_ss_to_sr ctxt_sr | MI_cons -> let ((hd : mich_v cc), (tl : mich_v cc)) = get_bmstack_2 ss in let (lst : mich_v cc) = MV_cons (hd, tl) |> gen_custom_cc inst in update_top_2_bmstack ~f:(fun _ -> [ lst ]) ss |> add_sigma_constraint_of_list_cons ~ctx ~lst ~hd ~tl |> running_ss_to_sr ctxt_sr | MI_if_cons (i1, i2) -> IF_CONS receives list - container only let cond_value : mich_v cc = get_bmstack_1 ss in let tb_cond_constraint : mich_f = MF_is_cons (gen_mich_v_ctx ~ctx cond_value) in let then_br_sr : se_result = let unlifted_cond_value_hd = MV_hd_l cond_value |> gen_custom_cc inst in let unlifted_cond_value_tl = MV_tl_l cond_value |> gen_custom_cc inst in update_top_1_bmstack ~f:(fun _ -> [ unlifted_cond_value_hd; unlifted_cond_value_tl ]) ss |> add_constraints ~c:[ tb_cond_constraint ] |> add_typ_constraints ~ctx ~v:unlifted_cond_value_hd |> add_sigma_constraint_of_list_cons ~ctx ~lst:cond_value ~hd:unlifted_cond_value_hd ~tl:unlifted_cond_value_tl It is important to update of else - branch symbolic - state |> (fun ssss -> run_inst_i i1 (ctxt_sr, ssss)) in let ctxt_sr = ctxt_sr_update ctxt_sr then_br_sr in let else_br_sr : se_result = let ctx = [ ctxt_sr.sr_sid_counter ] in increase sid_counter since takes new let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in let eb_cond_constraint : mich_f = MF_not (MF_is_cons (gen_mich_v_ctx ~ctx cond_value)) in let else_br_base_ss = sym_state_symbol_context_swap ~ctx ss in let else_br_ss = else_br_base_ss |> update_top_1_bmstack ~f:(fun _ -> []) |> add_constraints ~c:[ eb_cond_constraint ] |> add_sigma_constraint_of_list_nil ~ctx ~lst:cond_value in run_inst_i i2 (ctxt_sr, else_br_ss) in se_result_pointwise_union then_br_sr else_br_sr | MI_size -> let size_gen mv : mich_v cc list * mich_f list = let mvcc = gen_custom_cc inst mv in ([ mvcc ], [ MF_nat_bound { ctx_i = ss.ss_id; ctx_v = mvcc } ]) in update_top_1_bmstack_and_constraint ~f:(fun h -> match (typ_of_val h).cc_v with | MT_set _ -> MV_size_s h |> size_gen | MT_map _ -> MV_size_m h |> size_gen | MT_list _ -> MV_size_l h |> size_gen | MT_string -> MV_size_str h |> size_gen | MT_bytes -> MV_size_b h |> size_gen | _ -> failwith "run_inst_i : MI_size : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_empty_set t -> push_bmstack ~v:(MV_empty_set t |> gen_custom_cc inst) ss |> running_ss_to_sr ctxt_sr | MI_empty_map (t1, t2) -> let (map : mich_v cc) = MV_empty_map (t1, t2) |> gen_custom_cc inst in push_bmstack ~v:map ss |> add_sigma_constraint_of_map_empty ~ctx ~map |> running_ss_to_sr ctxt_sr | MI_empty_big_map (t1, t2) -> let (map : mich_v cc) = MV_empty_big_map (t1, t2) |> gen_custom_cc inst in push_bmstack ~v:map ss |> add_sigma_constraint_of_map_empty ~ctx ~map |> running_ss_to_sr ctxt_sr | MI_map i -> let (outer_cutcat, inner_cutcat) = (MCC_ln_map, MCC_lb_map) in let (blocked_mci, thenbr_mci, elsebr_mci) = ( { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = inner_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat } ) in let container_v = get_bmstack_1 ss in let container_t : mich_t cc = typ_of_val container_v in let elem_t : mich_t cc = match container_t.cc_v with | MT_list e -> e | MT_map (kt, vt) -> MT_pair (kt, vt) |> gen_custom_cc container_v | _ -> failwith "run_inst_i : MI_map : elem_t" in let rest_stack = List.tl_exn (get_bmstack ss) in let out_elem_t = List.hd_exn (Te.typ_run_inst ~param_t:(param_typ_of_ss ss) i (elem_t :: List.map ~f:typ_of_val rest_stack) ) in let out_container_t : mich_t 
cc = match container_t.cc_v with | MT_list _ -> MT_list out_elem_t |> gen_custom_cc inst | MT_map (kt, _) -> MT_map (kt, out_elem_t) |> gen_custom_cc inst | _ -> failwith "run_inst_i : MI_map : out_container_t" in 1 . Construct blocked - state let blocked_state : sym_state = { ss with ss_block_mci = blocked_mci } in 2 . If this MAP - instruction is the instruction already met before , return only blocked - state . if MciSet.mem ctxt_sr.sr_entered_loops blocked_mci then { ctxt_sr with sr_blocked = SSet.singleton blocked_state } else ( 2 . + . update ctxt_sr - add entered - loop let ctxt_sr : se_result = { ctxt_sr with sr_entered_loops = MciSet.add ctxt_sr.sr_entered_loops blocked_mci; } in 3 . run - instruction inside MAP instruction let tb_result : se_result = let tb_ss_id = [ ctxt_sr.sr_sid_counter ] in 3.1 . construct entry sym - state let tb_entry_ss : sym_state = let bsi = blocked_state.ss_block_si in let tb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let tb_container_v = MV_symbol ( container_t, MSC_map_entry_stack (List.length bsi.si_map_entry) ) |> gen_custom_cc inst in let tb_out_container_v = MV_symbol ( out_container_t, MSC_map_exit_stack (List.length bsi.si_map_exit) ) |> gen_custom_cc inst in let ctx = tb_ss_id in let (tb_entry_si, tb_entry_constraints) : sym_image * mich_f list = let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:tb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:tb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v ~bc_balance_v in let (mapkey_v, mapkey_ct) : mich_v cc list * mich_f list = match container_t.cc_v with | MT_map (kt, _) -> let v = MV_symbol ( kt, MSC_map_mapkey_stack (List.length bsi.si_map_mapkey) ) |> gen_custom_cc inst in ([ v ], michv_typ_constraints ~ctx ~v) | _ -> ([], [ MF_true ]) in ( { si_mich = michst; si_dip = dipst; si_map_entry = tb_container_v :: mapentryst; si_map_exit = tb_out_container_v :: mapexitst; si_map_mapkey = mapkey_v @ mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = tb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ mapkey_ct @ constraints_abp ) in let (tb_block_si, tb_block_constraints) : sym_image * mich_f list = let elem_v = MV_symbol (elem_t, MSC_mich_stack (List.length tb_entry_si.si_mich)) |> gen_custom_cc inst in let elem_ct = michv_typ_constraints ~ctx ~v:elem_v in let (tb_container_blocksi_v, tb_container_blocksi_ct) : mich_v cc * 
mich_f list = match container_t.cc_v with | MT_map (_, mapelem_t) -> let (key : mich_v cc) = List.hd_exn tb_entry_si.si_map_mapkey in let (value_unopt : mich_v cc) = MV_cdr elem_v |> gen_custom_cc inst in let (updated_map : mich_v cc) = MV_update_xomm ( key, MV_none mapelem_t |> gen_custom_cc inst, tb_container_v ) |> gen_custom_cc inst in let (get : mich_v cc) = MV_get_xmoy (key, tb_container_v) |> gen_custom_cc inst in let (get_unopt : mich_v cc) = MV_unlift_option get |> gen_custom_cc inst in let (mem : mich_v cc) = MV_mem_xmb (key, tb_container_v) |> gen_custom_cc inst in ( updated_map, MF_eq ( gen_mich_v_ctx ~ctx get_unopt, gen_mich_v_ctx ~ctx value_unopt ) :: MF_is_true (gen_mich_v_ctx ~ctx mem) :: michv_typ_constraints ~ctx ~v:key @ sigma_constraint_of_map_update ~ctx ~map:tb_container_v ~key ~value:(MV_none mapelem_t |> gen_custom_cc inst) ~updated_map ) | MT_list _ -> let (hd : mich_v cc) = MV_hd_l tb_container_v |> gen_custom_cc inst in let (tl : mich_v cc) = MV_tl_l tb_container_v |> gen_custom_cc inst in ( tl, MF_eq (gen_mich_v_ctx ~ctx hd, gen_mich_v_ctx ~ctx elem_v) :: MF_is_cons (gen_mich_v_ctx ~ctx tb_container_v) :: sigma_constraint_of_list_cons ~ctx ~lst:tb_container_v ~hd ~tl ) | _ -> failwith "run_inst_i : MI_map : tb_container_blocksi_v" in ( { tb_entry_si with si_mich = elem_v :: tb_entry_si.si_mich; si_map_entry = tb_container_blocksi_v :: List.tl_exn tb_entry_si.si_map_entry; }, elem_ct @ tb_container_blocksi_ct ) in { ss_id = tb_ss_id; ss_start_mci = thenbr_mci; ss_block_mci = thenbr_mci; ss_start_si = tb_entry_si; ss_block_si = tb_block_si; ss_constraints = tb_entry_constraints @ tb_block_constraints; } in be aware - between " after tb_symstate construction " and " before run - inst " , update ctxt_sr ( increase sid - counter ) - becuase new sym - state constructed before . update ctxt_sr (increase sid-counter) - becuase new sym-state constructed before. *) let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in 3.2 . run_inst_i recursive call let tb_sr_result_raw : se_result = run_inst_i i (ctxt_sr, tb_entry_ss) in 3.3 . transform running states to blocked states let tb_exit_container_block_v : sym_image -> mich_v cc = fun { si_mich; si_map_exit; si_map_mapkey; _ } -> let ec = List.hd_exn si_map_exit in let ev = List.hd_exn si_mich in match (typ_of_val ec).cc_v with | MT_map _ -> MV_update_xomm (List.hd_exn si_map_mapkey, MV_some ev |> gen_custom_cc inst, ec) |> gen_custom_cc inst | MT_list _ -> MV_cons (ev, ec) |> gen_custom_cc inst | _ -> failwith "run_inst_i : MI_map : tb_exit_container_block_v" in { tb_sr_result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map tb_sr_result_raw.sr_running ~f:(fun rss -> let (exit_container : mich_v cc) = tb_exit_container_block_v rss.ss_block_si in let (container_constraints : mich_f list) = michv_typ_constraints ~ctx ~v:exit_container in { rss with ss_block_si = { rss.ss_block_si with si_mich = List.tl_exn rss.ss_block_si.si_mich; si_map_exit = exit_container :: List.tl_exn rss.ss_block_si.si_map_exit; }; ss_block_mci = thenbr_mci; ss_constraints = container_constraints @ rss.ss_constraints; } ) ) tb_sr_result_raw.sr_blocked; } in 3 . + . update ctxt_sr - override it using tb_result - it is okay to override since tb_result uses previous ctxt_sr in recursive call . - it is okay to override since tb_result uses previous ctxt_sr in recursive call. *) let ctxt_sr : se_result = tb_result in 4 . 
construct MAP instruction escaping sym - state ( else - branch ) let eb_symstate : sym_state = let eb_ss_id = [ ctxt_sr.sr_sid_counter ] in let eb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let ctx = eb_ss_id in let (eb_entry_si, eb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let eb_out_container_v = MV_symbol (out_container_t, MSC_mich_stack (List.length michst)) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:eb_trx_image.ti_param @ michv_typ_constraints ~ctx ~v:eb_out_container_v @ [ mtz_comes_from_constraint ~ctx ~mtz_v:eb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v ~bc_balance_v in ( { si_mich = eb_out_container_v :: michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = eb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ constraints_abp ) in { ss_id = eb_ss_id; ss_start_mci = elsebr_mci; ss_block_mci = elsebr_mci; ss_start_si = eb_entry_si; ss_block_si = eb_entry_si; ss_constraints = eb_entry_constraints; } in 4 . + . update ctxt_sr - increase sid - counter - becuase new sym - state constructed before . - becuase new sym-state constructed before. *) let ctxt_sr : se_result = ctxt_sr_sid_counter_incr ctxt_sr in { remember - current ctxt_sr contains tb_result in " 3 . + . " ctxt_sr with sr_running = SSet.singleton eb_symstate; sr_blocked = SSet.add ctxt_sr.sr_blocked blocked_state; } ) | MI_iter i -> let (outer_cutcat, inner_cutcat) = (MCC_ln_iter, MCC_lb_iter) in let (blocked_mci, thenbr_mci, elsebr_mci) = ( { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = inner_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat } ) in let container_v = get_bmstack_1 ss in let container_t : mich_t cc = typ_of_val container_v in let elem_t : mich_t cc = match container_t.cc_v with | MT_list e -> e | MT_set e -> e | MT_map (kt, vt) -> MT_pair (kt, vt) |> gen_custom_cc container_v | _ -> failwith "run_inst_i : MI_map : elem_t" in 1 . Construct blocked - state let blocked_state : sym_state = { ss with ss_block_mci = blocked_mci } in 2 . If this ITER - instruction is the instruction already met before , return only blocked - state . 
if MciSet.mem ctxt_sr.sr_entered_loops blocked_mci then { ctxt_sr with sr_blocked = SSet.singleton blocked_state } else ( 2 . + . update ctxt_sr - add entered - loop let ctxt_sr : se_result = { ctxt_sr with sr_entered_loops = MciSet.add ctxt_sr.sr_entered_loops blocked_mci; } in 3 . run - instruction inside ITER instruction let tb_result : se_result = let tb_ss_id = [ ctxt_sr.sr_sid_counter ] in 3.1 . construct entry sym - state let tb_entry_ss : sym_state = let tb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let bsi = blocked_state.ss_block_si in let tb_container_v = MV_symbol (container_t, MSC_iter_stack (List.length bsi.si_map_entry)) |> gen_custom_cc inst in let ctx = tb_ss_id in let (tb_entry_si, tb_entry_constraints) : sym_image * mich_f list = let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:tb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:tb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v ~bc_balance_v in let elem_v = ( elem_t , MSC_mich_stack ( ) ) | > gen_custom_cc inst in MV_symbol (elem_t, MSC_mich_stack (List.length michst)) |> gen_custom_cc inst in *) let = michv_typ_constraints ~v : elem_v in match container_t.cc_v with | MT_map _ -> let mapkey = MV_car elem_v |> gen_custom_cc inst in [ 1 . 
key is not the key of the container MF_not (MF_is_true (MV_mem_xmb (mapkey, tb_container_v) |> gen_dummy_cc |> gen_mich_v_ctx ~ctx ) ); ] | _ -> [] in *) ( { si_mich = michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = tb_container_v :: iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = tb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ @ constraints_abp ) in let (tb_block_si, tb_block_constraints) : sym_image * mich_f list = let elem_v = MV_symbol (elem_t, MSC_mich_stack (List.length tb_entry_si.si_mich)) |> gen_custom_cc inst in let elem_ct = michv_typ_constraints ~ctx ~v:elem_v in let (tb_container_blocksi_v, tb_container_blocksi_ct) : mich_v cc * mich_f list = match container_t.cc_v with | MT_map (_, mapelem_t) -> let (key : mich_v cc) = MV_car elem_v |> gen_custom_cc inst in let (value_unopt : mich_v cc) = MV_cdr elem_v |> gen_custom_cc inst in let (updated_map : mich_v cc) = MV_update_xomm ( key, MV_none mapelem_t |> gen_custom_cc inst, tb_container_v ) |> gen_custom_cc inst in let (get : mich_v cc) = MV_get_xmoy (key, tb_container_v) |> gen_custom_cc inst in let (mem : mich_v cc) = MV_mem_xmb (key, tb_container_v) |> gen_custom_cc inst in ( updated_map, MF_eq ( gen_mich_v_ctx ~ctx (MV_unlift_option get |> gen_custom_cc inst), gen_mich_v_ctx ~ctx value_unopt ) :: MF_is_true (gen_mich_v_ctx ~ctx mem) :: MF_not (MF_is_none (gen_mich_v_ctx ~ctx get)) :: michv_typ_constraints ~ctx ~v:key @ sigma_constraint_of_map_update ~ctx ~map:tb_container_v ~key ~value:(MV_none mapelem_t |> gen_custom_cc inst) ~updated_map ) | MT_set _ -> let (mem : mich_v cc) = MV_mem_xsb (elem_v, tb_container_v) |> gen_custom_cc inst in let (updated_set : mich_v cc) = MV_update_xbss ( elem_v, MV_lit_bool true |> gen_custom_cc inst, tb_container_v ) |> gen_custom_cc inst in (updated_set, [ MF_is_true (gen_mich_v_ctx ~ctx mem) ]) | MT_list _ -> let (hd : mich_v cc) = MV_hd_l tb_container_v |> gen_custom_cc inst in let (tl : mich_v cc) = MV_tl_l tb_container_v |> gen_custom_cc inst in ( tl, MF_eq (gen_mich_v_ctx ~ctx hd, gen_mich_v_ctx ~ctx elem_v) :: MF_is_cons (gen_mich_v_ctx ~ctx tb_container_v) :: sigma_constraint_of_list_cons ~ctx ~lst:tb_container_v ~hd ~tl ) | _ -> failwith "run_inst_i : MI_iter : tb_container_blocksi_v" in ( { tb_entry_si with si_mich = elem_v :: tb_entry_si.si_mich; si_iter = tb_container_blocksi_v :: List.tl_exn tb_entry_si.si_iter; }, elem_ct @ tb_container_blocksi_ct ) in { ss_id = tb_ss_id; ss_start_mci = thenbr_mci; ss_block_mci = thenbr_mci; ss_start_si = tb_entry_si; ss_block_si = tb_block_si; ss_constraints = tb_entry_constraints @ tb_block_constraints; } in be aware - between " after tb_symstate construction " and " before run - inst " , update ctxt_sr ( increase sid - counter ) - becuase new sym - state constructed before . update ctxt_sr (increase sid-counter) - becuase new sym-state constructed before. *) let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in 3.2 . run_inst_i recursive call let tb_sr_result_raw : se_result = run_inst_i i (ctxt_sr, tb_entry_ss) in 3.3 . transform running states to blocked states { tb_sr_result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map tb_sr_result_raw.sr_running ~f:(fun rss -> { rss with ss_block_mci = thenbr_mci } ) ) tb_sr_result_raw.sr_blocked; } in 3 . + . update ctxt_sr - override it using tb_result - it is okay to override since tb_result uses previous ctxt_sr in recursive call . 
- it is okay to override since tb_result uses previous ctxt_sr in recursive call. *) let ctxt_sr : se_result = tb_result in 4 . construct MAP instruction escaping sym - state ( else - branch ) let eb_symstate : sym_state = let eb_ss_id = [ ctxt_sr.sr_sid_counter ] in let eb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let ctx = eb_ss_id in let (eb_entry_si, eb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:eb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:eb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v ~bc_balance_v in ( { si_mich = michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = eb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ constraints_abp ) in { ss_id = eb_ss_id; ss_start_mci = elsebr_mci; ss_block_mci = elsebr_mci; ss_start_si = eb_entry_si; ss_block_si = eb_entry_si; ss_constraints = eb_entry_constraints; } in 4 . + . update ctxt_sr - increase sid - counter - becuase new sym - state constructed before . - becuase new sym-state constructed before. *) let ctxt_sr : se_result = ctxt_sr_sid_counter_incr ctxt_sr in { remember - current ctxt_sr contains tb_result in " 3 . + . 
" ctxt_sr with sr_running = SSet.singleton eb_symstate; sr_blocked = SSet.add ctxt_sr.sr_blocked blocked_state; } ) | MI_mem -> update_top_2_bmstack ~f:(fun (h, h2) -> let nv = match (typ_of_val h2).cc_v with | MT_set _ -> MV_mem_xsb (h, h2) | MT_map _ -> MV_mem_xmb (h, h2) | MT_big_map _ -> MV_mem_xbmb (h, h2) | _ -> failwith "run_inst_i : MI_mem : unexpected" in [ gen_custom_cc inst nv ]) ss |> running_ss_to_sr ctxt_sr | MI_get -> let ((key : mich_v cc), (cont : mich_v cc)) = get_bmstack_2 ss in update_top_2_bmstack_and_constraint ~f:(fun _ -> match (typ_of_val cont).cc_v with | MT_map _ -> let nv = MV_get_xmoy (key, cont) |> gen_custom_cc inst in ([ nv ], []) | MT_big_map _ -> let nv = MV_get_xbmo (key, cont) |> gen_custom_cc inst in ([ nv ], []) | _ -> failwith "run_inst_i : MI_get : unexpected") ss |> add_sigma_constraint_of_map_get ~ctx ~map:cont ~key |> running_ss_to_sr ctxt_sr | MI_update -> let ((key : mich_v cc), (value : mich_v cc), (cont : mich_v cc)) = get_bmstack_3 ss in let (updated_cont : mich_v cc) = (match (typ_of_val cont).cc_v with | MT_set _ -> MV_update_xbss (key, value, cont) | MT_map _ -> MV_update_xomm (key, value, cont) | MT_big_map _ -> MV_update_xobmbm (key, value, cont) | _ -> failwith "run_inst_i : MI_update : unexpected") |> gen_custom_cc inst in update_top_3_bmstack ~f:(fun _ -> [ updated_cont ]) ss |> (match (typ_of_val cont).cc_v with | MT_set _ -> Fun.id | MT_map _ -> add_sigma_constraint_of_map_update ~ctx ~map:cont ~key ~value ~updated_map:updated_cont | MT_big_map _ -> add_sigma_constraint_of_map_update ~ctx ~map:cont ~key ~value ~updated_map:updated_cont | _ -> failwith "run_inst_i : MI_update : unexpected") |> running_ss_to_sr ctxt_sr | MI_if (i1, i2) -> let cond_value : mich_v cc = List.hd_exn (get_bmstack ss) in let tb_cond_constraint : mich_f = MF_is_true (cond_value |> gen_mich_v_ctx ~ctx) in let then_br_sr : se_result = update_top_1_bmstack ~f:(fun _ -> []) ss |> add_constraints ~c:[ tb_cond_constraint ] |> (fun ssss -> run_inst_i i1 (ctxt_sr, ssss)) in let ctxt_sr = ctxt_sr_update ctxt_sr then_br_sr in let else_br_sr : se_result = let ctx = [ ctxt_sr.sr_sid_counter ] in increase sid_counter since takes new let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in let eb_cond_constraint : mich_f = MF_not (MF_is_true (gen_mich_v_ctx ~ctx cond_value)) in let else_br_base_ss = sym_state_symbol_context_swap ~ctx ss in let else_br_ss = else_br_base_ss |> update_top_1_bmstack ~f:(fun _ -> []) |> add_constraints ~c:[ eb_cond_constraint ] in run_inst_i i2 (ctxt_sr, else_br_ss) in se_result_pointwise_union then_br_sr else_br_sr | MI_loop i -> let (outer_cutcat, inner_cutcat) = (MCC_ln_loop, MCC_lb_loop) in let (blocked_mci, thenbr_mci, elsebr_mci) = ( { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = inner_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat } ) in 1 . Construct blocked - state let blocked_state : sym_state = { ss with ss_block_mci = blocked_mci } in 2 . If this LOOP - instruction is the instruction already met before , return only blocked - state . if MciSet.mem ctxt_sr.sr_entered_loops blocked_mci then { ctxt_sr with sr_blocked = SSet.singleton blocked_state } else ( 2 . + . update ctxt_sr - add entered - loop let ctxt_sr : se_result = { ctxt_sr with sr_entered_loops = MciSet.add ctxt_sr.sr_entered_loops blocked_mci; } in 3 . run - instruction inside LOOP instruction let tb_result : se_result = let tb_ss_id = [ ctxt_sr.sr_sid_counter ] in 3.1 . 
construct entry sym - state let tb_entry_ss : sym_state = let tb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let (tb_entry_si, tb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ctx = tb_ss_id in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:tb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:tb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v ~bc_balance_v in ( { si_mich = michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = tb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ constraints_abp ) in { ss_id = tb_ss_id; ss_start_mci = thenbr_mci; ss_block_mci = thenbr_mci; ss_start_si = tb_entry_si; ss_block_si = tb_entry_si; ss_constraints = tb_entry_constraints; } in be aware - between " after tb_symstate construction " and " before run - inst " , update ctxt_sr ( increase sid - counter ) - becuase new sym - state constructed before . update ctxt_sr (increase sid-counter) - becuase new sym-state constructed before. *) let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in 3.2 . run_inst_i recursive call let tb_sr_result_raw : se_result = run_inst_i i (ctxt_sr, tb_entry_ss) in 3.3 . transform running states to blocked states { tb_sr_result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map tb_sr_result_raw.sr_running ~f:(fun rss -> { rss with ss_block_mci = thenbr_mci } ) ) tb_sr_result_raw.sr_blocked; } in 3 . + . update ctxt_sr - override it using tb_result - it is okay to override since tb_result uses previous ctxt_sr in recursive call . - it is okay to override since tb_result uses previous ctxt_sr in recursive call. *) let ctxt_sr : se_result = tb_result in 4 . 
construct LOOP instruction escaping sym - state ( else - branch ) let eb_symstate : sym_state = let eb_ss_id = [ ctxt_sr.sr_sid_counter ] in let eb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let (eb_entry_si, eb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ctx = eb_ss_id in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:eb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:eb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v ~bc_balance_v in ( { si_mich = michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = eb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ constraints_abp ) in { ss_id = eb_ss_id; ss_start_mci = elsebr_mci; ss_block_mci = elsebr_mci; ss_start_si = eb_entry_si; ss_block_si = eb_entry_si; ss_constraints = eb_entry_constraints; } in 4 . + . update ctxt_sr - increase sid - counter - becuase new sym - state constructed before . - becuase new sym-state constructed before. *) let ctxt_sr : se_result = ctxt_sr_sid_counter_incr ctxt_sr in { remember - current ctxt_sr contains tb_result in " 3 . + . 
" ctxt_sr with sr_running = SSet.singleton eb_symstate; sr_blocked = SSet.add ctxt_sr.sr_blocked blocked_state; } ) | MI_loop_left i -> let (outer_cutcat, inner_cutcat) = (MCC_ln_loopleft, MCC_lb_loopleft) in let (blocked_mci, thenbr_mci, elsebr_mci) = ( { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = inner_cutcat }, { mci_loc = inst.cc_loc; mci_cutcat = outer_cutcat } ) in let branch_v = get_bmstack_1 ss in let branch_t : mich_t cc = typ_of_val branch_v in let (left_elem_t, right_elem_t) : mich_t cc * mich_t cc = match branch_t.cc_v with | MT_or (t1, t2) -> (t1, t2) | _ -> failwith "run_inst_i : MI_loop_left : left_elem_t, right_elem_t" in Convert LOOP_LEFT to LOOP LOOP_LEFT { BODY } = = = PUSH bool True ; LOOP { IF_LEFT { BODY ; PUSH bool True } { RIGHT left_elem_t ; PUSH bool False } } ; IF_LEFT { PUSH bool False ; FAILWITH } { } LOOP_LEFT {BODY} === PUSH bool True; LOOP { IF_LEFT {BODY; PUSH bool True} {RIGHT left_elem_t; PUSH bool False} }; IF_LEFT {PUSH bool False; FAILWITH} { } *) let _ = ignore (blocked_mci, thenbr_mci, elsebr_mci, right_elem_t) in let gcc_inst : mich_i -> mich_i cc = gen_custom_cc inst in let gcc_inst_t : mich_t -> mich_t cc = gen_custom_cc inst in let gcc_inst_v : mich_v -> mich_v cc = gen_custom_cc inst in let typ_bool : mich_t cc = gcc_inst_t MT_bool in let push_bool_inst b = gcc_inst (MI_push (typ_bool, gcc_inst_v (MV_lit_bool b))) in let loop_inst : mich_i cc = gcc_inst (let body_true_seq : mich_i cc = gcc_inst (MI_seq (i, push_bool_inst true)) in let right_false_seq : mich_i cc = gcc_inst (MI_seq (gcc_inst (MI_right left_elem_t), push_bool_inst false)) in let if_left_inst : mich_i cc = gcc_inst (MI_if_left (body_true_seq, right_false_seq)) in MI_loop if_left_inst ) in let last_if_left_inst : mich_i cc = gcc_inst (MI_if_left ( gcc_inst (MI_seq (push_bool_inst false, gcc_inst MI_failwith)), gcc_inst (MI_drop (Bigint.of_int 0)) ) ) in let new_inst = gcc_inst (MI_seq ( push_bool_inst true, gcc_inst (MI_seq (loop_inst, last_if_left_inst)) ) ) in run_inst_i new_inst (ctxt_sr, ss) ( * 1 . Construct blocked - state let blocked_state : sym_state = { ss with ss_block_mci = blocked_mci } in 2 . If this LOOP_LEFT - instruction is the instruction already met before , return only blocked - state . if MciSet.mem ctxt_sr.sr_entered_loops blocked_mci then { ctxt_sr with sr_blocked = SSet.singleton blocked_state } else ( 2 . + . update ctxt_sr - add entered - loop let ctxt_sr : se_result = { ctxt_sr with sr_entered_loops = MciSet.add ctxt_sr.sr_entered_loops blocked_mci; } in 3 . run - instruction inside LOOP_LEFT instruction let tb_result : se_result = let tb_ss_id = [ ctxt_sr.sr_sid_counter ] in 3.1 . 
construct entry sym - state let tb_entry_ss : sym_state = let tb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let (tb_entry_si, tb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ctx = tb_ss_id in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:tb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:tb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:tb_trx_image.ti_amount ~balance_v ~bc_balance_v in let elem_v = MV_symbol (left_elem_t, MSC_mich_stack (List.length michst)) |> gen_custom_cc inst in let elem_ct = michv_typ_constraints ~ctx ~v:elem_v in ( { si_mich = elem_v :: michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = tb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ elem_ct @ constraints_abp ) in { ss_id = tb_ss_id; ss_start_mci = thenbr_mci; ss_block_mci = thenbr_mci; ss_start_si = tb_entry_si; ss_block_si = tb_entry_si; ss_constraints = tb_entry_constraints; } in be aware - between " after tb_symstate construction " and " before run - inst " , update ctxt_sr ( increase sid - counter ) - becuase new sym - state constructed before . update ctxt_sr (increase sid-counter) - becuase new sym-state constructed before. *) let ctxt_sr = ctxt_sr_sid_counter_incr ctxt_sr in 3.2 . run_inst_i recursive call let tb_sr_result_raw : se_result = run_inst_i i (ctxt_sr, tb_entry_ss) in 3.3 . transform running states to blocked states { tb_sr_result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map tb_sr_result_raw.sr_running ~f:(fun rss -> { rss with ss_block_mci = thenbr_mci } ) ) tb_sr_result_raw.sr_blocked; } in 3 . + . update ctxt_sr - override it using tb_result - it is okay to override since tb_result uses previous ctxt_sr in recursive call . - it is okay to override since tb_result uses previous ctxt_sr in recursive call. *) let ctxt_sr : se_result = tb_result in 4 . 
construct MAP instruction escaping sym - state ( else - branch ) let eb_symstate : sym_state = let eb_ss_id = [ ctxt_sr.sr_sid_counter ] in let eb_trx_image : trx_image = blocked_state.ss_block_si.si_param in let (eb_entry_si, eb_entry_constraints) : sym_image * mich_f list = let bsi = blocked_state.ss_block_si in let ctx = eb_ss_id in let ccmaker = gen_custom_cc inst in let (michst, michct) = generate_symstack ~f:(fun x -> MSC_mich_stack x) ~ctx ~ccmaker (List.tl_exn bsi.si_mich) in let (dipst, dipct) = generate_symstack ~f:(fun x -> MSC_dip_stack x) ~ctx ~ccmaker bsi.si_dip in let (mapentryst, mapentryct) = generate_symstack ~f:(fun x -> MSC_map_entry_stack x) ~ctx ~ccmaker bsi.si_map_entry in let (mapexitst, mapexitct) = generate_symstack ~f:(fun x -> MSC_map_exit_stack x) ~ctx ~ccmaker bsi.si_map_exit in let (mapkeyst, mapkeyct) = generate_symstack ~f:(fun x -> MSC_map_mapkey_stack x) ~ctx ~ccmaker bsi.si_map_mapkey in let (iterst, iterct) = generate_symstack ~f:(fun x -> MSC_iter_stack x) ~ctx ~ccmaker bsi.si_iter in let balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_balance) |> gen_custom_cc inst in let bc_balance_v : mich_v cc = MV_symbol (MT_mutez |> gen_custom_cc inst, MSC_bc_balance) |> gen_custom_cc inst in let constraints_abp = ge_balance_amount_in_non_trx_entry_constraint ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v :: michv_typ_constraints ~ctx ~v:eb_trx_image.ti_param @ [ mtz_comes_from_constraint ~ctx ~mtz_v:eb_trx_image.ti_amount ~from_v:balance_v; ] @ amount_balance_mutez_constraints ~ctx ~amount_v:eb_trx_image.ti_amount ~balance_v ~bc_balance_v in let elem_v = MV_symbol (right_elem_t, MSC_mich_stack (List.length michst)) |> gen_custom_cc inst in let elem_ct = michv_typ_constraints ~ctx ~v:elem_v in ( { si_mich = elem_v :: michst; si_dip = dipst; si_map_entry = mapentryst; si_map_exit = mapexitst; si_map_mapkey = mapkeyst; si_iter = iterst; si_balance = balance_v; si_bc_balance = bc_balance_v; si_param = eb_trx_image; }, michct @ dipct @ mapentryct @ mapexitct @ mapkeyct @ iterct @ elem_ct @ constraints_abp ) in { ss_id = eb_ss_id; ss_start_mci = elsebr_mci; ss_block_mci = elsebr_mci; ss_start_si = eb_entry_si; ss_block_si = eb_entry_si; ss_constraints = eb_entry_constraints; } in 4 . + . update ctxt_sr - increase sid - counter - becuase new sym - state constructed before . - becuase new sym-state constructed before. *) let ctxt_sr : se_result = ctxt_sr_sid_counter_incr ctxt_sr in { remember - current ctxt_sr contains tb_result in " 3 . + . 
" ctxt_sr with sr_running = SSet.singleton eb_symstate; sr_blocked = SSet.add ctxt_sr.sr_blocked blocked_state; } ) *) | MI_lambda (t1, t2, i) -> push_bmstack ~v:(MV_lit_lambda (t1, t2, i) |> gen_custom_cc inst) ss |> running_ss_to_sr ctxt_sr | MI_exec -> update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> let v = MV_exec (h1, h2) |> gen_custom_cc inst in ([ v ], michv_typ_constraints ~ctx ~v)) ss |> running_ss_to_sr ctxt_sr | MI_dip_n (zn, i) -> let n : int = Bigint.to_int_exn zn in let dipped_ss = let (dip_elems, new_mich) = List.split_n ss.ss_block_si.si_mich n in let new_dip = List.rev_append dip_elems ss.ss_block_si.si_dip in { ss with ss_block_si = { ss.ss_block_si with si_mich = new_mich; si_dip = new_dip }; } in let sr_i = run_inst_i i (ctxt_sr, dipped_ss) in let undip d_ss = let (mich_elems, new_dip) = List.split_n d_ss.ss_block_si.si_dip n in let new_mich = List.rev_append mich_elems d_ss.ss_block_si.si_mich in { d_ss with ss_block_si = { d_ss.ss_block_si with si_mich = new_mich; si_dip = new_dip }; } in { sr_i with sr_running = SSet.map sr_i.sr_running ~f:undip } | MI_failwith -> 1 . set block_mci 2 . enroll this sym_state to sr_terminated 2. enroll this sym_state to sr_terminated *) let bmci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_trx_exit } in { se_result_empty with sr_terminated = SSet.singleton { ss with ss_block_mci = bmci }; } | MI_cast t -> update_top_1_bmstack ~f:(fun x -> if equal_mich_t (typ_of_val x).cc_v t.cc_v then [ x ] else SeError "Not Supported Cast" |> raise) ss |> running_ss_to_sr ctxt_sr | MI_rename -> update_top_1_bmstack ~f:(fun x -> [ { x with cc_anl = inst.cc_anl } ]) ss |> running_ss_to_sr ctxt_sr | MI_concat -> ( let h = get_bmstack_1 ss in match (typ_of_val h).cc_v with | MT_string -> update_top_2_bmstack ~f:(fun (h1, h2) -> [ MV_concat_sss (h1, h2) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MT_bytes -> update_top_2_bmstack ~f:(fun (h1, h2) -> [ MV_concat_bbb (h1, h2) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MT_list { cc_v = MT_string; _ } -> update_top_1_bmstack ~f:(fun _ -> [ MV_concat_list_s h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MT_list { cc_v = MT_bytes; _ } -> update_top_1_bmstack ~f:(fun _ -> [ MV_concat_list_b h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | _ -> failwith "run_inst_i : MI_concat : unexpected" ) | MI_slice -> update_top_3_bmstack ~f:(fun (h1, h2, h3) -> match (typ_of_val h3).cc_v with | MT_string -> [ MV_slice_nnso (h1, h2, h3) |> gen_custom_cc inst ] | MT_bytes -> [ MV_slice_nnbo (h1, h2, h3) |> gen_custom_cc inst ] | _ -> failwith "run_inst_i : MI_slice : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_pack -> update_top_1_bmstack ~f:(fun h -> [ MV_pack h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_unpack t -> update_top_1_bmstack_and_constraint ~f:(fun h -> let mvcc = MV_unpack (t, h) |> gen_custom_cc inst in ([ mvcc ], michv_typ_constraints ~ctx ~v:mvcc)) ss |> running_ss_to_sr ctxt_sr | MI_add -> ( let add_gen_sr : mich_v * mich_f list -> se_result = fun (mv, csl) -> update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ mv |> gen_custom_cc inst ], csl)) ss |> running_ss_to_sr ctxt_sr in let (h, h2) = get_bmstack_2 ss in match ((typ_of_val h).cc_v, (typ_of_val h2).cc_v) with | (MT_nat, MT_int) -> add_gen_sr (MV_add_nii (h, h2), []) | (MT_int, MT_nat) -> add_gen_sr (MV_add_ini (h, h2), []) | (MT_int, MT_int) -> add_gen_sr (MV_add_iii (h, h2), []) | (MT_nat, MT_nat) -> add_gen_sr ( MV_add_nnn (h, h2), [ MF_nat_bound (MV_add_nnn (h, h2) |> 
gen_custom_cc inst |> gen_mich_v_ctx ~ctx); ] ) | (MT_timestamp, MT_int) -> add_gen_sr (MV_add_tit (h, h2), []) | (MT_int, MT_timestamp) -> add_gen_sr (MV_add_itt (h, h2), []) | (MT_mutez, MT_mutez) -> let nv = MV_add_mmm (h, h2) |> gen_custom_cc inst in let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_mutez_add_no_overflow; }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ nv ], [ MF_mutez_bound (nv |> gen_mich_v_ctx ~ctx) ])) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | _ -> failwith "run_inst_i : MI_add : unexpected" ) | MI_sub -> ( let sub_gen_sr : mich_v -> se_result = fun mv -> update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ mv |> gen_custom_cc inst ], [])) ss |> running_ss_to_sr ctxt_sr in let (h, h2) = get_bmstack_2 ss in match ((typ_of_val h).cc_v, (typ_of_val h2).cc_v) with | (MT_nat, MT_nat) -> MV_sub_nni (h, h2) |> sub_gen_sr | (MT_nat, MT_int) -> MV_sub_nii (h, h2) |> sub_gen_sr | (MT_int, MT_nat) -> MV_sub_ini (h, h2) |> sub_gen_sr | (MT_int, MT_int) -> MV_sub_iii (h, h2) |> sub_gen_sr | (MT_timestamp, MT_timestamp) -> MV_sub_tti (h, h2) |> sub_gen_sr | (MT_timestamp, MT_int) -> MV_sub_tit (h, h2) |> sub_gen_sr | (MT_mutez, MT_mutez) -> let nv = MV_sub_mmm (h, h2) |> gen_custom_cc inst in let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_mutez_sub_no_underflow; }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ nv ], [ MF_mutez_bound (nv |> gen_mich_v_ctx ~ctx) ])) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | _ -> failwith "run_inst_i : MI_sub : unexpected" ) | MI_mul -> ( let mul_gen_sr : mich_v * mich_f list -> se_result = fun (mv, csl) -> update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ mv |> gen_custom_cc inst ], csl)) ss |> running_ss_to_sr ctxt_sr in let (h, h2) = get_bmstack_2 ss in match ((typ_of_val h).cc_v, (typ_of_val h2).cc_v) with | (MT_nat, MT_nat) -> mul_gen_sr ( MV_mul_nnn (h, h2), [ MF_mutez_bound (MV_mul_nnn (h, h2) |> gen_custom_cc inst |> gen_mich_v_ctx ~ctx); ] ) | (MT_nat, MT_int) -> mul_gen_sr (MV_mul_nii (h, h2), []) | (MT_int, MT_nat) -> mul_gen_sr (MV_mul_ini (h, h2), []) | (MT_int, MT_int) -> mul_gen_sr (MV_mul_iii (h, h2), []) | (MT_mutez, MT_nat) -> let nv = MV_mul_mnm (h, h2) |> gen_custom_cc inst in let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_mutez_mul_mnm_no_overflow; }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ nv ], [ MF_mutez_bound (nv |> gen_mich_v_ctx ~ctx) ])) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | (MT_nat, MT_mutez) -> let nv = MV_mul_nmm (h, h2) |> gen_custom_cc inst in let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_mutez_mul_nmm_no_overflow; }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ nv ], [ MF_mutez_bound (nv |> gen_mich_v_ctx ~ctx) ])) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | _ -> failwith "run_inst_i : MI_mul : unexpected" ) | MI_ediv -> ( let ediv_gen_sr : mich_v -> se_result = fun mv -> update_top_2_bmstack_and_constraint ~f:(fun _ -> ([ mv |> gen_custom_cc inst ], [])) ss |> running_ss_to_sr ctxt_sr in let (h, h2) = get_bmstack_2 ss in match ((typ_of_val h).cc_v, (typ_of_val h2).cc_v) with | 
(MT_nat, MT_nat) -> MV_ediv_nnnn (h, h2) |> ediv_gen_sr | (MT_nat, MT_int) -> MV_ediv_niin (h, h2) |> ediv_gen_sr | (MT_int, MT_nat) -> MV_ediv_inin (h, h2) |> ediv_gen_sr | (MT_int, MT_int) -> MV_ediv_iiin (h, h2) |> ediv_gen_sr | (MT_mutez, MT_nat) -> MV_ediv_mnmm (h, h2) |> ediv_gen_sr | (MT_mutez, MT_mutez) -> MV_ediv_mmnm (h, h2) |> ediv_gen_sr | _ -> failwith "run_inst_i : MI_ediv : unexpected" ) | MI_abs -> update_top_1_bmstack_and_constraint ~f:(fun h -> let mvcc = MV_abs_in h |> gen_custom_cc inst in ([ mvcc ], [ MF_nat_bound { ctx_i = ctx; ctx_v = mvcc } ])) ss |> running_ss_to_sr ctxt_sr | MI_isnat -> update_top_1_bmstack ~f:(fun x -> [ MV_isnat x |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_int -> update_top_1_bmstack ~f:(fun x -> [ MV_int_of_nat x |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_neg -> update_top_1_bmstack ~f:(fun h -> match (typ_of_val h).cc_v with | MT_nat -> [ MV_neg_ni h |> gen_custom_cc inst ] | MT_int -> [ MV_neg_ii h |> gen_custom_cc inst ] | _ -> failwith "run_inst_i : MI_neg : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_lsl -> let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_shiftleft_safe }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> let nv = MV_shiftL_nnn (h1, h2) |> gen_custom_cc inst in ( [ nv ], [ MF_shiftL_nnn_rhs_in_256 ({ ctx_i = ctx; ctx_v = h1 }, { ctx_i = ctx; ctx_v = h2 }); ] )) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | MI_lsr -> let qstate : sym_state = { ss with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_shiftright_safe }; } in let rstate : sym_state = update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> let nv = MV_shiftR_nnn (h1, h2) |> gen_custom_cc inst in ( [ nv ], [ MF_shiftR_nnn_rhs_in_256 ({ ctx_i = ctx; ctx_v = h1 }, { ctx_i = ctx; ctx_v = h2 }); ] )) ss in { (running_ss_to_sr ctxt_sr rstate) with sr_queries = SSet.singleton qstate; } | MI_or -> update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> match ((typ_of_val h1).cc_v, (typ_of_val h2).cc_v) with | (MT_bool, MT_bool) -> ([ MV_or_bbb (h1, h2) |> gen_custom_cc inst ], []) | (MT_nat, MT_nat) -> let nv = MV_or_nnn (h1, h2) |> gen_custom_cc inst in ([ nv ], [ MF_nat_bound { ctx_i = ctx; ctx_v = nv } ]) | _ -> failwith "run_inst_i : MI_or : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_and -> update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> match ((typ_of_val h1).cc_v, (typ_of_val h2).cc_v) with | (MT_bool, MT_bool) -> ([ MV_and_bbb (h1, h2) |> gen_custom_cc inst ], []) | (MT_nat, MT_nat) -> let nv = MV_and_nnn (h1, h2) |> gen_custom_cc inst in ([ nv ], [ MF_nat_bound { ctx_i = ctx; ctx_v = nv } ]) | (MT_int, MT_nat) -> let nv = MV_and_inn (h1, h2) |> gen_custom_cc inst in ([ nv ], [ MF_nat_bound { ctx_i = ctx; ctx_v = nv } ]) | _ -> failwith "run_inst_i : MI_and : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_xor -> update_top_2_bmstack_and_constraint ~f:(fun (h1, h2) -> match ((typ_of_val h1).cc_v, (typ_of_val h2).cc_v) with | (MT_bool, MT_bool) -> ([ MV_xor_bbb (h1, h2) |> gen_custom_cc inst ], []) | (MT_nat, MT_nat) -> let nv = MV_xor_nnn (h1, h2) |> gen_custom_cc inst in ([ nv ], [ MF_nat_bound { ctx_i = ctx; ctx_v = nv } ]) | _ -> failwith "run_inst_i : MI_xor : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_not -> update_top_1_bmstack ~f:(fun h -> match (typ_of_val h).cc_v with | MT_bool -> [ MV_not_bb h |> gen_custom_cc inst ] | MT_nat -> [ MV_not_ni h |> 
gen_custom_cc inst ] | MT_int -> [ MV_not_ii h |> gen_custom_cc inst ] | _ -> failwith "run_inst_i : MI_not : unexpected") ss |> running_ss_to_sr ctxt_sr | MI_compare -> update_top_2_bmstack ~f:(fun (x, y) -> [ MV_compare (x, y) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_eq -> update_top_1_bmstack ~f:(fun x -> [ MV_eq_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_neq -> update_top_1_bmstack ~f:(fun x -> [ MV_neq_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_lt -> update_top_1_bmstack ~f:(fun x -> [ MV_lt_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_gt -> update_top_1_bmstack ~f:(fun x -> [ MV_gt_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_le -> update_top_1_bmstack ~f:(fun x -> [ MV_leq_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_ge -> update_top_1_bmstack ~f:(fun x -> [ MV_geq_ib (x, MV_lit_int Bigint.zero |> gen_custom_cc inst) |> gen_custom_cc inst; ]) ss |> running_ss_to_sr ctxt_sr | MI_self -> push_bmstack ~v:ss.ss_block_si.si_param.ti_contract ss |> running_ss_to_sr ctxt_sr | MI_contract t -> update_top_1_bmstack ~f:(fun x -> [ MV_contract_of_address (t, x) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_transfer_tokens -> update_top_3_bmstack ~f:(fun (x, y, z) -> [ MV_transfer_tokens (x, y, z) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_set_delegate -> update_top_1_bmstack ~f:(fun x -> [ MV_set_delegate x |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_create_contract (t1, t2, i) -> let (lambda : mich_v cc) = let (t_op_list : mich_t cc) = MT_list (gen_custom_cc inst MT_operation) |> gen_custom_cc inst in let (t_input : mich_t cc) = MT_pair (t1, t2) |> gen_custom_cc inst in let (t_output : mich_t cc) = MT_pair (t_op_list, t2) |> gen_custom_cc inst in MV_lit_lambda (t_input, t_output, i) |> gen_custom_cc inst in let (addr : mich_v cc) = MV_symbol (MT_address |> gen_custom_cc inst, MSC_new_contract) |> gen_custom_cc inst in update_top_3_bmstack ~f:(fun (kh_opt, z, s) -> [ MV_create_contract (t1, t2, lambda, kh_opt, z, s, addr) |> gen_custom_cc inst; addr; ]) ss |> running_ss_to_sr ctxt_sr | MI_implicit_account -> update_top_1_bmstack ~f:(fun h -> [ MV_implicit_account h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_now -> push_bmstack ~v:ss.ss_block_si.si_param.ti_time ss |> running_ss_to_sr ctxt_sr | MI_amount -> let amount_v = ss.ss_block_si.si_param.ti_amount in push_bmstack ss ~v:amount_v |> add_typ_constraints ~ctx ~v:amount_v |> running_ss_to_sr ctxt_sr | MI_balance -> let balance_v = ss.ss_block_si.si_balance in push_bmstack ss ~v:balance_v |> add_typ_constraints ~ctx ~v:balance_v |> running_ss_to_sr ctxt_sr | MI_check_signature -> update_top_3_bmstack ~f:(fun (h1, h2, h3) -> [ MV_check_signature (h1, h2, h3) |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_blake2b -> update_top_1_bmstack ~f:(fun h -> [ MV_blake2b h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_sha256 -> update_top_1_bmstack ~f:(fun h -> [ MV_sha256 h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_sha512 -> update_top_1_bmstack ~f:(fun h -> [ MV_sha512 h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_hash_key -> update_top_1_bmstack ~f:(fun 
h -> [ MV_hash_key h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_source -> let source_v = ss.ss_block_si.si_param.ti_source in push_bmstack ss ~v:source_v |> running_ss_to_sr ctxt_sr | MI_sender -> let sender_v = ss.ss_block_si.si_param.ti_sender in push_bmstack ss ~v:sender_v |> running_ss_to_sr ctxt_sr | MI_address -> update_top_1_bmstack ~f:(fun h -> [ MV_address_of_contract h |> gen_custom_cc inst ]) ss |> running_ss_to_sr ctxt_sr | MI_chain_id -> push_bmstack ss ~v:(MV_lit_chain_id "TzChain" |> gen_custom_cc inst) |> running_ss_to_sr ctxt_sr | MI_unpair -> update_top_1_bmstack_and_constraint ~f:(fun x -> let (a_fl, a_v) = MV_car x |> gen_custom_cc inst |> TzUtil.opt_mvcc ~ctx in let (d_fl, d_v) = MV_cdr x |> gen_custom_cc inst |> TzUtil.opt_mvcc ~ctx in ([ a_v; d_v ], a_fl @ d_fl)) ss |> running_ss_to_sr ctxt_sr | MI_micse_check i -> dealing with micse - check - bring running states from the result of " i " , and convert them to queries . - se_result might be go wrong when any loop - like instructions ( LOOP , LOOP_LEFT , ITER , MAP ) are inserted in micse - check instruction . - bring running states from the result of "i", and convert them to queries. - se_result might be go wrong when any loop-like instructions (LOOP, LOOP_LEFT, ITER, MAP) are inserted in micse-check instruction. *) let micse_check_se_result : se_result = run_inst_i i (ctxt_sr, ss) in let ctxt_sr = ctxt_sr_update ctxt_sr micse_check_se_result in { ctxt_sr with sr_running = SSet.singleton ss; If MI_micse_check allows loop - like instructions , it should be considered to add blocked - states in micse_check_se_result in return value . considered to add blocked-states in micse_check_se_result in return value. *) sr_blocked = SSet.empty; sr_queries = SSet.map micse_check_se_result.sr_running ~f:(fun rs -> { rs with ss_block_mci = { mci_loc = inst.cc_loc; mci_cutcat = MCC_query Q_assertion }; } ); sr_terminated = SSet.empty; } | _ -> failwith ("run_inst_i : wildcard match triggered : " ^ (sexp_of_mich_i inst.cc_v |> SexpUtil.tz_cc_sexp_form |> Sexp.to_string) ) let run_inst_entry : Tz.mich_t Tz.cc * Tz.mich_t Tz.cc * Tz.mich_i Tz.cc -> se_result * Tz.sym_state = let open Tz in let open TzUtil in fun (pt, st, c) -> let final_blocking : sym_state -> sym_state = fun ss -> let ctx = ss.ss_id in let (op_mtz_fl, op_mtz_v) : mich_f list * mich_v cc = MV_car (List.hd_exn ss.ss_block_si.si_mich) |> gen_custom_cc c |> TzUtil.opt_mvcc ~ctx |> fun (fl, mvcc) -> let ((lst_fl : mich_f list), (lst : mich_v cc list), (tl : mich_v cc)) = v_of_list ~ctx mvcc in let ((mtz_fl : mich_f list), (mtz_vl : mich_v cc option list)) = List.fold lst ~init:([], []) ~f:(fun (mtz_fl, mtz_vl) opv -> let ((fl : mich_f list), (v_opt : mich_v cc option)) = mtz_of_op ~ctx opv in (fl @ mtz_fl, v_opt :: mtz_vl) ) in let (mtz_v : mich_v cc) = List.fold mtz_vl ~init:(MV_mtz_of_op_list tl |> gen_custom_cc c) ~f:(fun acc vopt -> if Option.is_some vopt then MV_add_mmm (acc, Option.value_exn vopt) |> gen_custom_cc acc else acc) in (lst_fl @ mtz_fl @ fl, mtz_v) in let new_balance : mich_v cc = MV_sub_mmm (ss.ss_block_si.si_balance, op_mtz_v) |> gen_custom_cc c in let new_bc_balance : mich_v cc = MV_add_mmm (ss.ss_block_si.si_bc_balance, op_mtz_v) |> gen_custom_cc c in { ss with ss_block_mci = { mci_loc = c.cc_loc; mci_cutcat = MCC_trx_exit }; ss_block_si = { ss.ss_block_si with si_balance = new_balance; si_bc_balance = new_bc_balance; }; ss_constraints = MF_and op_mtz_fl :: mtz_comes_from_constraint ~ctx ~mtz_v:op_mtz_v 
~from_v:ss.ss_block_si.si_balance :: amount_balance_mutez_constraints ~ctx ~amount_v:op_mtz_v ~balance_v:new_balance ~bc_balance_v:new_bc_balance @ ss.ss_constraints; } in let (initial_sr, initial_ss) = run_inst_initial_se_result (pt, st, c) in let result_raw = run_inst c initial_sr in print_endline ("result_raw running = " ^ (SSet.length result_raw.sr_running |> string_of_int) ^ ", blocked = " ^ (SSet.length result_raw.sr_blocked |> string_of_int) ) in *) let result = { result_raw with sr_running = SSet.empty; sr_blocked = SSet.union (SSet.map result_raw.sr_running ~f:final_blocking) result_raw.sr_blocked; } in let ss_constraint_optimization : sym_state -> sym_state = fun ss -> { ss with ss_constraints = ss.ss_constraints |> List.map ~f:opt_mf |> List.stable_dedup; } in let result_constraint_optimized = { result with sr_blocked = SSet.map result.sr_blocked ~f:ss_constraint_optimization; sr_queries = SSet.map result.sr_queries ~f:ss_constraint_optimization |> SSet.filter ~f:(fun ss -> if Option.is_none !Utils.Argument.query_pick then true else ( let ((picked_lin : int), (picked_col : int)) = Option.value_exn !Utils.Argument.query_pick in match ss.ss_block_mci.mci_loc with | CCLOC_Pos (p1, _) when p1.lin = picked_lin && p1.col = picked_col -> true | _ -> false ) ); } in let increase_depth_str : int -> string -> string = fun d s -> let (tab : string) = String.make d '\t' in tab ^ String.substr_replace_all s ~pattern:"\n" ~with_:("\n" ^ tab) in let string_of_mf : mich_f -> string = (fun mf -> sexp_of_mich_f mf |> SexpUtil.to_string) in let string_of_mflst : mich_f list -> string list = (fun mfl -> List.map mfl ~f:string_of_mf) in let string_of_mvcc : mich_v cc -> string = (fun mvcc -> sexp_of_mich_v mvcc.cc_v |> SexpUtil.to_string) in let string_of_mvcclst : mich_v cc list -> string list = (fun mvl -> List.map mvl ~f:string_of_mvcc) in let string_of_sid : sym_state_id -> string = fun sid -> Printf.sprintf "[%s]" (List.map sid ~f:string_of_int |> String.concat ~sep:"; ") in let string_of_mci : mich_cut_info -> string = (fun mci -> sexp_of_mich_cut_info mci |> SexpUtil.to_string) in let string_of_si : sym_image -> string = fun si -> Printf.sprintf "> MICH:\n\t[\n%s\n\t]" (string_of_mvcclst si.si_mich |> String.concat ~sep:" ;\n" |> increase_depth_str 2 ) in let string_of_ss : sym_state -> string = fun ss -> Printf.sprintf "> ID: %s\n\n> START: \n\t> MCI: %s\n\t> SI: \n\t\t[\n%s\n\t\t]\n\n> BLOCK: \n\t> MCI: %s\n\t> SI: \n\t\t[\n%s\n\t\t]\n\n> CONSTRAINT: \n\t[\n%s\n\t]" (string_of_sid ss.ss_id) (string_of_mci ss.ss_start_mci) (string_of_si ss.ss_start_si |> increase_depth_str 3) (string_of_mci ss.ss_block_mci) (string_of_si ss.ss_block_si |> increase_depth_str 3) (string_of_mflst ss.ss_constraints |> String.concat ~sep:" ;\n" |> increase_depth_str 2 ) in SSet.fold result_constraint_optimized.sr_blocked ~init:0 ~f:(fun id ss -> Utils.Log.debug (fun m -> m "BLOCK_STATE [#%d]\n%s" id (string_of_ss ss) ); id + 1 ) |> ignore; SSet.fold result_constraint_optimized.sr_queries ~init:0 ~f:(fun id ss -> Utils.Log.debug (fun m -> m "QUERY_STATE [#%d]\n%s" id (string_of_ss ss) ); id + 1 ) |> ignore; exit 0 in *) (result_constraint_optimized, initial_ss)
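Note on the MI_loop_left case above: it desugars LOOP_LEFT into a plain LOOP and re-runs the interpreter on the rewritten instruction, following the equivalence stated in its comment (LOOP_LEFT {BODY} === PUSH bool True; LOOP { IF_LEFT {BODY; PUSH bool True} {RIGHT left_elem_t; PUSH bool False} }; IF_LEFT {PUSH bool False; FAILWITH} {}). The sketch below only illustrates the shape of that rewrite on a deliberately simplified, self-contained instruction type; the type and constructor names are hypothetical and are not the project's mich_i cc AST.

(* Toy illustration of the LOOP_LEFT -> LOOP desugaring; simplified AST, not micse's. *)
type inst =
  | Seq of inst * inst
  | Push_bool of bool
  | Loop of inst
  | Loop_left of inst
  | If_left of inst * inst      (* (left branch, right branch) *)
  | Right_                      (* wrap the top of the stack into the right side of an or-type *)
  | Failwith
  | Nop

let rec desugar : inst -> inst = function
  | Loop_left body ->
    (* Loop body: run BODY on Left values and push True; on Right values push False to exit. *)
    let loop_body =
      If_left (Seq (desugar body, Push_bool true), Seq (Right_, Push_bool false))
    in
    Seq
      ( Push_bool true,
        Seq (Loop loop_body, If_left (Seq (Push_bool false, Failwith), Nop)) )
  | Seq (a, b) -> Seq (desugar a, desugar b)
  | Loop i -> Loop (desugar i)
  | If_left (a, b) -> If_left (desugar a, desugar b)
  | (Push_bool _ | Right_ | Failwith | Nop) as i -> i

In the actual code every generated node is wrapped with gen_custom_cc inst, which appears to keep the source location of the original LOOP_LEFT attached to the rewritten instructions.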
2b530aecd823b53f3759d141ba94c3c2d2f49df75bff259e3fa8859eb6455b4e
LaurentMazare/btc-ocaml
hash.ml
open Core.Std

let hex_of_char c =
  if Char.('0' <= c && c <= '9') then Char.to_int c - Char.to_int '0'
  else if Char.('a' <= c && c <= 'f') then 10 + Char.to_int c - Char.to_int 'a'
  else failwithf "char %c is not hex" c ()

let char_of_hex i =
  if 0 <= i && i < 10 then Char.of_int_exn (Char.to_int '0' + i)
  else if 10 <= i && i < 16 then Char.of_int_exn (Char.to_int 'a' + i - 10)
  else failwithf "not in hex range %d" i ()

let of_hex str =
  let len = String.length str in
  if len % 2 = 0 then
    String.init (len / 2) ~f:(fun i ->
      Char.of_int_exn
        (16 * hex_of_char str.[len - 2*i - 2] + hex_of_char str.[len - 2*i - 1]))
  else failwith "Input string size is odd"

let to_hex str =
  let len = String.length str in
  String.init (2*len) ~f:(fun i ->
    let c = str.[len-1 - i / 2] |> Char.to_int in
    if i % 2 = 0 then char_of_hex (c / 16) else char_of_hex (c % 16))

let difficulty t =
  let rec loop acc index =
    if index < 0 then acc
    else
      let c = String.get t index |> Char.to_int in
      loop (256. *. acc +. float c) (index - 1)
  in
  (* 1.1579e77 ~ 2 ^ 256 *)
  1.1579e77 /. loop 0. (String.length t - 1)

include String

let sexp_of_t t = Sexp.Atom (to_hex t)

let t_of_sexp = function
  | Sexp.Atom s -> of_hex s
  | _ -> failwith "Atom expected"

let consume iobuf = Iobuf.Consume.string iobuf ~len:32

let fill iobuf str =
  Iobuf.Fill.tail_padded_fixed_string iobuf str ~len:32 ~padding:'\000'

let zero = String.of_char_list (List.init 32 ~f:(fun _ -> '\000'))
null
https://raw.githubusercontent.com/LaurentMazare/btc-ocaml/0616d7853d807e50dc9ff83c6783b80f71640da2/hash.ml
ocaml
open Core.Std

let hex_of_char c =
  if Char.('0' <= c && c <= '9') then Char.to_int c - Char.to_int '0'
  else if Char.('a' <= c && c <= 'f') then 10 + Char.to_int c - Char.to_int 'a'
  else failwithf "char %c is not hex" c ()

let char_of_hex i =
  if 0 <= i && i < 10 then Char.of_int_exn (Char.to_int '0' + i)
  else if 10 <= i && i < 16 then Char.of_int_exn (Char.to_int 'a' + i - 10)
  else failwithf "not in hex range %d" i ()

let of_hex str =
  let len = String.length str in
  if len % 2 = 0 then
    String.init (len / 2) ~f:(fun i ->
      Char.of_int_exn
        (16 * hex_of_char str.[len - 2*i - 2] + hex_of_char str.[len - 2*i - 1]))
  else failwith "Input string size is odd"

let to_hex str =
  let len = String.length str in
  String.init (2*len) ~f:(fun i ->
    let c = str.[len-1 - i / 2] |> Char.to_int in
    if i % 2 = 0 then char_of_hex (c / 16) else char_of_hex (c % 16))

let difficulty t =
  let rec loop acc index =
    if index < 0 then acc
    else
      let c = String.get t index |> Char.to_int in
      loop (256. *. acc +. float c) (index - 1)
  in
  1.1579e77 /. loop 0. (String.length t - 1)

include String

let sexp_of_t t = Sexp.Atom (to_hex t)

let t_of_sexp = function
  | Sexp.Atom s -> of_hex s
  | _ -> failwith "Atom expected"

let consume iobuf = Iobuf.Consume.string iobuf ~len:32

let fill iobuf str =
  Iobuf.Fill.tail_padded_fixed_string iobuf str ~len:32 ~padding:'\000'

let zero = String.of_char_list (List.init 32 ~f:(fun _ -> '\000'))
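A quick reading of the of_hex/to_hex pair above: both walk the string from its far end, so the hex rendering is byte-reversed relative to the raw 32-byte hash, which matches the reversed display convention Bitcoin uses for block and transaction hashes. A minimal round-trip check, assuming the file is compiled as a Hash module (the module name and the sample digest below are illustrative, not taken from the repository):

(* 64 lower-case hex characters decode to 32 raw bytes; re-encoding them
   must give back the original string. *)
let () =
  let hex = "00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff" in
  let raw = Hash.of_hex hex in
  assert (String.length raw = 32);
  assert (String.equal (Hash.to_hex raw) hex)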
0273b430baa5feffc16fbe560b2a6e0520c3ec3df411de149cff3543735bedd1
elaforge/karya
Constants.hs
Copyright 2013 -- This program is distributed under the terms of the GNU General Public -- License 3.0, see COPYING or -3.0.txt module Perform.Lilypond.Constants where import qualified Data.Map as Map import qualified Data.Text as Text import qualified Util.Seq as Seq import qualified Derive.Env as Env import qualified Derive.ScoreT as ScoreT import qualified Derive.ShowVal as ShowVal import qualified Derive.Typecheck as Typecheck import qualified Instrument.Common as Common import qualified Instrument.Inst as Inst import qualified Instrument.InstT as InstT import Global import Types -- * ly-global instrument -- | This is a pseudo-instrument used to mark notes which are actually global lilypond directives . E.g. , changes , page breaks , movement titles . ly_global :: ScoreT.Instrument ly_global = ScoreT.Instrument "ly-global" ly_qualified :: InstT.Qualified ly_qualified = InstT.Qualified "ly" "global" ly_synth :: code -> Inst.SynthDecl code ly_synth code = Inst.SynthDecl "ly" "Fake synth for fake lilypond instrument." [ ("global" , Inst.Inst (Inst.Dummy dummy_doc) (Common.doc #= doc $ Common.common code) ) ] where dummy_doc = "fake instrument for lilypond directives" doc = "The lilypond deriver will automatically allocate `>ly-global`, and\ \ instruments with global lilypond directives will get this instrument." -- * code fragments -- | A free-standing code fragment is merged in with its nearest data FreeCodePosition = FreePrepend | FreeAppend deriving (Eq, Ord, Show, Enum, Bounded) instance Typecheck.Typecheck FreeCodePosition instance Typecheck.ToVal FreeCodePosition instance ShowVal.ShowVal FreeCodePosition where show_val FreePrepend = "prepend" show_val FreeAppend = "append" -- | A code fragment that has to be attached to notes. data CodePosition = CodePosition Attach Position Distribution deriving (Eq, Ord, Show) -- | Chord goes before or after the whole chord, Note goes before or after the -- individual pitch within the chord. data Attach = Chord | Note deriving (Eq, Ord, Show) data Position = Prepend | Append deriving (Eq, Ord, Show) all_positions :: [CodePosition] all_positions = [ CodePosition a p d | a <- [Chord, Note], p <- [Prepend, Append], d <- [First, Last, All] ] -- | If the note is split into multiple tied notes, which ones should get the -- code? 
data Distribution = First | Last | All deriving (Eq, Ord, Show) instance Pretty FreeCodePosition where pretty = showt instance Pretty CodePosition where pretty = showt position_key :: CodePosition -> Env.Key position_key (CodePosition attach pos distribution) = Text.intercalate "-" $ "ly" : [ case attach of Chord -> "chord" Note -> "note" , case pos of Prepend -> "prepend" Append -> "append" , case distribution of First -> "first" Last -> "last" All -> "all" ] key_position :: Env.Key -> Maybe CodePosition key_position k = Map.lookup k m where m = Map.fromList $ Seq.key_on position_key all_positions environ_code :: Env.Environ -> [(CodePosition, Text)] environ_code env = [ (code, val) | (Just code, Just val) <- map (bimap key_position Typecheck.from_val_simple) (Env.to_list env) ] with_code :: CodePosition -> Text -> Env.Environ -> Env.Environ with_code pos code env = Env.insert_val key (old <> code) env where old = fromMaybe "" $ Env.maybe_val key env key = position_key pos free_code_key :: FreeCodePosition -> Env.Key free_code_key FreePrepend = "ly-prepend" free_code_key FreeAppend = "ly-append" key_free_code :: Env.Key -> Maybe FreeCodePosition key_free_code "ly-prepend" = Just FreePrepend key_free_code "ly-append" = Just FreeAppend key_free_code _ = Nothing environ_free_code :: Env.Environ -> [(FreeCodePosition, Text)] environ_free_code env = [ (code, val) | (Just code, Just val) <- map (bimap key_free_code Typecheck.from_val_simple) (Env.to_list env) ] with_free_code :: FreeCodePosition -> Text -> Env.Environ -> Env.Environ with_free_code pos code = Env.insert_val (free_code_key pos) code -- ** other env keys -- | String: append after the pitch, and before the duration. This is for -- pitch modifiers like reminder accidentals (!) and cautionary accidentals ( ? ) . TODO this is n't integrated with ' CodePosition ' , but maybe could be . -- Would Prepend make any sense? v_append_pitch :: Env.Key v_append_pitch = "ly-append-pitch" | String : \"^\ " or \"_\ " , manually sets tie direction , if this note is -- tied. v_tie_direction :: Env.Key v_tie_direction = "ly-tie-direction" -- * tuplet -- | Set the env vars that signals that the lilypond converter should make -- the following notes into a tuplet. set_tuplet :: RealTime -- ^ score_dur is the visible duration in the score -> RealTime -- ^ real_dur is the duration it actually consumes, so 3 quarters into 1 whole will be 3/4 . -> Env.Environ set_tuplet score_dur real_dur = Env.from_list [ ("ly-tuplet-score-dur", Typecheck.to_val score_dur) , ("ly-tuplet-real-dur", Typecheck.to_val real_dur) ] get_tuplet :: Env.Environ -> Maybe (RealTime, RealTime) get_tuplet env = (,) <$> get "ly-tuplet-score-dur" <*> get "ly-tuplet-real-dur" where get k = Env.maybe_val k env -- * tremolo -- | This marks a tremolo event, which triggers special treatment for -- coincident notes. v_tremolo :: Env.Key v_tremolo = "ly-tremolo" -- * ly-global | String : should be parseable by , -- e.g. @\'3/4\'@. Used only on @>ly-global@ events. v_meter :: Env.Key v_meter = "ly-meter" -- | String: this has the same format as 'v_meter', but it affects the rhythmic -- spelling for the instrument. v_subdivision :: Env.Key v_subdivision = "ly-subdivision" -- | String: Gives the title of a new movement. An event with 'ly_global' -- instrument and this env val will cause a movement break. v_movement :: Env.Key v_movement = "ly-movement" -- * common code -- | Emit Ped___^___/ style pedal markings. mixed_pedal_style :: Text mixed_pedal_style = "\\set Staff.pedalSustainStyle = #'mixed"
null
https://raw.githubusercontent.com/elaforge/karya/89d1651424c35e564138d93424a157ff87457245/Perform/Lilypond/Constants.hs
haskell
This program is distributed under the terms of the GNU General Public License 3.0, see COPYING or -3.0.txt * ly-global instrument | This is a pseudo-instrument used to mark notes which are actually global * code fragments | A free-standing code fragment is merged in with its nearest | A code fragment that has to be attached to notes. | Chord goes before or after the whole chord, Note goes before or after the individual pitch within the chord. | If the note is split into multiple tied notes, which ones should get the code? ** other env keys | String: append after the pitch, and before the duration. This is for pitch modifiers like reminder accidentals (!) and cautionary accidentals Would Prepend make any sense? tied. * tuplet | Set the env vars that signals that the lilypond converter should make the following notes into a tuplet. ^ score_dur is the visible duration in the score ^ real_dur is the duration it actually consumes, so * tremolo | This marks a tremolo event, which triggers special treatment for coincident notes. * ly-global e.g. @\'3/4\'@. Used only on @>ly-global@ events. | String: this has the same format as 'v_meter', but it affects the rhythmic spelling for the instrument. | String: Gives the title of a new movement. An event with 'ly_global' instrument and this env val will cause a movement break. * common code | Emit Ped___^___/ style pedal markings.
Copyright 2013 module Perform.Lilypond.Constants where import qualified Data.Map as Map import qualified Data.Text as Text import qualified Util.Seq as Seq import qualified Derive.Env as Env import qualified Derive.ScoreT as ScoreT import qualified Derive.ShowVal as ShowVal import qualified Derive.Typecheck as Typecheck import qualified Instrument.Common as Common import qualified Instrument.Inst as Inst import qualified Instrument.InstT as InstT import Global import Types lilypond directives . E.g. , changes , page breaks , movement titles . ly_global :: ScoreT.Instrument ly_global = ScoreT.Instrument "ly-global" ly_qualified :: InstT.Qualified ly_qualified = InstT.Qualified "ly" "global" ly_synth :: code -> Inst.SynthDecl code ly_synth code = Inst.SynthDecl "ly" "Fake synth for fake lilypond instrument." [ ("global" , Inst.Inst (Inst.Dummy dummy_doc) (Common.doc #= doc $ Common.common code) ) ] where dummy_doc = "fake instrument for lilypond directives" doc = "The lilypond deriver will automatically allocate `>ly-global`, and\ \ instruments with global lilypond directives will get this instrument." data FreeCodePosition = FreePrepend | FreeAppend deriving (Eq, Ord, Show, Enum, Bounded) instance Typecheck.Typecheck FreeCodePosition instance Typecheck.ToVal FreeCodePosition instance ShowVal.ShowVal FreeCodePosition where show_val FreePrepend = "prepend" show_val FreeAppend = "append" data CodePosition = CodePosition Attach Position Distribution deriving (Eq, Ord, Show) data Attach = Chord | Note deriving (Eq, Ord, Show) data Position = Prepend | Append deriving (Eq, Ord, Show) all_positions :: [CodePosition] all_positions = [ CodePosition a p d | a <- [Chord, Note], p <- [Prepend, Append], d <- [First, Last, All] ] data Distribution = First | Last | All deriving (Eq, Ord, Show) instance Pretty FreeCodePosition where pretty = showt instance Pretty CodePosition where pretty = showt position_key :: CodePosition -> Env.Key position_key (CodePosition attach pos distribution) = Text.intercalate "-" $ "ly" : [ case attach of Chord -> "chord" Note -> "note" , case pos of Prepend -> "prepend" Append -> "append" , case distribution of First -> "first" Last -> "last" All -> "all" ] key_position :: Env.Key -> Maybe CodePosition key_position k = Map.lookup k m where m = Map.fromList $ Seq.key_on position_key all_positions environ_code :: Env.Environ -> [(CodePosition, Text)] environ_code env = [ (code, val) | (Just code, Just val) <- map (bimap key_position Typecheck.from_val_simple) (Env.to_list env) ] with_code :: CodePosition -> Text -> Env.Environ -> Env.Environ with_code pos code env = Env.insert_val key (old <> code) env where old = fromMaybe "" $ Env.maybe_val key env key = position_key pos free_code_key :: FreeCodePosition -> Env.Key free_code_key FreePrepend = "ly-prepend" free_code_key FreeAppend = "ly-append" key_free_code :: Env.Key -> Maybe FreeCodePosition key_free_code "ly-prepend" = Just FreePrepend key_free_code "ly-append" = Just FreeAppend key_free_code _ = Nothing environ_free_code :: Env.Environ -> [(FreeCodePosition, Text)] environ_free_code env = [ (code, val) | (Just code, Just val) <- map (bimap key_free_code Typecheck.from_val_simple) (Env.to_list env) ] with_free_code :: FreeCodePosition -> Text -> Env.Environ -> Env.Environ with_free_code pos code = Env.insert_val (free_code_key pos) code ( ? ) . TODO this is n't integrated with ' CodePosition ' , but maybe could be . 
v_append_pitch :: Env.Key v_append_pitch = "ly-append-pitch" | String : \"^\ " or \"_\ " , manually sets tie direction , if this note is v_tie_direction :: Env.Key v_tie_direction = "ly-tie-direction" 3 quarters into 1 whole will be 3/4 . -> Env.Environ set_tuplet score_dur real_dur = Env.from_list [ ("ly-tuplet-score-dur", Typecheck.to_val score_dur) , ("ly-tuplet-real-dur", Typecheck.to_val real_dur) ] get_tuplet :: Env.Environ -> Maybe (RealTime, RealTime) get_tuplet env = (,) <$> get "ly-tuplet-score-dur" <*> get "ly-tuplet-real-dur" where get k = Env.maybe_val k env v_tremolo :: Env.Key v_tremolo = "ly-tremolo" | String : should be parseable by , v_meter :: Env.Key v_meter = "ly-meter" v_subdivision :: Env.Key v_subdivision = "ly-subdivision" v_movement :: Env.Key v_movement = "ly-movement" mixed_pedal_style :: Text mixed_pedal_style = "\\set Staff.pedalSustainStyle = #'mixed"
ecaad3e13471f5c735ea441cc60e609de447d78d5c4d28864696fa7776bcfb5d
replikativ/tablehike
build.clj
(ns build (:refer-clojure :exclude [test]) (:require [borkdude.gh-release-artifact :as gh] [clojure.tools.build.api :as b] [deps-deploy.deps-deploy :as dd] [clojure.string :as str]) (:import (clojure.lang ExceptionInfo))) (def lib 'io.replikativ/tablehike) (def version (format "0.1.%s" (b/git-count-revs nil))) (def current-commit (gh/current-commit)) (def class-dir "target/classes") (def basis (b/create-basis {:project "deps.edn"})) (def jar-file (format "target/%s-%s.jar" (name lib) version)) (defn clean [_] (b/delete {:path "target"})) (defn sha [{:keys [dir path] :or {dir "."}}] (-> {:command-args (cond-> ["git" "rev-parse" "HEAD"] path (conj "--" path)) :dir (.getPath (b/resolve-path dir)) :out :capture} b/process :out str/trim)) (defn jar [_] (b/write-pom {:class-dir class-dir :src-pom "./template/pom.xml" :lib lib :version version :basis basis :src-dirs ["src"] :scm {:tag (sha nil)}}) (b/copy-dir {:src-dirs ["src"] :target-dir class-dir}) (b/jar {:class-dir class-dir :jar-file jar-file})) (defn install [_] (clean nil) (jar nil) (b/install {:basis (b/create-basis {}) :lib lib :version version :jar-file jar-file :class-dir class-dir})) (defn release [_] (-> (try (gh/overwrite-asset {:org "replikativ" :repo (name lib) :tag version :commit current-commit :file jar-file :content-type "application/java-archive"}) (catch ExceptionInfo e (assoc (ex-data e) :failure? true))) :url println)) (defn deploy "Don't forget to set CLOJARS_USERNAME and CLOJARS_PASSWORD env vars." [_] (dd/deploy {:installer :remote :artifact jar-file :pom-file (b/pom-path {:lib lib :class-dir class-dir})}))
null
https://raw.githubusercontent.com/replikativ/tablehike/cd945e8c120a55056efc120d6bddb44b40c72b70/build.clj
clojure
(ns build (:refer-clojure :exclude [test]) (:require [borkdude.gh-release-artifact :as gh] [clojure.tools.build.api :as b] [deps-deploy.deps-deploy :as dd] [clojure.string :as str]) (:import (clojure.lang ExceptionInfo))) (def lib 'io.replikativ/tablehike) (def version (format "0.1.%s" (b/git-count-revs nil))) (def current-commit (gh/current-commit)) (def class-dir "target/classes") (def basis (b/create-basis {:project "deps.edn"})) (def jar-file (format "target/%s-%s.jar" (name lib) version)) (defn clean [_] (b/delete {:path "target"})) (defn sha [{:keys [dir path] :or {dir "."}}] (-> {:command-args (cond-> ["git" "rev-parse" "HEAD"] path (conj "--" path)) :dir (.getPath (b/resolve-path dir)) :out :capture} b/process :out str/trim)) (defn jar [_] (b/write-pom {:class-dir class-dir :src-pom "./template/pom.xml" :lib lib :version version :basis basis :src-dirs ["src"] :scm {:tag (sha nil)}}) (b/copy-dir {:src-dirs ["src"] :target-dir class-dir}) (b/jar {:class-dir class-dir :jar-file jar-file})) (defn install [_] (clean nil) (jar nil) (b/install {:basis (b/create-basis {}) :lib lib :version version :jar-file jar-file :class-dir class-dir})) (defn release [_] (-> (try (gh/overwrite-asset {:org "replikativ" :repo (name lib) :tag version :commit current-commit :file jar-file :content-type "application/java-archive"}) (catch ExceptionInfo e (assoc (ex-data e) :failure? true))) :url println)) (defn deploy "Don't forget to set CLOJARS_USERNAME and CLOJARS_PASSWORD env vars." [_] (dd/deploy {:installer :remote :artifact jar-file :pom-file (b/pom-path {:lib lib :class-dir class-dir})}))
e55bd58f77de5d139aa347055f3021425d4509d11850c8641d0ac9be81310c91
jacekschae/learn-datomic-course-files
db.clj
(ns cheffy.recipe.db (:require [datomic.client.api :as d]) (:import (java.util UUID))) (def recipe-pattern [:recipe/recipe-id :recipe/prep-time :recipe/display-name :recipe/image-url :recipe/public? :recipe/favorite-count {:recipe/owner [:account/account-id :account/display-name]} {:recipe/steps [:step/step-id :step/description :step/sort-order]} {:recipe/ingredients [:ingredient/ingredient-id :ingredient/display-name :ingredient/amount :ingredient/measure :ingredient/sort-order]}]) (defn find-all-recipes [{:keys [db]} {:keys [account-id]}] (let [public (mapv first (d/q '[:find (pull ?e pattern) :in $ pattern :where [?e :recipe/public? true]] db recipe-pattern))] (if account-id (let [drafts (mapv first (d/q '[:find (pull ?e pattern) :in $ ?account-id pattern :where [?owner :account/account-id ?account-id] [?e :recipe/owner ?owner] [?e :recipe/public? false]] db account-id recipe-pattern))] {:drafts drafts :public public}) {:public public}))) (comment (find-all-recipes {:db (d/db (:conn user/datomic))} {:account-id "auth0|5fbf7db6271d5e0076903601"}) ; public (mapv first (let [db (d/db (:conn user/datomic))] (d/q '[:find (pull ?e pattern) :in $ pattern :where [?e :recipe/public? true]] db recipe-pattern))) ; drafts (mapv first (let [db (d/db (:conn user/datomic)) account-id "auth0|5fbf7db6271d5e0076903601"] (d/q '[:find (pull ?e pattern) :in $ ?account-id pattern :where [?owner :account/account-id ?account-id] [?e :recipe/owner ?owner] [?e :recipe/public? false]] db account-id recipe-pattern))) ) (defn transact-recipe [{:keys [conn]} {:keys [recipe-id account-id name public prep-time img]}] (d/transact conn {:tx-data [{:recipe/recipe-id recipe-id :recipe/display-name name :recipe/public? (or public false) :recipe/prep-time prep-time :recipe/image-url img :recipe/owner [:account/account-id account-id]}]})) (defn find-recipe-by-id [{:keys [db]} {:keys [recipe-id]}] (ffirst (d/q '[:find (pull ?e pattern) :in $ ?recipe-id pattern :where [?e :recipe/recipe-id ?recipe-id]] db recipe-id recipe-pattern))) (comment (ffirst (let [db (d/db (:conn user/datomic))] (d/q '[:find (pull ?e pattern) :in $ ?recipe-id pattern :where [?e :recipe/recipe-id ?recipe-id]] db #uuid"a1995316-80ea-4a98-939d-7c6295e4bb46" recipe-pattern))) ) (defn retract-recipe [{:keys [conn]} {:keys [recipe-id]}] (d/transact conn {:tx-data [[:db/retractEntity [:recipe/recipe-id recipe-id]]]})) (defn transact-step [{:keys [conn]} {:keys [recipe-id step-id description sort]}] (d/transact conn {:tx-data [{:recipe/recipe-id (UUID/fromString recipe-id) :recipe/steps [{:step/step-id (UUID/fromString step-id) :step/description description :step/sort-order sort}]}]})) (defn retract-step [{:keys [conn]} {:keys [step-id]}] (d/transact conn {:tx-data [[:db/retractEntity [:step/step-id (UUID/fromString step-id)]]]})) (defn transact-ingredient []) (defn retract-ingredient []) (defn favorite-recipe []) (defn unfavorite-recipe [])
null
https://raw.githubusercontent.com/jacekschae/learn-datomic-course-files/941a0b2492bfaebe38564aed6a426b0ef87ccdca/increments/31-ingredients/src/main/cheffy/recipe/db.clj
clojure
public drafts
(ns cheffy.recipe.db (:require [datomic.client.api :as d]) (:import (java.util UUID))) (def recipe-pattern [:recipe/recipe-id :recipe/prep-time :recipe/display-name :recipe/image-url :recipe/public? :recipe/favorite-count {:recipe/owner [:account/account-id :account/display-name]} {:recipe/steps [:step/step-id :step/description :step/sort-order]} {:recipe/ingredients [:ingredient/ingredient-id :ingredient/display-name :ingredient/amount :ingredient/measure :ingredient/sort-order]}]) (defn find-all-recipes [{:keys [db]} {:keys [account-id]}] (let [public (mapv first (d/q '[:find (pull ?e pattern) :in $ pattern :where [?e :recipe/public? true]] db recipe-pattern))] (if account-id (let [drafts (mapv first (d/q '[:find (pull ?e pattern) :in $ ?account-id pattern :where [?owner :account/account-id ?account-id] [?e :recipe/owner ?owner] [?e :recipe/public? false]] db account-id recipe-pattern))] {:drafts drafts :public public}) {:public public}))) (comment (find-all-recipes {:db (d/db (:conn user/datomic))} {:account-id "auth0|5fbf7db6271d5e0076903601"}) (mapv first (let [db (d/db (:conn user/datomic))] (d/q '[:find (pull ?e pattern) :in $ pattern :where [?e :recipe/public? true]] db recipe-pattern))) (mapv first (let [db (d/db (:conn user/datomic)) account-id "auth0|5fbf7db6271d5e0076903601"] (d/q '[:find (pull ?e pattern) :in $ ?account-id pattern :where [?owner :account/account-id ?account-id] [?e :recipe/owner ?owner] [?e :recipe/public? false]] db account-id recipe-pattern))) ) (defn transact-recipe [{:keys [conn]} {:keys [recipe-id account-id name public prep-time img]}] (d/transact conn {:tx-data [{:recipe/recipe-id recipe-id :recipe/display-name name :recipe/public? (or public false) :recipe/prep-time prep-time :recipe/image-url img :recipe/owner [:account/account-id account-id]}]})) (defn find-recipe-by-id [{:keys [db]} {:keys [recipe-id]}] (ffirst (d/q '[:find (pull ?e pattern) :in $ ?recipe-id pattern :where [?e :recipe/recipe-id ?recipe-id]] db recipe-id recipe-pattern))) (comment (ffirst (let [db (d/db (:conn user/datomic))] (d/q '[:find (pull ?e pattern) :in $ ?recipe-id pattern :where [?e :recipe/recipe-id ?recipe-id]] db #uuid"a1995316-80ea-4a98-939d-7c6295e4bb46" recipe-pattern))) ) (defn retract-recipe [{:keys [conn]} {:keys [recipe-id]}] (d/transact conn {:tx-data [[:db/retractEntity [:recipe/recipe-id recipe-id]]]})) (defn transact-step [{:keys [conn]} {:keys [recipe-id step-id description sort]}] (d/transact conn {:tx-data [{:recipe/recipe-id (UUID/fromString recipe-id) :recipe/steps [{:step/step-id (UUID/fromString step-id) :step/description description :step/sort-order sort}]}]})) (defn retract-step [{:keys [conn]} {:keys [step-id]}] (d/transact conn {:tx-data [[:db/retractEntity [:step/step-id (UUID/fromString step-id)]]]})) (defn transact-ingredient []) (defn retract-ingredient []) (defn favorite-recipe []) (defn unfavorite-recipe [])
a6d6dfbb0f76263bd7c7963c1271a717d61216201811f79bba8deeea5bcbbca6
takikawa/racket-ppa
info.rkt
(module info setup/infotab (#%module-begin (define collection (quote multi)) (define deps (quote (("base" #:version "6.8.0.2") "compatibility-lib" "draw-lib" ("drracket-plugin-lib" #:version "1.1") "errortrace-lib" "html-lib" "images-gui-lib" "images-lib" "net-lib" "pconvert-lib" "plai-lib" "r5rs-lib" "sandbox-lib" "scheme-lib" "scribble-lib" ("simple-tree-text-markup-lib" #:version "1.1") "slideshow-lib" "snip-lib" "srfi-lite-lib" ("string-constants-lib" #:version "1.20") "typed-racket-lib" "typed-racket-more" "web-server-lib" "wxme-lib" ("gui-lib" #:version "1.52") "deinprogramm-signature" "pict-lib"))) (define build-deps (quote ("racket-index" "at-exp-lib" ("rackunit-lib" #:version "1.10")))) (define pkg-desc "implementation (no documentation) part of \"htdp\"") (define pkg-authors (quote (matthias mflatt robby ""))) (define version "1.8") (define license (quote (Apache-2.0 OR MIT)))))
null
https://raw.githubusercontent.com/takikawa/racket-ppa/caff086a1cd48208815cec2a22645a3091c11d4c/share/pkgs/htdp-lib/info.rkt
racket
(module info setup/infotab (#%module-begin (define collection (quote multi)) (define deps (quote (("base" #:version "6.8.0.2") "compatibility-lib" "draw-lib" ("drracket-plugin-lib" #:version "1.1") "errortrace-lib" "html-lib" "images-gui-lib" "images-lib" "net-lib" "pconvert-lib" "plai-lib" "r5rs-lib" "sandbox-lib" "scheme-lib" "scribble-lib" ("simple-tree-text-markup-lib" #:version "1.1") "slideshow-lib" "snip-lib" "srfi-lite-lib" ("string-constants-lib" #:version "1.20") "typed-racket-lib" "typed-racket-more" "web-server-lib" "wxme-lib" ("gui-lib" #:version "1.52") "deinprogramm-signature" "pict-lib"))) (define build-deps (quote ("racket-index" "at-exp-lib" ("rackunit-lib" #:version "1.10")))) (define pkg-desc "implementation (no documentation) part of \"htdp\"") (define pkg-authors (quote (matthias mflatt robby ""))) (define version "1.8") (define license (quote (Apache-2.0 OR MIT)))))
3a8297926aef1e92e2ab8f2190d1226286eb3ac7721e3e67e781d375a6696e92
arttuka/reagent-material-ui
propane_tank_rounded.cljs
(ns reagent-mui.icons.propane-tank-rounded "Imports @mui/icons-material/PropaneTankRounded as a Reagent component." (:require-macros [reagent-mui.util :refer [create-svg-icon e]]) (:require [react :as react] ["@mui/material/SvgIcon" :as SvgIcon] [reagent-mui.util])) (def propane-tank-rounded (create-svg-icon (e "path" #js {"d" "M4 15v3c0 2.21 1.79 4 4 4h8c2.21 0 4-1.79 4-4v-3H4zm16-2v-3c0-1.86-1.28-3.41-3-3.86V4c0-1.1-.9-2-2-2H9c-1.1 0-2 .9-2 2v2.14c-1.72.45-3 2-3 3.86v3h16zM9 4h6v2h-2c0-.55-.45-1-1-1s-1 .45-1 1H9V4z"}) "PropaneTankRounded"))
null
https://raw.githubusercontent.com/arttuka/reagent-material-ui/c7cd0d7c661ab9df5b0aed0213a6653a9a3f28ea/src/icons/reagent_mui/icons/propane_tank_rounded.cljs
clojure
(ns reagent-mui.icons.propane-tank-rounded "Imports @mui/icons-material/PropaneTankRounded as a Reagent component." (:require-macros [reagent-mui.util :refer [create-svg-icon e]]) (:require [react :as react] ["@mui/material/SvgIcon" :as SvgIcon] [reagent-mui.util])) (def propane-tank-rounded (create-svg-icon (e "path" #js {"d" "M4 15v3c0 2.21 1.79 4 4 4h8c2.21 0 4-1.79 4-4v-3H4zm16-2v-3c0-1.86-1.28-3.41-3-3.86V4c0-1.1-.9-2-2-2H9c-1.1 0-2 .9-2 2v2.14c-1.72.45-3 2-3 3.86v3h16zM9 4h6v2h-2c0-.55-.45-1-1-1s-1 .45-1 1H9V4z"}) "PropaneTankRounded"))
9f4b015cb483fdcc1c049ca9a9b6f7497b73089a4adc519536f7b69bc9a68323
input-output-hk/ouroboros-network
Serialisation.hs
# LANGUAGE FlexibleInstances # # LANGUAGE GADTs # {-# LANGUAGE LambdaCase #-} # LANGUAGE MultiParamTypeClasses # # LANGUAGE OverloadedStrings # # LANGUAGE RecordWildCards # {-# LANGUAGE ScopedTypeVariables #-} # OPTIONS_GHC -Wno - orphans # module Ouroboros.Consensus.Byron.Node.Serialisation () where import qualified Codec.CBOR.Decoding as CBOR import qualified Codec.CBOR.Encoding as CBOR import Codec.Serialise (decode, encode) import Control.Monad.Except import qualified Data.ByteString.Lazy as Lazy import qualified Data.ByteString.Short as Short import Cardano.Ledger.Binary (fromByronCBOR, toByronCBOR) import Cardano.Ledger.Binary.Plain import qualified Cardano.Chain.Block as CC import qualified Cardano.Chain.Byron.API as CC import Ouroboros.Network.Block (Serialised (..), unwrapCBORinCBOR, wrapCBORinCBOR) import Ouroboros.Network.SizeInBytes (SizeInBytes (..)) import Ouroboros.Consensus.Block import Ouroboros.Consensus.HeaderValidation import Ouroboros.Consensus.Ledger.SupportsMempool (GenTxId) import Ouroboros.Consensus.Node.Run import Ouroboros.Consensus.Node.Serialisation import Ouroboros.Consensus.Protocol.PBFT.State (PBftState) import Ouroboros.Consensus.Storage.Serialisation import Ouroboros.Consensus.Byron.Ledger import Ouroboros.Consensus.Byron.Ledger.Conversions import Ouroboros.Consensus.Byron.Protocol ------------------------------------------------------------------------------ EncodeDisk & DecodeDisk ------------------------------------------------------------------------------ EncodeDisk & DecodeDisk -------------------------------------------------------------------------------} instance HasBinaryBlockInfo ByronBlock where getBinaryBlockInfo = byronBinaryBlockInfo instance SerialiseDiskConstraints ByronBlock instance EncodeDisk ByronBlock ByronBlock where encodeDisk _ = encodeByronBlock instance DecodeDisk ByronBlock (Lazy.ByteString -> ByronBlock) where decodeDisk ccfg = decodeByronBlock (getByronEpochSlots ccfg) instance EncodeDisk ByronBlock (LedgerState ByronBlock) where encodeDisk _ = encodeByronLedgerState instance DecodeDisk ByronBlock (LedgerState ByronBlock) where decodeDisk _ = decodeByronLedgerState | @'ChainDepState ' ( ' BlockProtocol ' ' ByronBlock')@ instance EncodeDisk ByronBlock (PBftState PBftByronCrypto) where encodeDisk _ = encodeByronChainDepState | @'ChainDepState ' ( ' BlockProtocol ' ' ByronBlock')@ instance DecodeDisk ByronBlock (PBftState PBftByronCrypto) where decodeDisk _ = decodeByronChainDepState instance EncodeDisk ByronBlock (AnnTip ByronBlock) where encodeDisk _ = encodeByronAnnTip instance DecodeDisk ByronBlock (AnnTip ByronBlock) where decodeDisk _ = decodeByronAnnTip ------------------------------------------------------------------------------ SerialiseNodeToNode ------------------------------------------------------------------------------ SerialiseNodeToNode -------------------------------------------------------------------------------} instance SerialiseNodeToNodeConstraints ByronBlock where estimateBlockSize = byronHeaderBlockSizeHint -- | CBOR-in-CBOR for the annotation. This also makes it compatible with the -- wrapped ('Serialised') variant. instance SerialiseNodeToNode ByronBlock ByronBlock where encodeNodeToNode _ _ = wrapCBORinCBOR encodeByronBlock decodeNodeToNode ccfg _ = unwrapCBORinCBOR (decodeByronBlock epochSlots) where epochSlots = getByronEpochSlots ccfg instance SerialiseNodeToNode ByronBlock (Header ByronBlock) where encodeNodeToNode ccfg = \case ByronNodeToNodeVersion1 -> wrapCBORinCBOR $ encodeUnsizedHeader . 
fst . splitSizeHint ByronNodeToNodeVersion2 -> encodeDisk ccfg . unnest decodeNodeToNode ccfg = \case ByronNodeToNodeVersion1 -> unwrapCBORinCBOR $ (flip joinSizeHint fakeByronBlockSizeHint .) <$> decodeUnsizedHeader epochSlots ByronNodeToNodeVersion2 -> nest <$> decodeDisk ccfg where epochSlots = getByronEpochSlots ccfg -- | 'Serialised' uses CBOR-in-CBOR by default. instance SerialiseNodeToNode ByronBlock (Serialised ByronBlock) -- Default instance instance SerialiseNodeToNode ByronBlock (SerialisedHeader ByronBlock) where encodeNodeToNode ccfg version = case version of -- Drop the context and add the tag, encode that using CBOR-in-CBOR ByronNodeToNodeVersion1 -> encode . Serialised . addV1Envelope . aux . serialisedHeaderToDepPair where aux :: GenDepPair Serialised (f blk) -> (SomeSecond f blk, Lazy.ByteString) aux (GenDepPair ix (Serialised bytes)) = (SomeSecond ix, bytes) ByronNodeToNodeVersion2 -> encodeDisk ccfg decodeNodeToNode ccfg version = case version of ByronNodeToNodeVersion1 -> do bs <- unSerialised <$> decode either fail (return . SerialisedHeaderFromDepPair) $ runExcept $ aux <$> dropV1Envelope bs where aux :: (SomeSecond f blk, Lazy.ByteString) -> GenDepPair Serialised (f blk) aux (SomeSecond ix, bytes) = GenDepPair ix (Serialised bytes) ByronNodeToNodeVersion2 -> decodeDisk ccfg -- | No CBOR-in-CBOR, because we check for canonical encodings, which means we -- can use the recomputed encoding for the annotation. instance SerialiseNodeToNode ByronBlock (GenTx ByronBlock) where encodeNodeToNode _ _ = encodeByronGenTx decodeNodeToNode _ _ = decodeByronGenTx instance SerialiseNodeToNode ByronBlock (GenTxId ByronBlock) where encodeNodeToNode _ _ = encodeByronGenTxId decodeNodeToNode _ _ = decodeByronGenTxId {------------------------------------------------------------------------------- SerialiseNodeToClient -------------------------------------------------------------------------------} instance SerialiseNodeToClientConstraints ByronBlock -- | CBOR-in-CBOR for the annotation. This also makes it compatible with the -- wrapped ('Serialised') variant. instance SerialiseNodeToClient ByronBlock ByronBlock where encodeNodeToClient _ _ = wrapCBORinCBOR encodeByronBlock decodeNodeToClient ccfg _ = unwrapCBORinCBOR (decodeByronBlock epochSlots) where epochSlots = getByronEpochSlots ccfg -- | 'Serialised' uses CBOR-in-CBOR by default. instance SerialiseNodeToClient ByronBlock (Serialised ByronBlock) -- Default instance -- | No CBOR-in-CBOR, because we check for canonical encodings, which means we -- can use the recomputed encoding for the annotation. 
instance SerialiseNodeToClient ByronBlock (GenTx ByronBlock) where encodeNodeToClient _ _ = encodeByronGenTx decodeNodeToClient _ _ = decodeByronGenTx instance SerialiseNodeToClient ByronBlock (GenTxId ByronBlock) where encodeNodeToClient _ _ = encodeByronGenTxId decodeNodeToClient _ _ = decodeByronGenTxId instance SerialiseNodeToClient ByronBlock SlotNo where encodeNodeToClient _ _ = toByronCBOR decodeNodeToClient _ _ = fromByronCBOR | @'ApplyTxErr ' ' ByronBlock'@ instance SerialiseNodeToClient ByronBlock CC.ApplyMempoolPayloadErr where encodeNodeToClient _ _ = encodeByronApplyTxError decodeNodeToClient _ _ = decodeByronApplyTxError instance SerialiseNodeToClient ByronBlock (SomeSecond BlockQuery ByronBlock) where encodeNodeToClient _ _ (SomeSecond q) = encodeByronQuery q decodeNodeToClient _ _ = decodeByronQuery instance SerialiseResult ByronBlock (BlockQuery ByronBlock) where encodeResult _ _ = encodeByronResult decodeResult _ _ = decodeByronResult {------------------------------------------------------------------------------- Nested contents -------------------------------------------------------------------------------} instance ReconstructNestedCtxt Header ByronBlock where reconstructPrefixLen _ = PrefixLen 2 reconstructNestedCtxt _proxy prefix size = The first byte is @encodeListLen 2@ , the second ( index 1 ) is 0 for EBB , 1 for regular block case Short.index prefix 1 of 0 -> SomeSecond $ NestedCtxt (CtxtByronBoundary size) 1 -> SomeSecond $ NestedCtxt (CtxtByronRegular size) _ -> error $ "invalid ByronBlock with prefix: " <> show prefix instance EncodeDiskDepIx (NestedCtxt Header) ByronBlock where encodeDiskDepIx _ccfg (SomeSecond (NestedCtxt ctxt)) = mconcat [ CBOR.encodeListLen 2 , case ctxt of CtxtByronBoundary size -> mconcat [ CBOR.encodeWord8 0 , CBOR.encodeWord32 (getSizeInBytes size) ] CtxtByronRegular size -> mconcat [ CBOR.encodeWord8 1 , CBOR.encodeWord32 (getSizeInBytes size) ] ] instance EncodeDiskDep (NestedCtxt Header) ByronBlock where encodeDiskDep _ccfg (NestedCtxt ctxt) h = case ctxt of CtxtByronRegular _size -> encodeByronRegularHeader h CtxtByronBoundary _size -> We do n't encode the ' SlotNo ' -- This is important, because this encoder/decoder must be compatible with the raw bytes as stored on disk as part of a block . encodeByronBoundaryHeader (snd h) instance DecodeDiskDepIx (NestedCtxt Header) ByronBlock where decodeDiskDepIx _ccfg = do enforceSize "decodeDiskDepIx ByronBlock" 2 CBOR.decodeWord8 >>= \case 0 -> SomeSecond . NestedCtxt . CtxtByronBoundary . SizeInBytes <$> CBOR.decodeWord32 1 -> SomeSecond . NestedCtxt . CtxtByronRegular . SizeInBytes <$> CBOR.decodeWord32 t -> cborError $ DecoderErrorUnknownTag "decodeDiskDepIx ByronBlock" t instance DecodeDiskDep (NestedCtxt Header) ByronBlock where decodeDiskDep ByronCodecConfig{..} (NestedCtxt ctxt) = case ctxt of CtxtByronRegular _size -> decodeByronRegularHeader getByronEpochSlots CtxtByronBoundary _size -> auxBoundary <$> decodeByronBoundaryHeader where auxBoundary :: (Lazy.ByteString -> RawBoundaryHeader) -> (Lazy.ByteString -> (SlotNo, RawBoundaryHeader)) auxBoundary f bs = (slotNo, hdr) where hdr :: RawBoundaryHeader hdr = f bs slotNo :: SlotNo slotNo = fromByronSlotNo $ CC.boundaryBlockSlot getByronEpochSlots (CC.boundaryEpoch hdr)
null
https://raw.githubusercontent.com/input-output-hk/ouroboros-network/17889be3e1b6d9b5ee86022b91729837051e6fbb/ouroboros-consensus-byron/src/Ouroboros/Consensus/Byron/Node/Serialisation.hs
haskell
# LANGUAGE LambdaCase # # LANGUAGE ScopedTypeVariables # ---------------------------------------------------------------------------- ---------------------------------------------------------------------------- -----------------------------------------------------------------------------} ---------------------------------------------------------------------------- ---------------------------------------------------------------------------- -----------------------------------------------------------------------------} | CBOR-in-CBOR for the annotation. This also makes it compatible with the wrapped ('Serialised') variant. | 'Serialised' uses CBOR-in-CBOR by default. Default instance Drop the context and add the tag, encode that using CBOR-in-CBOR | No CBOR-in-CBOR, because we check for canonical encodings, which means we can use the recomputed encoding for the annotation. ------------------------------------------------------------------------------ SerialiseNodeToClient ------------------------------------------------------------------------------ | CBOR-in-CBOR for the annotation. This also makes it compatible with the wrapped ('Serialised') variant. | 'Serialised' uses CBOR-in-CBOR by default. Default instance | No CBOR-in-CBOR, because we check for canonical encodings, which means we can use the recomputed encoding for the annotation. ------------------------------------------------------------------------------ Nested contents ------------------------------------------------------------------------------ This is important, because this encoder/decoder must be compatible
# LANGUAGE FlexibleInstances # # LANGUAGE GADTs # # LANGUAGE MultiParamTypeClasses # # LANGUAGE OverloadedStrings # # LANGUAGE RecordWildCards # # OPTIONS_GHC -Wno - orphans # module Ouroboros.Consensus.Byron.Node.Serialisation () where import qualified Codec.CBOR.Decoding as CBOR import qualified Codec.CBOR.Encoding as CBOR import Codec.Serialise (decode, encode) import Control.Monad.Except import qualified Data.ByteString.Lazy as Lazy import qualified Data.ByteString.Short as Short import Cardano.Ledger.Binary (fromByronCBOR, toByronCBOR) import Cardano.Ledger.Binary.Plain import qualified Cardano.Chain.Block as CC import qualified Cardano.Chain.Byron.API as CC import Ouroboros.Network.Block (Serialised (..), unwrapCBORinCBOR, wrapCBORinCBOR) import Ouroboros.Network.SizeInBytes (SizeInBytes (..)) import Ouroboros.Consensus.Block import Ouroboros.Consensus.HeaderValidation import Ouroboros.Consensus.Ledger.SupportsMempool (GenTxId) import Ouroboros.Consensus.Node.Run import Ouroboros.Consensus.Node.Serialisation import Ouroboros.Consensus.Protocol.PBFT.State (PBftState) import Ouroboros.Consensus.Storage.Serialisation import Ouroboros.Consensus.Byron.Ledger import Ouroboros.Consensus.Byron.Ledger.Conversions import Ouroboros.Consensus.Byron.Protocol EncodeDisk & DecodeDisk EncodeDisk & DecodeDisk instance HasBinaryBlockInfo ByronBlock where getBinaryBlockInfo = byronBinaryBlockInfo instance SerialiseDiskConstraints ByronBlock instance EncodeDisk ByronBlock ByronBlock where encodeDisk _ = encodeByronBlock instance DecodeDisk ByronBlock (Lazy.ByteString -> ByronBlock) where decodeDisk ccfg = decodeByronBlock (getByronEpochSlots ccfg) instance EncodeDisk ByronBlock (LedgerState ByronBlock) where encodeDisk _ = encodeByronLedgerState instance DecodeDisk ByronBlock (LedgerState ByronBlock) where decodeDisk _ = decodeByronLedgerState | @'ChainDepState ' ( ' BlockProtocol ' ' ByronBlock')@ instance EncodeDisk ByronBlock (PBftState PBftByronCrypto) where encodeDisk _ = encodeByronChainDepState | @'ChainDepState ' ( ' BlockProtocol ' ' ByronBlock')@ instance DecodeDisk ByronBlock (PBftState PBftByronCrypto) where decodeDisk _ = decodeByronChainDepState instance EncodeDisk ByronBlock (AnnTip ByronBlock) where encodeDisk _ = encodeByronAnnTip instance DecodeDisk ByronBlock (AnnTip ByronBlock) where decodeDisk _ = decodeByronAnnTip SerialiseNodeToNode SerialiseNodeToNode instance SerialiseNodeToNodeConstraints ByronBlock where estimateBlockSize = byronHeaderBlockSizeHint instance SerialiseNodeToNode ByronBlock ByronBlock where encodeNodeToNode _ _ = wrapCBORinCBOR encodeByronBlock decodeNodeToNode ccfg _ = unwrapCBORinCBOR (decodeByronBlock epochSlots) where epochSlots = getByronEpochSlots ccfg instance SerialiseNodeToNode ByronBlock (Header ByronBlock) where encodeNodeToNode ccfg = \case ByronNodeToNodeVersion1 -> wrapCBORinCBOR $ encodeUnsizedHeader . fst . splitSizeHint ByronNodeToNodeVersion2 -> encodeDisk ccfg . unnest decodeNodeToNode ccfg = \case ByronNodeToNodeVersion1 -> unwrapCBORinCBOR $ (flip joinSizeHint fakeByronBlockSizeHint .) <$> decodeUnsizedHeader epochSlots ByronNodeToNodeVersion2 -> nest <$> decodeDisk ccfg where epochSlots = getByronEpochSlots ccfg instance SerialiseNodeToNode ByronBlock (Serialised ByronBlock) instance SerialiseNodeToNode ByronBlock (SerialisedHeader ByronBlock) where encodeNodeToNode ccfg version = case version of ByronNodeToNodeVersion1 -> encode . Serialised . addV1Envelope . aux . 
serialisedHeaderToDepPair where aux :: GenDepPair Serialised (f blk) -> (SomeSecond f blk, Lazy.ByteString) aux (GenDepPair ix (Serialised bytes)) = (SomeSecond ix, bytes) ByronNodeToNodeVersion2 -> encodeDisk ccfg decodeNodeToNode ccfg version = case version of ByronNodeToNodeVersion1 -> do bs <- unSerialised <$> decode either fail (return . SerialisedHeaderFromDepPair) $ runExcept $ aux <$> dropV1Envelope bs where aux :: (SomeSecond f blk, Lazy.ByteString) -> GenDepPair Serialised (f blk) aux (SomeSecond ix, bytes) = GenDepPair ix (Serialised bytes) ByronNodeToNodeVersion2 -> decodeDisk ccfg instance SerialiseNodeToNode ByronBlock (GenTx ByronBlock) where encodeNodeToNode _ _ = encodeByronGenTx decodeNodeToNode _ _ = decodeByronGenTx instance SerialiseNodeToNode ByronBlock (GenTxId ByronBlock) where encodeNodeToNode _ _ = encodeByronGenTxId decodeNodeToNode _ _ = decodeByronGenTxId instance SerialiseNodeToClientConstraints ByronBlock instance SerialiseNodeToClient ByronBlock ByronBlock where encodeNodeToClient _ _ = wrapCBORinCBOR encodeByronBlock decodeNodeToClient ccfg _ = unwrapCBORinCBOR (decodeByronBlock epochSlots) where epochSlots = getByronEpochSlots ccfg instance SerialiseNodeToClient ByronBlock (Serialised ByronBlock) instance SerialiseNodeToClient ByronBlock (GenTx ByronBlock) where encodeNodeToClient _ _ = encodeByronGenTx decodeNodeToClient _ _ = decodeByronGenTx instance SerialiseNodeToClient ByronBlock (GenTxId ByronBlock) where encodeNodeToClient _ _ = encodeByronGenTxId decodeNodeToClient _ _ = decodeByronGenTxId instance SerialiseNodeToClient ByronBlock SlotNo where encodeNodeToClient _ _ = toByronCBOR decodeNodeToClient _ _ = fromByronCBOR | @'ApplyTxErr ' ' ByronBlock'@ instance SerialiseNodeToClient ByronBlock CC.ApplyMempoolPayloadErr where encodeNodeToClient _ _ = encodeByronApplyTxError decodeNodeToClient _ _ = decodeByronApplyTxError instance SerialiseNodeToClient ByronBlock (SomeSecond BlockQuery ByronBlock) where encodeNodeToClient _ _ (SomeSecond q) = encodeByronQuery q decodeNodeToClient _ _ = decodeByronQuery instance SerialiseResult ByronBlock (BlockQuery ByronBlock) where encodeResult _ _ = encodeByronResult decodeResult _ _ = decodeByronResult instance ReconstructNestedCtxt Header ByronBlock where reconstructPrefixLen _ = PrefixLen 2 reconstructNestedCtxt _proxy prefix size = The first byte is @encodeListLen 2@ , the second ( index 1 ) is 0 for EBB , 1 for regular block case Short.index prefix 1 of 0 -> SomeSecond $ NestedCtxt (CtxtByronBoundary size) 1 -> SomeSecond $ NestedCtxt (CtxtByronRegular size) _ -> error $ "invalid ByronBlock with prefix: " <> show prefix instance EncodeDiskDepIx (NestedCtxt Header) ByronBlock where encodeDiskDepIx _ccfg (SomeSecond (NestedCtxt ctxt)) = mconcat [ CBOR.encodeListLen 2 , case ctxt of CtxtByronBoundary size -> mconcat [ CBOR.encodeWord8 0 , CBOR.encodeWord32 (getSizeInBytes size) ] CtxtByronRegular size -> mconcat [ CBOR.encodeWord8 1 , CBOR.encodeWord32 (getSizeInBytes size) ] ] instance EncodeDiskDep (NestedCtxt Header) ByronBlock where encodeDiskDep _ccfg (NestedCtxt ctxt) h = case ctxt of CtxtByronRegular _size -> encodeByronRegularHeader h CtxtByronBoundary _size -> We do n't encode the ' SlotNo ' with the raw bytes as stored on disk as part of a block . encodeByronBoundaryHeader (snd h) instance DecodeDiskDepIx (NestedCtxt Header) ByronBlock where decodeDiskDepIx _ccfg = do enforceSize "decodeDiskDepIx ByronBlock" 2 CBOR.decodeWord8 >>= \case 0 -> SomeSecond . NestedCtxt . CtxtByronBoundary . 
SizeInBytes <$> CBOR.decodeWord32 1 -> SomeSecond . NestedCtxt . CtxtByronRegular . SizeInBytes <$> CBOR.decodeWord32 t -> cborError $ DecoderErrorUnknownTag "decodeDiskDepIx ByronBlock" t instance DecodeDiskDep (NestedCtxt Header) ByronBlock where decodeDiskDep ByronCodecConfig{..} (NestedCtxt ctxt) = case ctxt of CtxtByronRegular _size -> decodeByronRegularHeader getByronEpochSlots CtxtByronBoundary _size -> auxBoundary <$> decodeByronBoundaryHeader where auxBoundary :: (Lazy.ByteString -> RawBoundaryHeader) -> (Lazy.ByteString -> (SlotNo, RawBoundaryHeader)) auxBoundary f bs = (slotNo, hdr) where hdr :: RawBoundaryHeader hdr = f bs slotNo :: SlotNo slotNo = fromByronSlotNo $ CC.boundaryBlockSlot getByronEpochSlots (CC.boundaryEpoch hdr)
bcb768886ec3c4a7d7b82f809288f52cfda59794c425f28cd220c0a591b47fad
matsen/pplacer
test_json.ml
open Ppatteries open OUnit open Test_util let suite = List.map (fun fname -> let name = Filename.basename fname in name >:: match String.sub name 0 4 with | "pass" -> fun () -> let parsed = Json.of_file fname in let roundtrip = Json.of_string (Json.to_string parsed) in json_equal parsed roundtrip | "fail" -> fun () -> "parsing didn't fail" @? begin try let _ = Json.of_file fname in false with | Sparse.Parse_error _ -> true end | _ -> failwith (Printf.sprintf "unexpected json file %s" fname) ) (get_dir_contents ~pred:(flip MaybeZipped.check_suffix "jtest") (tests_dir ^ "data/json") |> List.of_enum)
null
https://raw.githubusercontent.com/matsen/pplacer/f40a363e962cca7131f1f2d372262e0081ff1190/tests/json/test_json.ml
ocaml
open Ppatteries open OUnit open Test_util let suite = List.map (fun fname -> let name = Filename.basename fname in name >:: match String.sub name 0 4 with | "pass" -> fun () -> let parsed = Json.of_file fname in let roundtrip = Json.of_string (Json.to_string parsed) in json_equal parsed roundtrip | "fail" -> fun () -> "parsing didn't fail" @? begin try let _ = Json.of_file fname in false with | Sparse.Parse_error _ -> true end | _ -> failwith (Printf.sprintf "unexpected json file %s" fname) ) (get_dir_contents ~pred:(flip MaybeZipped.check_suffix "jtest") (tests_dir ^ "data/json") |> List.of_enum)
6543a9e8bc6bde9157941492fd40fff7124c7e515b615e57b31878abec380904
pikatchu/LinearML
estOptim.ml
Copyright ( c ) 2011 , All rights reserved . Redistribution and use in source and binary forms , with or without modification , are permitted provided that the following conditions are met : 1 . Redistributions of source code must retain the above copyright notice , this list of conditions and the following disclaimer . 2 . Redistributions in binary form must reproduce the above copyright notice , this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution . 3 . Neither the name of nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission . THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . Copyright (c) 2011, Julien Verlaguet All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Julien Verlaguet nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*) open Utils open Est module BlockOccs = struct let add t x = let n = try IMap.find x t with Not_found -> 0 in let n = n+1 in IMap.add x n t let rec def df = let t = List.fold_left block IMap.empty df.df_body in add t ((List.hd df.df_body).bl_id) and block t bl = let t = List.fold_left equation t bl.bl_eqs in let t = ret t bl.bl_ret in t and equation t (_, e) = expr t e and ret t = function | Lreturn _ -> assert false | Return _ -> t | Jump lbl -> add t lbl | If (_, lbl1, lbl2) -> let t = add t lbl1 in let t = add t lbl2 in t | Match (_, al) -> List.fold_left ( fun t (_, l) -> add t l ) t al and expr t = function | Eif (_, lbl1, lbl2) -> let t = add t lbl1 in let t = add t lbl2 in t | Ecall lbl -> add t lbl | _ -> t end module Redirect = struct let add_block t bl = if bl.bl_eqs = [] && bl.bl_phi = [] then match bl.bl_ret with | Jump lbl -> IMap.add bl.bl_id lbl t | _ -> t else t let get x t = try IMap.find x t with Not_found -> x let rec def df = let t = List.fold_left add_block IMap.empty df.df_body in let body = List.map (block t) df.df_body in { df with df_body = body } and block t bl = let ret = return t bl.bl_ret in { bl with bl_phi = List.map (phi t) bl.bl_phi ; bl_ret = ret ; } and phi t (x, ty, l) = x, ty, List.map (fun (x, lbl) -> x, get lbl t) l and return t = function | Jump lbl -> Jump (get lbl t) | If (x, lbl1, lbl2) -> If (x, get lbl1 t, get lbl2 t) | Match (e, al) -> Match (e, List.map (action t) al) | x -> x and action t (p, lbl) = p, get lbl t end module InlineBlocks = struct let get_occur x t = try IMap.find x t with Not_found - > 0 let add_block acc bl = IMap.add bl.bl_id bl acc let rec def df = let t = BlockOccs.def df in let bls = List.fold_left add_block IMap.empty df.df_body in let body = ( block bls t ) df.df_body [ ] in { df with df_body = body } and block bls t bl acc = let eqs = equation bls t bl.bl_eqs in { bl with bl_eqs = eqs } : : and equation bls t = function | [ ] - > [ ] | [ Jump lbl ] when get_occur lbl t = 1 - > ( IMap.find lbl bls).bl_eqs | x : : rl - > x : : equation bls t rl end let get_occur x t = try IMap.find x t with Not_found -> 0 let add_block acc bl = IMap.add bl.bl_id bl acc let rec def df = let t = BlockOccs.def df in let bls = List.fold_left add_block IMap.empty df.df_body in let body = List.fold_right (block bls t) df.df_body [] in { df with df_body = body } and block bls t bl acc = let eqs = equation bls t bl.bl_eqs in { bl with bl_eqs = eqs } :: acc and equation bls t = function | [] -> [] | [Jump lbl] when get_occur lbl t = 1 -> (IMap.find lbl bls).bl_eqs | x :: rl -> x :: equation bls t rl end *) module Remove = struct let get_occur x t = try IMap.find x t with Not_found - > 0 let rec def df = let t = BlockOccs.def df in let body = ( block t ) df.df_body [ ] in { df with df_body = body } and block t bl acc = if get_occur bl.bl_id t = 0 then acc else bl : : acc end let get_occur x t = try IMap.find x t with Not_found -> 0 let rec def df = let t = BlockOccs.def df in let body = List.fold_right (block t) df.df_body [] in { df with df_body = body } and block t bl acc = if get_occur bl.bl_id t = 0 then acc else bl :: acc end *) let rec def df = let df = Redirect.def df in (* let df = InlineBlocks.def df in let df = Remove.def df in *) df
null
https://raw.githubusercontent.com/pikatchu/LinearML/76da04134b9eb3a9ca4252e9cb41d412b50a072a/compiler/estOptim.ml
ocaml
let df = InlineBlocks.def df in let df = Remove.def df in
Copyright ( c ) 2011 , All rights reserved . Redistribution and use in source and binary forms , with or without modification , are permitted provided that the following conditions are met : 1 . Redistributions of source code must retain the above copyright notice , this list of conditions and the following disclaimer . 2 . Redistributions in binary form must reproduce the above copyright notice , this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution . 3 . Neither the name of nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission . THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . Copyright (c) 2011, Julien Verlaguet All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Julien Verlaguet nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*) open Utils open Est module BlockOccs = struct let add t x = let n = try IMap.find x t with Not_found -> 0 in let n = n+1 in IMap.add x n t let rec def df = let t = List.fold_left block IMap.empty df.df_body in add t ((List.hd df.df_body).bl_id) and block t bl = let t = List.fold_left equation t bl.bl_eqs in let t = ret t bl.bl_ret in t and equation t (_, e) = expr t e and ret t = function | Lreturn _ -> assert false | Return _ -> t | Jump lbl -> add t lbl | If (_, lbl1, lbl2) -> let t = add t lbl1 in let t = add t lbl2 in t | Match (_, al) -> List.fold_left ( fun t (_, l) -> add t l ) t al and expr t = function | Eif (_, lbl1, lbl2) -> let t = add t lbl1 in let t = add t lbl2 in t | Ecall lbl -> add t lbl | _ -> t end module Redirect = struct let add_block t bl = if bl.bl_eqs = [] && bl.bl_phi = [] then match bl.bl_ret with | Jump lbl -> IMap.add bl.bl_id lbl t | _ -> t else t let get x t = try IMap.find x t with Not_found -> x let rec def df = let t = List.fold_left add_block IMap.empty df.df_body in let body = List.map (block t) df.df_body in { df with df_body = body } and block t bl = let ret = return t bl.bl_ret in { bl with bl_phi = List.map (phi t) bl.bl_phi ; bl_ret = ret ; } and phi t (x, ty, l) = x, ty, List.map (fun (x, lbl) -> x, get lbl t) l and return t = function | Jump lbl -> Jump (get lbl t) | If (x, lbl1, lbl2) -> If (x, get lbl1 t, get lbl2 t) | Match (e, al) -> Match (e, List.map (action t) al) | x -> x and action t (p, lbl) = p, get lbl t end module InlineBlocks = struct let get_occur x t = try IMap.find x t with Not_found - > 0 let add_block acc bl = IMap.add bl.bl_id bl acc let rec def df = let t = BlockOccs.def df in let bls = List.fold_left add_block IMap.empty df.df_body in let body = ( block bls t ) df.df_body [ ] in { df with df_body = body } and block bls t bl acc = let eqs = equation bls t bl.bl_eqs in { bl with bl_eqs = eqs } : : and equation bls t = function | [ ] - > [ ] | [ Jump lbl ] when get_occur lbl t = 1 - > ( IMap.find lbl bls).bl_eqs | x : : rl - > x : : equation bls t rl end let get_occur x t = try IMap.find x t with Not_found -> 0 let add_block acc bl = IMap.add bl.bl_id bl acc let rec def df = let t = BlockOccs.def df in let bls = List.fold_left add_block IMap.empty df.df_body in let body = List.fold_right (block bls t) df.df_body [] in { df with df_body = body } and block bls t bl acc = let eqs = equation bls t bl.bl_eqs in { bl with bl_eqs = eqs } :: acc and equation bls t = function | [] -> [] | [Jump lbl] when get_occur lbl t = 1 -> (IMap.find lbl bls).bl_eqs | x :: rl -> x :: equation bls t rl end *) module Remove = struct let get_occur x t = try IMap.find x t with Not_found - > 0 let rec def df = let t = BlockOccs.def df in let body = ( block t ) df.df_body [ ] in { df with df_body = body } and block t bl acc = if get_occur bl.bl_id t = 0 then acc else bl : : acc end let get_occur x t = try IMap.find x t with Not_found -> 0 let rec def df = let t = BlockOccs.def df in let body = List.fold_right (block t) df.df_body [] in { df with df_body = body } and block t bl acc = if get_occur bl.bl_id t = 0 then acc else bl :: acc end *) let rec def df = let df = Redirect.def df in df
c144682f931e955e1816b633331bc5a60c99edf9b18ffb08d588cdfcff3a9364
mzp/coq-ruby
dischargedhypsmap.ml
(************************************************************************) (* v * The Coq Proof Assistant / The Coq Development Team *) (* <O___,, * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud *) (* \VV/ **************************************************************) (* // * This file is distributed under the terms of the *) (* * GNU Lesser General Public License Version 2.1 *) (************************************************************************) (* $Id: dischargedhypsmap.ml 9902 2007-06-21 17:01:21Z herbelin $ *) open Util open Libnames open Names open Term open Reduction open Declarations open Environ open Inductive open Libobject open Lib open Nametab type discharged_hyps = section_path list let discharged_hyps_map = ref Spmap.empty let set_discharged_hyps sp hyps = discharged_hyps_map := Spmap.add sp hyps !discharged_hyps_map let get_discharged_hyps sp = try Spmap.find sp !discharged_hyps_map with Not_found -> [] (*s Registration as global tables and rollback. *) let init () = discharged_hyps_map := Spmap.empty let freeze () = !discharged_hyps_map let unfreeze dhm = discharged_hyps_map := dhm let _ = Summary.declare_summary "discharged_hypothesis" { Summary.freeze_function = freeze; Summary.unfreeze_function = unfreeze; Summary.init_function = init; Summary.survive_module = false; Summary.survive_section = true }
null
https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/library/dischargedhypsmap.ml
ocaml
********************************************************************** // * This file is distributed under the terms of the * GNU Lesser General Public License Version 2.1 ********************************************************************** s Registration as global tables and rollback.
(* v * The Coq Proof Assistant / The Coq Development Team *) (* <O___,, * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud *) (* \VV/ **************************************************************) (* $Id: dischargedhypsmap.ml 9902 2007-06-21 17:01:21Z herbelin $ *) open Util open Libnames open Names open Term open Reduction open Declarations open Environ open Inductive open Libobject open Lib open Nametab type discharged_hyps = section_path list let discharged_hyps_map = ref Spmap.empty let set_discharged_hyps sp hyps = discharged_hyps_map := Spmap.add sp hyps !discharged_hyps_map let get_discharged_hyps sp = try Spmap.find sp !discharged_hyps_map with Not_found -> [] let init () = discharged_hyps_map := Spmap.empty let freeze () = !discharged_hyps_map let unfreeze dhm = discharged_hyps_map := dhm let _ = Summary.declare_summary "discharged_hypothesis" { Summary.freeze_function = freeze; Summary.unfreeze_function = unfreeze; Summary.init_function = init; Summary.survive_module = false; Summary.survive_section = true }
e1ba455b68dea7436ac7eb16ca4ecef4de2108e0903b0bb35c5206617cd0610e
dvanhorn/oaam
LK-instantiations.rkt
#lang racket (require (rename-in racket/generator [yield real-yield])) (require "LK.rkt" "data.rkt" "parse.rkt" "primitives.rkt" "fix.rkt" ;; different components of instantiantiations "lazy-strict.rkt" "context.rkt" "deltas.rkt" "generators.rkt" "store-passing.rkt" "imperative.rkt" "prealloc.rkt" "nonsparse.rkt" racket/splicing) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Concrete semantics (define (eval-widen b) (cond [(atomic? b) b] [else (error "Unknown base value" b)])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Potpourris of common parameterizations (define-syntax-rule (with-concrete body) (splicing-syntax-parameterize ([widen (make-rename-transformer #'eval-widen)]) body)) (define-syntax-rule (with-abstract body) (splicing-syntax-parameterize ([widen (make-rename-transformer #'flatten-value)]) body)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Potpourris of evaluators ;; "bl" (mk-set-fixpoint^ fix baseline-fixpoint baseline-ans?) (with-nonsparse (with-strict (with-0-ctx (with-whole-σ (with-σ-passing-set-monad (with-abstract (mk-analysis #:aval LK-baseline #:ans baseline-ans #:fixpoint baseline-fixpoint #:σ-passing #:wide #:set-monad))))))) (provide LK-baseline) ;; "pd" #;#; (mk-prealloc/∆s^-fixpoint prealloc/∆s-fixpoint/c prealloc/∆s-ans/c? prealloc/∆s-ans/c-v prealloc/∆s-touches-0/c) (with-nonsparse (with-lazy (with-0-ctx/prealloc (with-σ-∆s/prealloc! (with-abstract (mk-analysis #:aval LK-lazy-0cfa^/c/∆s/prealloc! #:prepare (λ (sexp) (prepare-prealloc parse-prog sexp)) #:ans prealloc/∆s-ans/c #:touches prealloc/∆s-touches-0/c #:fixpoint prealloc/∆s-fixpoint/c #:global-σ #:compiled #:wide)))))) (define LK-lazy-0cfa^/c/∆s/prealloc! values) (provide LK-lazy-0cfa^/c/∆s/prealloc!)
null
https://raw.githubusercontent.com/dvanhorn/oaam/79bc68ecb79fef45474a948deec1de90d255f307/code/LK-instantiations.rkt
racket
different components of instantiantiations Concrete semantics "bl" "pd" #;
#lang racket (require (rename-in racket/generator [yield real-yield])) (require "LK.rkt" "data.rkt" "parse.rkt" "primitives.rkt" "fix.rkt" "lazy-strict.rkt" "context.rkt" "deltas.rkt" "generators.rkt" "store-passing.rkt" "imperative.rkt" "prealloc.rkt" "nonsparse.rkt" racket/splicing) (define (eval-widen b) (cond [(atomic? b) b] [else (error "Unknown base value" b)])) Potpourris of common parameterizations (define-syntax-rule (with-concrete body) (splicing-syntax-parameterize ([widen (make-rename-transformer #'eval-widen)]) body)) (define-syntax-rule (with-abstract body) (splicing-syntax-parameterize ([widen (make-rename-transformer #'flatten-value)]) body)) Potpourris of evaluators (mk-set-fixpoint^ fix baseline-fixpoint baseline-ans?) (with-nonsparse (with-strict (with-0-ctx (with-whole-σ (with-σ-passing-set-monad (with-abstract (mk-analysis #:aval LK-baseline #:ans baseline-ans #:fixpoint baseline-fixpoint #:σ-passing #:wide #:set-monad))))))) (provide LK-baseline) (mk-prealloc/∆s^-fixpoint prealloc/∆s-fixpoint/c prealloc/∆s-ans/c? prealloc/∆s-ans/c-v prealloc/∆s-touches-0/c) (with-nonsparse (with-lazy (with-0-ctx/prealloc (with-σ-∆s/prealloc! (with-abstract (mk-analysis #:aval LK-lazy-0cfa^/c/∆s/prealloc! #:prepare (λ (sexp) (prepare-prealloc parse-prog sexp)) #:ans prealloc/∆s-ans/c #:touches prealloc/∆s-touches-0/c #:fixpoint prealloc/∆s-fixpoint/c #:global-σ #:compiled #:wide)))))) (define LK-lazy-0cfa^/c/∆s/prealloc! values) (provide LK-lazy-0cfa^/c/∆s/prealloc!)
0680e70e56843c7a6f75296abe0516faa17fb1fbd389975c042795aaf7677925
mmottl/gpr
cov_se_iso.ml
File : cov_se_iso.ml for OCaml Copyright ( C ) 2009- email : WWW : This library is free software ; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation ; either version 2.1 of the License , or ( at your option ) any later version . This library is distributed in the hope that it will be useful , but WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public License for more details . You should have received a copy of the GNU Lesser General Public License along with this library ; if not , write to the Free Software Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA OCaml-GPR - Gaussian Processes for OCaml Copyright (C) 2009- Markus Mottl email: WWW: This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *) open Interfaces open Core open Lacaml.D module Params = struct type t = { log_ell : float; log_sf2 : float } end type inducing_hyper = { ind : int; dim : int } module Eval = struct module Kernel = struct type params = Params.t type t = { params : params; inv_ell2 : float; inv_ell2_05 : float; log_sf2 : float; sf2 : float; } let create ({ Params.log_sf2; log_ell } as params) = let inv_ell2 = exp (-2. *. log_ell) in let inv_ell2_05 = -0.5 *. inv_ell2 in { params; inv_ell2; inv_ell2_05; log_sf2; sf2 = exp log_sf2 } let get_params k = k.params end open Kernel module Inducing = struct type t = mat let get_n_points = Mat.dim2 let calc_sqr_diff_mat inducing = let d = Mat.dim1 inducing in let m = Mat.dim2 inducing in let res = Mat.create m m in let ssqr_diff_ref = ref 0. in for c = 1 to m do for r = 1 to c - 1 do for i = 1 to d do let diff = inducing.{i, c} -. inducing.{i, r} in ssqr_diff_ref := !ssqr_diff_ref +. diff *. diff done; res.{r, c} <- !ssqr_diff_ref; ssqr_diff_ref := 0. done; res.{c, c} <- 0. done; res let calc_upper_with_sqr_diff_mat k sqr_diff_mat = let m = Mat.dim2 sqr_diff_mat in let res = Mat.create m m in let { inv_ell2_05; log_sf2; sf2 } = k in for c = 1 to m do for r = 1 to c - 1 do res.{r, c} <- exp (log_sf2 +. inv_ell2_05 *. sqr_diff_mat.{r, c}); done; res.{c, c} <- sf2; done; res let calc_upper k inducing = calc_upper_with_sqr_diff_mat k (calc_sqr_diff_mat inducing) end module Input = struct type t = vec let eval { Kernel.inv_ell2_05; log_sf2 } input inducing = let d = Mat.dim1 inducing in let m = Mat.dim2 inducing in let res = Vec.create m in let ssqr_diff_ref = ref 0. in for c = 1 to m do for i = 1 to d do let diff = input.{i} -. inducing.{i, c} in ssqr_diff_ref := !ssqr_diff_ref +. diff *. diff done; res.{c} <- exp (log_sf2 +. inv_ell2_05 *. 
!ssqr_diff_ref); ssqr_diff_ref := 0.; done; res let weighted_eval k input inducing ~coeffs = dot coeffs (eval k input inducing) let eval_one k _input = k.Kernel.sf2 end module Inputs = struct type t = mat let create = Mat.of_col_vecs let get_n_points = Mat.dim2 let choose_subset inputs indexes = Utils.choose_cols inputs indexes let create_inducing _kernel inputs = inputs let create_default_kernel_params _inputs ~n_inducing:_ = { Params.log_ell = 0.; log_sf2 = 0. } let calc_upper k inputs = Inducing.calc_upper k inputs let calc_diag k inputs = Vec.make (Mat.dim2 inputs) k.Kernel.sf2 let calc_sqr_diff_mat ~inputs ~inducing = let d = Mat.dim1 inducing in let m = Mat.dim2 inducing in let n = Mat.dim2 inputs in let res = Mat.create n m in let ssqr_diff_ref = ref 0. in for c = 1 to m do for r = 1 to n do for i = 1 to d do let diff = inputs.{i, r} -. inducing.{i, c} in ssqr_diff_ref := !ssqr_diff_ref +. diff *. diff done; res.{r, c} <- !ssqr_diff_ref; ssqr_diff_ref := 0. done done; res let calc_cross_with_sqr_diff_mat k sqr_diff_mat = let { Kernel.inv_ell2_05; log_sf2 } = k in let n = Mat.dim1 sqr_diff_mat in let m = Mat.dim2 sqr_diff_mat in let res = Mat.create n m in for c = 1 to m do for r = 1 to n do res.{r, c} <- exp (log_sf2 +. inv_ell2_05 *. sqr_diff_mat.{r, c}) done done; res let calc_cross k ~inputs ~inducing = calc_cross_with_sqr_diff_mat k (calc_sqr_diff_mat ~inputs ~inducing) let weighted_eval k ~inputs ~inducing ~coeffs = let sqr_diff_mat = calc_sqr_diff_mat ~inputs ~inducing in let n = Mat.dim1 sqr_diff_mat in let m = Mat.dim2 sqr_diff_mat in if Vec.dim coeffs <> m then failwith "Gpr.Cov_se_iso.Eval.Inputs.weighted_eval: dim(coeffs) <> m"; let { Kernel.inv_ell2_05; log_sf2 } = k in let rec loop r acc c = if c = 0 then acc else let el = coeffs.{c} *. exp (log_sf2 +. inv_ell2_05 *. sqr_diff_mat.{r, c}) in loop r (acc +. el) (c - 1) in Vec.init n (fun r -> loop r 0. 
m) end end module Deriv = struct module Eval = Eval type gen_deriv = [ `Log_ell | `Log_sf2 ] module Hyper = struct type t = [ gen_deriv | `Inducing_hyper of inducing_hyper ] let get_all _kernel inducing _inputs = let d = Mat.dim1 inducing in let m = Mat.dim2 inducing in let n_inducing_hypers = d * m in let n_all_hypers = 2 + n_inducing_hypers in let hypers = Array.create ~len:n_all_hypers `Log_ell in hypers.(1) <- `Log_sf2 ; for ind = 1 to m do let indd = (ind - 1) * d in for dim = 1 to d do let inducing_hyper = { ind; dim } in hypers.(1 + indd + dim) <- `Inducing_hyper inducing_hyper done done; hypers let get_value { Eval.Kernel.params } inducing _inputs = function | `Log_ell -> params.Params.log_ell | `Log_sf2 -> params.Params.log_sf2 | `Inducing_hyper { ind; dim } -> inducing.{dim, ind} let set_values { Eval.Kernel.params } inducing inputs hypers values = let { Params.log_ell; log_sf2 } = params in let log_ell_ref = ref log_ell in let log_sf2_ref = ref log_sf2 in let inducing_lazy = lazy (lacpy inducing) in for i = 1 to Array.length hypers do match hypers.(i - 1) with | `Log_ell -> log_ell_ref := values.{i} | `Log_sf2 -> log_sf2_ref := values.{i} | `Inducing_hyper { ind; dim } -> (Lazy.force inducing_lazy).{dim, ind} <- values.{i} done; let new_kernel = let log_ell = !log_ell_ref in Eval.Kernel.create { Params.log_ell; log_sf2 = !log_sf2_ref } in let lift lazy_value value = if Lazy.is_val lazy_value then Lazy.force lazy_value else value in let new_inducing = lift inducing_lazy inducing in new_kernel, new_inducing, inputs end type deriv_common = { kernel : Eval.Kernel.t; sqr_diff_mat : mat; eval_mat : mat; } module Inducing = struct type upper = Eval.Inducing.t * deriv_common let calc_shared_upper kernel eval_inducing = let module EI = Eval.Inducing in let sqr_diff_mat = EI.calc_sqr_diff_mat eval_inducing in let eval_mat = EI.calc_upper_with_sqr_diff_mat kernel sqr_diff_mat in eval_mat, (eval_inducing, { kernel; sqr_diff_mat; eval_mat }) let calc_deriv_upper (inducing, common) = function | `Log_sf2 -> `Factor 1. | `Log_ell -> let { sqr_diff_mat; eval_mat; kernel } = common in let m = Mat.dim1 sqr_diff_mat in let res = Mat.create m m in let { Eval.Kernel.inv_ell2 } = kernel in for c = 1 to m do for r = 1 to c - 1 do res.{r, c} <- eval_mat.{r, c} *. sqr_diff_mat.{r, c} *. inv_ell2 done; res.{c, c} <- 0.; done; `Dense res | `Inducing_hyper { ind; dim } -> let eval_mat = common.eval_mat in let m = Mat.dim2 eval_mat in let res = Mat.create 1 m in let inducing_dim = inducing.{dim, ind} in let inv_ell2 = common.kernel.Eval.Kernel.inv_ell2 in for i = 1 to ind - 1 do let ind_d = inducing.{dim, i} in res.{1, i} <- inv_ell2 *. (ind_d -. inducing_dim) *. eval_mat.{i, ind} done; res.{1, ind} <- 0.; for i = ind + 1 to m do let ind_d = inducing.{dim, i} in res.{1, i} <- inv_ell2 *. (ind_d -. inducing_dim) *. eval_mat.{ind, i} done; let rows = Sparse_indices.create 1 in rows.{1} <- ind; `Sparse_rows (res, rows) end module Inputs = struct type diag = Eval.Kernel.t type cross = Eval.Inputs.t * Eval.Inducing.t * deriv_common let calc_shared_diag k diag_eval_inputs = Eval.Inputs.calc_diag k diag_eval_inputs, k let calc_shared_cross kernel ~inputs ~inducing = let module EI = Eval.Inputs in let sqr_diff_mat = EI.calc_sqr_diff_mat ~inputs ~inducing in let eval_mat = EI.calc_cross_with_sqr_diff_mat kernel sqr_diff_mat in let shared = inputs, inducing, { kernel; sqr_diff_mat; eval_mat } in eval_mat, shared let calc_deriv_diag _diag = function | `Log_sf2 -> `Factor 1. | `Log_ell | `Inducing_hyper _ -> `Const 0. 
let calc_deriv_cross (inputs, inducing, common) = function | `Log_sf2 -> `Factor 1. | `Log_ell -> let { sqr_diff_mat; eval_mat; kernel } = common in let n = Mat.dim1 sqr_diff_mat in let m = Mat.dim2 sqr_diff_mat in let res = Mat.create n m in let { Eval.Kernel.inv_ell2 } = kernel in for c = 1 to m do for r = 1 to n do res.{r, c} <- eval_mat.{r, c} *. sqr_diff_mat.{r, c} *. inv_ell2 done done; `Dense res | `Inducing_hyper { ind; dim } -> let eval_mat = common.eval_mat in let n = Mat.dim1 eval_mat in let res = Mat.create n 1 in let indx_d = inducing.{dim, ind} in let inv_ell2 = common.kernel.Eval.Kernel.inv_ell2 in for r = 1 to n do let inp_d = inputs.{dim, r} in res.{r, 1} <- inv_ell2 *. (inp_d -. indx_d) *. eval_mat.{r, ind} done; let cols = Sparse_indices.create 1 in cols.{1} <- ind; `Sparse_cols (res, cols) end end
null
https://raw.githubusercontent.com/mmottl/gpr/64c4dce01b1779feff85d36d5902afa3b143bdae/src/cov_se_iso.ml
ocaml
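For reference, the covariance implemented by cov_se_iso.ml above is the isotropic squared-exponential (SE-iso) kernel. Reading off the code's parameterisation (inv_ell2 = exp (-2. *. log_ell), inv_ell2_05 = -0.5 *. inv_ell2, entries exp (log_sf2 +. inv_ell2_05 *. d2)), the evaluated entries and the derivatives used in the Deriv module are:

\[
k(\mathbf{x},\mathbf{x}') = \sigma_f^2 \exp\!\left(-\frac{\lVert\mathbf{x}-\mathbf{x}'\rVert^2}{2\ell^2}\right),
\qquad \ell = e^{\texttt{log\_ell}},\quad \sigma_f^2 = e^{\texttt{log\_sf2}},
\]
\[
\frac{\partial k}{\partial\,\texttt{log\_sf2}} = k(\mathbf{x},\mathbf{x}'),
\qquad
\frac{\partial k}{\partial\,\texttt{log\_ell}} = k(\mathbf{x},\mathbf{x}')\,\frac{\lVert\mathbf{x}-\mathbf{x}'\rVert^2}{\ell^2},
\]

which is why `Log_sf2 yields `Factor 1. while `Log_ell yields the eval_mat *. sqr_diff_mat *. inv_ell2 products in calc_deriv_upper and calc_deriv_cross.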
ac58264db48ecc45fbe87b4add22dede12bb42b45b1b7bfdab750455c58d5608
oliyh/pedestal-api
helpers_test.clj
(ns pedestal-api.helpers-test (:require [pedestal-api.core :as api] [pedestal-api.helpers :refer :all] [schema.core :as s] [io.pedestal.interceptor.helpers :as i] [clojure.test :refer :all])) (defbefore test-defbefore {:summary "A test interceptor" :parameters {:body-params {:age s/Int}} :responses {200 {:body s/Str}}} [{:keys [request] :as context}] (assoc context :response request)) (deftest defbefore-test (is (= ::test-defbefore (:name test-defbefore))) (is (= (meta (api/annotate {:summary "A test interceptor" :parameters {:body-params {:age s/Int}} :responses {200 {:body s/Str}}} (i/before ::test-defbefore (fn [{:keys [request] :as context}] (assoc context :response request))))) (meta test-defbefore))) (is (= {:request {:a 1} :response {:a 1}} ((:enter test-defbefore) {:request {:a 1}})))) (def test-before (before ::test-before {:summary "A test interceptor" :parameters {:body-params {:age s/Int}} :responses {200 {:body s/Str}}} (fn [{:keys [request] :as context}] (assoc context :response request)))) (deftest before-test (is (= ::test-before (:name test-before))) (is (= (meta (api/annotate {:summary "A test interceptor" :parameters {:body-params {:age s/Int}} :responses {200 {:body s/Str}}} (i/before ::test-before (fn [{:keys [request] :as context}] (assoc context :response request))))) (meta test-before))) (is (= {:request {:a 1} :response {:a 1}} ((:enter test-before) {:request {:a 1}}))))
null
https://raw.githubusercontent.com/oliyh/pedestal-api/1a90c29e97c3cccfe59a1f9d114765c104f70c13/test/pedestal_api/helpers_test.clj
clojure
564b8851306ac6a3af9fee3aef0a8a0ebd1de215d5bcf520ae366bfb0d29d377
clojure-interop/google-cloud-clients
SpeechSettings$Builder.clj
(ns com.google.cloud.speech.v1p1beta1.SpeechSettings$Builder "Builder for SpeechSettings." (:refer-clojure :only [require comment defn ->]) (:import [com.google.cloud.speech.v1p1beta1 SpeechSettings$Builder])) (defn get-stub-settings-builder "returns: `com.google.cloud.speech.v1p1beta1.stub.SpeechStubSettings$Builder`" (^com.google.cloud.speech.v1p1beta1.stub.SpeechStubSettings$Builder [^SpeechSettings$Builder this] (-> this (.getStubSettingsBuilder)))) (defn apply-to-all-unary-methods "Applies the given settings updater function to all of the unary API methods in this service. Note: This method does not support applying settings to streaming methods. settings-updater - `com.google.api.core.ApiFunction` returns: `com.google.cloud.speech.v1p1beta1.SpeechSettings$Builder` throws: java.lang.Exception" (^com.google.cloud.speech.v1p1beta1.SpeechSettings$Builder [^SpeechSettings$Builder this ^com.google.api.core.ApiFunction settings-updater] (-> this (.applyToAllUnaryMethods settings-updater)))) (defn recognize-settings "Returns the builder for the settings used for calls to recognize. returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.cloud.speech.v1p1beta1.RecognizeRequest,com.google.cloud.speech.v1p1beta1.RecognizeResponse>`" (^com.google.api.gax.rpc.UnaryCallSettings.Builder [^SpeechSettings$Builder this] (-> this (.recognizeSettings)))) (defn long-running-recognize-settings "Returns the builder for the settings used for calls to longRunningRecognize. returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallSettings.Builder [^SpeechSettings$Builder this] (-> this (.longRunningRecognizeSettings)))) (defn long-running-recognize-operation-settings "Returns the builder for the settings used for calls to longRunningRecognize. returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallSettings.Builder<com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest,com.google.cloud.speech.v1p1beta1.LongRunningRecognizeResponse,com.google.cloud.speech.v1p1beta1.LongRunningRecognizeMetadata>`" ([^SpeechSettings$Builder this] (-> this (.longRunningRecognizeOperationSettings)))) (defn streaming-recognize-settings "Returns the builder for the settings used for calls to streamingRecognize. returns: `com.google.api.gax.rpc.StreamingCallSettings.Builder<com.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest,com.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse>`" (^com.google.api.gax.rpc.StreamingCallSettings.Builder [^SpeechSettings$Builder this] (-> this (.streamingRecognizeSettings)))) (defn build "returns: `com.google.cloud.speech.v1p1beta1.SpeechSettings` throws: java.io.IOException" (^com.google.cloud.speech.v1p1beta1.SpeechSettings [^SpeechSettings$Builder this] (-> this (.build))))
null
https://raw.githubusercontent.com/clojure-interop/google-cloud-clients/80852d0496057c22f9cdc86d6f9ffc0fa3cd7904/com.google.cloud.speech/src/com/google/cloud/speech/v1p1beta1/SpeechSettings%24Builder.clj
clojure
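A minimal usage sketch for the Clojure wrapper above. It assumes the standard gax entry point SpeechSettings/newBuilder to obtain a builder instance; that constructor is not part of this namespace, so treat it as an assumption rather than something the wrapper provides:

  (import '(com.google.cloud.speech.v1p1beta1 SpeechSettings))
  ;; obtain a builder, inspect a per-call settings builder, then build the settings object
  (def builder (SpeechSettings/newBuilder))   ; assumed entry point, not wrapped above
  (recognize-settings builder)                ; => UnaryCallSettings$Builder for the recognize call
  (build builder)                             ; => SpeechSettings (build may throw java.io.IOException)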
7846f3fe9a88de9b6bc891a7a51965ad84024fd8d597256c0350187d646de94b
rd--/hsc3
expRandN.help.hs
expRandN ; two channel sin tones sinOsc ar (X.expRandNId 2 'α' 440 880) 0 * 0.1 expRandN ; n node klang synthesis let n = 240 f = X.expRandNId n 'α' 40 18000 a = X.expRandNId n 'β' 0.1 0.3 p = X.randNId n 'γ' (-1) 1 s = klangSpec_mce f a p in klang ar 1 0 s * 0.01 expRandN ; mce ... let f = X.expRandNId 2 'α' (mce2 440 441) 442 in sinOsc ar f 0 * 0.1
null
https://raw.githubusercontent.com/rd--/hsc3/60cb422f0e2049f00b7e15076b2667b85ad8f638/Help/Ugen/expRandN.help.hs
haskell
f93df4f900192163981c8ab0b3c2685fe128828a7474b500363dec31c7aa9e24
deadcode/Learning-CL--David-Touretzky
9.1.lisp
(defun say-this () (format t "~&There are old pilots,") (format t "~&and there are bold pilots,") (format t "~&but there are no old bold pilots.")) (say-this)
null
https://raw.githubusercontent.com/deadcode/Learning-CL--David-Touretzky/b4557c33f58e382f765369971e6a4747c27ca692/Chapter%209/9.1.lisp
lisp
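The ~& directive used above is Common Lisp's "fresh-line" directive: it emits a newline only if output is not already at the start of a line. clojure.pprint/cl-format implements the same directive syntax, so a near-verbatim Clojure port of the snippet, shown here only as an illustration, is:

  (require '[clojure.pprint :refer [cl-format]])
  (cl-format true "~&There are old pilots,")
  (cl-format true "~&and there are bold pilots,")
  (cl-format true "~&but there are no old bold pilots.")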
ab721a05c79bf617d8ac0eccae1b9facdc029049c295b54c767576d2532fe544
clojurewerkz/spyglass
transcoders.clj
(ns clojurewerkz.spyglass.transcoders "Transcoder is an interface for classes that convert between byte arrays and objects for storage in the cache." (:import [net.spy.memcached CachedData] [net.spy.memcached.transcoders Transcoder IntegerTranscoder LongTranscoder SerializingTranscoder WhalinTranscoder])) (defmulti make-transcoder identity) (defmethod make-transcoder :integer [_] (IntegerTranscoder.)) (defmethod make-transcoder :long [_] (LongTranscoder.)) (defmethod make-transcoder :whalin [_] (WhalinTranscoder.)) (defmethod make-transcoder :serializing [_] (SerializingTranscoder.))
null
https://raw.githubusercontent.com/clojurewerkz/spyglass/be6cad3f5ea7212eeb2ecb07a28dece7a658b710/src/clojure/clojurewerkz/spyglass/transcoders.clj
clojure
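Because the multimethod above dispatches on identity, choosing a transcoder is just a keyword lookup. A short REPL sketch of the intended usage:

  (make-transcoder :serializing)  ; => a net.spy.memcached.transcoders.SerializingTranscoder
  (make-transcoder :integer)      ; => an IntegerTranscoder
  (make-transcoder :whalin)       ; => a WhalinTranscoder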
f83b77a3ee927d3c14c3516a8b298d7882ba7d9f7e4440417a977c6c1210b68e
BranchTaken/Hemlock
i16.mli
* 16 - bit signed integer . See { ! module : ConvertIntf } for documentation on conversion functions . See {!module:ConvertIntf} for documentation on conversion functions. *) type t include IntnbIntf.SI with type t := t val trunc_of_zint: Zint.t -> t val extend_to_zint: t -> Zint.t val narrow_of_zint_opt: Zint.t -> t option val narrow_of_zint_hlt: Zint.t -> t val trunc_of_nat: Nat.t -> t val narrow_of_nat_opt: Nat.t -> t option val widen_to_nat_opt: t -> Nat.t option val narrow_of_nat_hlt: Nat.t -> t val widen_to_nat_hlt: t -> Nat.t val trunc_of_i512: I512.t -> t val extend_to_i512: t -> I512.t val narrow_of_i512_opt: I512.t -> t option val narrow_of_i512_hlt: I512.t -> t val trunc_of_u512: U512.t -> t val narrow_of_u512_opt: U512.t -> t option val widen_to_u512_opt: t -> U512.t option val narrow_of_u512_hlt: U512.t -> t val widen_to_u512_hlt: t -> U512.t val trunc_of_i256: I256.t -> t val extend_to_i256: t -> I256.t val narrow_of_i256_opt: I256.t -> t option val narrow_of_i256_hlt: I256.t -> t val trunc_of_u256: U256.t -> t val narrow_of_u256_opt: U256.t -> t option val widen_to_u256_opt: t -> U256.t option val narrow_of_u256_hlt: U256.t -> t val widen_to_u256_hlt: t -> U256.t val trunc_of_i128: I128.t -> t val extend_to_i128: t -> I128.t val narrow_of_i128_opt: I128.t -> t option val narrow_of_i128_hlt: I128.t -> t val trunc_of_u128: U128.t -> t val narrow_of_u128_opt: U128.t -> t option val widen_to_u128_opt: t -> U128.t option val narrow_of_u128_hlt: U128.t -> t val widen_to_u128_hlt: t -> U128.t include ConvertIntf.Nb with type t := t val trunc_of_i32: I32.t -> t val extend_to_i32: t -> I32.t val narrow_of_i32_opt: I32.t -> t option val narrow_of_i32_hlt: I32.t -> t val trunc_of_u32: U32.t -> t val narrow_of_u32_opt: U32.t -> t option val widen_to_u32_opt: t -> U32.t option val narrow_of_u32_hlt: U32.t -> t val widen_to_u32_hlt: t -> U32.t
null
https://raw.githubusercontent.com/BranchTaken/Hemlock/53da5c0d9cf0c94d58b4391735d917518eec67fa/bootstrap/src/basis/i16.mli
ocaml
6bf9389f3f73ced18d90bd5d9fcebb2291e963726d7f7b0b6acc9087c99ea963
pfdietz/ansi-test
remove-method.lsp
;-*- Mode: Lisp -*- Author : Created : Sun May 11 19:53:37 2003 ;;;; Contains: Tests of REMOVE-METHOD (defparameter *remove-meth-gf-01* (defgeneric remove-meth-gf-01 (x))) (defparameter *remove-meth-gf-01-method-t* (defmethod remove-meth-gf-01 ((x t)) x)) (defparameter *remove-meth-gf-02* (defgeneric remove-meth-gf-02 (x))) (defparameter *remove-meth-gf-02-method-t* (defmethod remove-meth-gf-02 ((x t)) x)) ;;; remove method must not signal an error if the method ;;; does not belong to the generic function (deftest remove-method.1 (and (eqt (remove-method *remove-meth-gf-01* *remove-meth-gf-02-method-t*) *remove-meth-gf-01*) (remove-meth-gf-01 :good)) :good) ;;; Add, then remove, a method (deftest remove-method.2 (let (meth) (values (remove-meth-gf-01 10) (progn (setf meth (eval '(defmethod remove-meth-gf-01 ((x integer)) (1+ x)))) nil) (remove-meth-gf-01 10) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth)) (remove-meth-gf-01 10))) 10 nil 11 t 10) Add two disjoint methods , then remove (deftest remove-method.3 (let (meth1 meth2) (values (mapcar #'remove-meth-gf-01 '(19 a)) (progn (setf meth1 (eval '(defmethod remove-meth-gf-01 ((x symbol)) (list x)))) (mapcar #'remove-meth-gf-01 '(19 a))) (progn (setf meth2 (eval '(defmethod remove-meth-gf-01 ((x number)) (1+ x)))) (mapcar #'remove-meth-gf-01 '(19 a))) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth1)) (mapcar #'remove-meth-gf-01 '(19 a)) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth2)) (mapcar #'remove-meth-gf-01 '(19 a)))) (19 a) (19 (a)) (20 (a)) t (20 a) t (19 a)) ;;; Remove in the other order (deftest remove-method.4 (let (meth1 meth2) (values (mapcar #'remove-meth-gf-01 '(19 a)) (progn (setf meth1 (eval '(defmethod remove-meth-gf-01 ((x symbol)) (list x)))) (mapcar #'remove-meth-gf-01 '(19 a))) (progn (setf meth2 (eval '(defmethod remove-meth-gf-01 ((x number)) (1+ x)))) (mapcar #'remove-meth-gf-01 '(19 a))) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth2)) (mapcar #'remove-meth-gf-01 '(19 a)) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth1)) (mapcar #'remove-meth-gf-01 '(19 a)))) (19 a) (19 (a)) (20 (a)) t (19 (a)) t (19 a)) Now methods that shadow one another (deftest remove-method.5 (let (meth1 meth2) (values (mapcar #'remove-meth-gf-01 '(10 20.0)) (progn (setf meth1 (eval '(defmethod remove-meth-gf-01 ((x integer)) (1- x)))) (mapcar #'remove-meth-gf-01 '(10 20.0))) (progn (setf meth2 (eval '(defmethod remove-meth-gf-01 ((x number)) (1+ x)))) (mapcar #'remove-meth-gf-01 '(10 20.0))) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth1)) (mapcar #'remove-meth-gf-01 '(10 20.0)) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth2)) (mapcar #'remove-meth-gf-01 '(10 20.0)))) (10 20.0) (9 20.0) (9 21.0) t (11 21.0) t (10 20.0)) (deftest remove-method.6 (let (meth1 meth2) (values (mapcar #'remove-meth-gf-01 '(10 20.0)) (progn (setf meth1 (eval '(defmethod remove-meth-gf-01 ((x integer)) (1- x)))) (mapcar #'remove-meth-gf-01 '(10 20.0))) (progn (setf meth2 (eval '(defmethod remove-meth-gf-01 ((x number)) (1+ x)))) (mapcar #'remove-meth-gf-01 '(10 20.0))) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth2)) (mapcar #'remove-meth-gf-01 '(10 20.0)) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth1)) (mapcar #'remove-meth-gf-01 '(10 20.0)))) (10 20.0) (9 20.0) (9 21.0) t (9 20.0) t (10 20.0)) (deftest remove-method.7 (let (meth1 meth2) (values (mapcar #'remove-meth-gf-01 '(10 20.0)) (progn (setf 
meth1 (eval '(defmethod remove-meth-gf-01 ((x number)) (1+ x)))) (mapcar #'remove-meth-gf-01 '(10 20.0))) (progn (setf meth2 (eval '(defmethod remove-meth-gf-01 ((x integer)) (1- x)))) (mapcar #'remove-meth-gf-01 '(10 20.0))) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth1)) (mapcar #'remove-meth-gf-01 '(10 20.0)) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth2)) (mapcar #'remove-meth-gf-01 '(10 20.0)))) (10 20.0) (11 21.0) (9 21.0) t (9 20.0) t (10 20.0)) (deftest remove-method.8 (let (meth1 meth2) (values (mapcar #'remove-meth-gf-01 '(10 20.0)) (progn (setf meth1 (eval '(defmethod remove-meth-gf-01 ((x number)) (1+ x)))) (mapcar #'remove-meth-gf-01 '(10 20.0))) (progn (setf meth2 (eval '(defmethod remove-meth-gf-01 ((x integer)) (1- x)))) (mapcar #'remove-meth-gf-01 '(10 20.0))) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth2)) (mapcar #'remove-meth-gf-01 '(10 20.0)) (eqt *remove-meth-gf-01* (remove-method *remove-meth-gf-01* meth1)) (mapcar #'remove-meth-gf-01 '(10 20.0)))) (10 20.0) (11 21.0) (9 21.0) t (11 21.0) t (10 20.0)) ;;; Adding and removing auxiliary methods (declaim (special *rmgf-03-var*)) (defparameter *remove-meth-gf-03* (defgeneric remove-meth-gf-03 (x))) (defparameter *remove-meth-gf-03-method-t* (defmethod remove-meth-gf-03 ((x t)) (list *rmgf-03-var* x))) (deftest remove-method.9 (let (meth (*rmgf-03-var* 0)) (values (mapcar #'remove-meth-gf-03 '(5 a)) (progn (setf meth (eval '(defmethod remove-meth-gf-03 :before ((x number)) (incf *rmgf-03-var*)))) (mapcar #'remove-meth-gf-03 '(5 a))) (eqt *remove-meth-gf-03* (remove-method *remove-meth-gf-03* meth)) (mapcar #'remove-meth-gf-03 '(5 a)))) ((0 5) (0 a)) ((1 5) (1 a)) t ((1 5) (1 a))) (deftest remove-method.10 (let (meth (*rmgf-03-var* 0)) (values (mapcar #'remove-meth-gf-03 '(5 a)) (progn (setf meth (eval '(defmethod remove-meth-gf-03 :after ((x number)) (incf *rmgf-03-var*)))) (mapcar #'remove-meth-gf-03 '(5 a))) (eqt *remove-meth-gf-03* (remove-method *remove-meth-gf-03* meth)) (mapcar #'remove-meth-gf-03 '(5 a)))) ((0 5) (0 a)) ((0 5) (1 a)) t ((1 5) (1 a))) (deftest remove-method.11 (let (meth (*rmgf-03-var* 0)) (values (mapcar #'remove-meth-gf-03 '(5 a)) (progn (setf meth (eval '(defmethod remove-meth-gf-03 :around ((x number)) (incf *rmgf-03-var*) (prog1 (call-next-method) (decf *rmgf-03-var*))))) (mapcar #'remove-meth-gf-03 '(5 a))) (eqt *remove-meth-gf-03* (remove-method *remove-meth-gf-03* meth)) (mapcar #'remove-meth-gf-03 '(5 a)))) ((0 5) (0 a)) ((1 5) (0 a)) t ((0 5) (0 a))) ;;; Must add tests for nonstandard method combinations
null
https://raw.githubusercontent.com/pfdietz/ansi-test/3f4b9d31c3408114f0467eaeca4fd13b28e2ce31/objects/remove-method.lsp
lisp
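For readers more used to the Clojure files in this collection: the closest Clojure analogue to CLOS REMOVE-METHOD is clojure.core/remove-method on multimethods. Removing a dispatch value that was never added is simply a no-op, much like the first test above requires. This is an analogy, not a translation (multimethods dispatch on a value rather than on argument classes), and the names below are illustrative only:

  (defmulti describe class)
  (defmethod describe Long [n] (inc n))
  (describe 10)                  ;=> 11
  (remove-method describe Long)  ; drops the Long method, returns the multifn
  ;; (describe 10) would now throw: no matching method and no :default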
fa3f146e940d962cf6a4df467cba865da2a370ca7fc630ba1e6b825ae7bf44db
mattdw/stemmers
soundex.clj
(ns stemmers.test.soundex (:use clojure.test stemmers.soundex)) (deftest rupert-robert-rubin (is (= (stem "robert") "R163")) (is (= (stem "rupert") "R163")) (is (= (stem "rubin") "R150"))) (deftest ashcraft-ashcroft (is (= (stem "ashcraft") (stem "ashcroft") "A261"))) (deftest jumanji-fruittrees (is (= (stem "jumanji") "J552")) (is (= (stem "fruittress") "F636")))
null
https://raw.githubusercontent.com/mattdw/stemmers/7a29b412352ebb604058b357ba332a8b53d0565f/test/stemmers/test/soundex.clj
clojure
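The expected codes in these tests follow classic American Soundex: keep the first letter, map the remaining consonants to digits (b f p v -> 1, c g j k q s x z -> 2, d t -> 3, l -> 4, m n -> 5, r -> 6), collapse adjacent equal codes, drop vowels and h/w/y, then pad or truncate to three digits. A rough, hypothetical sketch of that coding (it omits the h/w merging rule that the "ashcraft" case relies on, so it is not a drop-in replacement for the library's stem):

  (require '[clojure.string :as string])

  (def ^:private soundex-codes
    {\b 1 \f 1 \p 1 \v 1
     \c 2 \g 2 \j 2 \k 2 \q 2 \s 2 \x 2 \z 2
     \d 3 \t 3
     \l 4
     \m 5 \n 5
     \r 6})

  (defn soundex-sketch [word]
    (let [[head & tail] (string/lower-case word)
          digits (->> tail
                      (map soundex-codes) ; nil for vowels, h, w, y
                      (dedupe)            ; collapse adjacent equal codes
                      (remove nil?))]
      (str (string/upper-case (str head))
           (apply str (take 3 (concat digits (repeat 0)))))))

  ;; (soundex-sketch "robert") ;=> "R163"
  ;; (soundex-sketch "rubin")  ;=> "R150"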
1e89a47e6104c7639e6818090e3e50a0e86b59d4ae77db2d9663f36f2d0f1b73
mv2devnul/taglib
iso-639-2.lisp
-*- Mode : Lisp ; show - trailing - whitespace : t ; Base : 10 ; indent - tabs : nil ; Syntax : ANSI - Common - Lisp ; Package : ISO-639 - 2 ; -*- Copyright ( c ) 2013 , . All rights reserved . (in-package #:iso-639-2) (defparameter *langs* '(:aar "Afar" :abk "Abkhazian" :ace "Achinese" :ach "Acoli" :ada "Adangme" :ady "Adyghe" :afa "Afro-Asiatic languages" :afh "Afrihili" :afr "Afrikaans" :ain "Ainu" :aka "Akan" :akk "Akkadian" :alb "Albanian" :ale "Aleut" :alg "Algonquian languages" :alt "Southern Altai" :amh "Amharic" :ang "English, Old (ca.450-1100)" :anp "Angika" :apa "Apache languages" :ara "Arabic" :arc "Official Aramaic (700-300 BCE)" :arg "Aragonese" :arm "Armenian" :arn "Mapudungun" :arp "Arapaho" :art "Artificial languages" :arw "Arawak" :asm "Assamese" :ast "Asturian" :ath "Athapascan languages" :aus "Australian languages" :ava "Avaric" :ave "Avestan" :awa "Awadhi" :aym "Aymara" :aze "Azerbaijani" :bad "Banda languages" :bai "Bamileke languages" :bak "Bashkir" :bal "Baluchi" :bam "Bambara" :ban "Balinese" :baq "Basque" :bas "Basa" :bat "Baltic languages" :bej "Beja" :bel "Belarusian" :bem "Bemba" :ben "Bengali" :ber "Berber languages" :bho "Bhojpuri" :bih "Bihari languages" :bik "Bikol" :bin "Bini" :bis "Bislama" :bla "Siksika" :bnt "Bantu languages" :bos "Bosnian" :bra "Braj" :bre "Breton" :btk "Batak languages" :bua "Buriat" :bug "Buginese" :bul "Bulgarian" :bur "Burmese" :byn "Blin" :cad "Caddo" :cai "Central American Indian languages" :car "Galibi Carib" :cat "Catalan" :cau "Caucasian languages" :ceb "Cebuano" :cel "Celtic languages" :cha "Chamorro" :chb "Chibcha" :che "Chechen" :chg "Chagatai" :chi "Chinese" :chk "Chuukese" :chm "Mari" :chn "Chinook jargon" :cho "Choctaw" :chp "Chipewyan" :chr "Cherokee" :chu "Church Slavic" :chv "Chuvash" :chy "Cheyenne" :cmc "Chamic languages" :cop "Coptic" :cor "Cornish" :cos "Corsican" :cpe "Creoles and pidgins, English based" :cpf "Creoles and pidgins, French-based" :cpp "Creoles and pidgins, Portuguese-based" :cre "Cree" :crh "Crimean Tatar" :crp "Creoles and pidgins" :csb "Kashubian" :cus "Cushitic languages" :cze "Czech" :dak "Dakota" :dan "Danish" :dar "Dargwa" :day "Land Dayak languages" :del "Delaware" :den "Slave (Athapascan)" :dgr "Dogrib" :din "Dinka" :div "Divehi" :doi "Dogri" :dra "Dravidian languages" :dsb "Lower Sorbian" :dua "Duala" :dum "Dutch, Middle (ca.1050-1350)" :dut "Dutch" :dyu "Dyula" :dzo "Dzongkha" :efi "Efik" :egy "Egyptian (Ancient)" :eka "Ekajuk" :elx "Elamite" :eng "English" :enm "English, Middle (1100-1500)" :epo "Esperanto" :est "Estonian" :ewe "Ewe" :ewo "Ewondo" :fan "Fang" :fao "Faroese" :fat "Fanti" :fij "Fijian" :fil "Filipino" :fin "Finnish" :fiu "Finno-Ugrian languages" :fon "Fon" :fre "French" :frm "French, Middle (ca.1400-1600)" :fro "French, Old (842-ca.1400)" :frr "Northern Frisian" :frs "Eastern Frisian" :fry "Western Frisian" :ful "Fulah" :fur "Friulian" :gaa "Ga" :gay "Gayo" :gba "Gbaya" :gem "Germanic languages" :geo "Georgian" :ger "German" :gez "Geez" :gil "Gilbertese" :gla "Gaelic" :gle "Irish" :glg "Galician" :glv "Manx" :gmh "German, Middle High (ca.1050-1500)" :goh "German, Old High (ca.750-1050)" :gon "Gondi" :gor "Gorontalo" :got "Gothic" :grb "Grebo" :grc "Greek, Ancient (to 1453)" :gre "Greek, Modern (1453-)" :grn "Guarani" :gsw "Swiss German" :guj "Gujarati" :gwi "Gwich'in" :hai "Haida" :hat "Haitian" :hau "Hausa" :haw "Hawaiian" :heb "Hebrew" :her "Herero" :hil "Hiligaynon" :him "Himachali languages" :hin "Hindi" :hit "Hittite" :hmn "Hmong" :hmo "Hiri Motu" :hrv "Croatian" :hsb 
"Upper Sorbian" :hun "Hungarian" :hup "Hupa" :iba "Iban" :ibo "Igbo" :ice "Icelandic" :ido "Ido" :iii "Sichuan Yi" :ijo "Ijo languages" :iku "Inuktitut" :ile "Interlingue" :ilo "Iloko" :ina "Interlingua (International Auxiliary Language Association)" :inc "Indic languages" :ind "Indonesian" :ine "Indo-European languages" :inh "Ingush" :ipk "Inupiaq" :ira "Iranian languages" :iro "Iroquoian languages" :ita "Italian" :jav "Javanese" :jbo "Lojban" :jpn "Japanese" :jpr "Judeo-Persian" :jrb "Judeo-Arabic" :kaa "Kara-Kalpak" :kab "Kabyle" :kac "Kachin" :kal "Kalaallisut" :kam "Kamba" :kan "Kannada" :kar "Karen languages" :kas "Kashmiri" :kau "Kanuri" :kaw "Kawi" :kaz "Kazakh" :kbd "Kabardian" :kha "Khasi" :khi "Khoisan languages" :khm "Central Khmer" :kho "Khotanese" :kik "Kikuyu" :kin "Kinyarwanda" :kir "Kirghiz" :kmb "Kimbundu" :kok "Konkani" :kom "Komi" :kon "Kongo" :kor "Korean" :kos "Kosraean" :kpe "Kpelle" :krc "Karachay-Balkar" :krl "Karelian" :kro "Kru languages" :kru "Kurukh" :kua "Kuanyama" :kum "Kumyk" :kur "Kurdish" :kut "Kutenai" :lad "Ladino" :lah "Lahnda" :lam "Lamba" :lao "Lao" :lat "Latin" :lav "Latvian" :lez "Lezghian" :lim "Limburgan" :lin "Lingala" :lit "Lithuanian" :lol "Mongo" :loz "Lozi" :ltz "Luxembourgish" :lua "Luba-Lulua" :lub "Luba-Katanga" :lug "Ganda" :lui "Luiseno" :lun "Lunda" :luo "Luo (Kenya and Tanzania)" :lus "Lushai" :mac "Macedonian" :mad "Madurese" :mag "Magahi" :mah "Marshallese" :mai "Maithili" :mak "Makasar" :mal "Malayalam" :man "Mandingo" :mao "Maori" :map "Austronesian languages" :mar "Marathi" :mas "Masai" :may "Malay" :mdf "Moksha" :mdr "Mandar" :men "Mende" :mga "Irish, Middle (900-1200)" :mic "Mi'kmaq" :min "Minangkabau" :mis "Uncoded languages" :mkh "Mon-Khmer languages" :mlg "Malagasy" :mlt "Maltese" :mnc "Manchu" :mni "Manipuri" :mno "Manobo languages" :moh "Mohawk" :mon "Mongolian" :mos "Mossi" :mul "Multiple languages" :mun "Munda languages" :mus "Creek" :mwl "Mirandese" :mwr "Marwari" :myn "Mayan languages" :myv "Erzya" :nah "Nahuatl languages" :nai "North American Indian languages" :nap "Neapolitan" :nau "Nauru" :nav "Navajo" :nbl "Ndebele, South" :nde "Ndebele, North" :ndo "Ndonga" :nds "Low German" :nep "Nepali" :new "Nepal Bhasa" :nia "Nias" :nic "Niger-Kordofanian languages" :niu "Niuean" :nno "Norwegian Nynorsk" :nob "Bokmål, Norwegian" :nog "Nogai" :non "Norse, Old" :nor "Norwegian" :nqo "N'Ko" :nso "Pedi" :nub "Nubian languages" :nwc "Classical Newari" :nya "Chichewa" :nym "Nyamwezi" :nyn "Nyankole" :nyo "Nyoro" :nzi "Nzima" :oci "Occitan (post 1500)" :oji "Ojibwa" :ori "Oriya" :orm "Oromo" :osa "Osage" :oss "Ossetian" :ota "Turkish, Ottoman (1500-1928)" :oto "Otomian languages" :paa "Papuan languages" :pag "Pangasinan" :pal "Pahlavi" :pam "Pampanga" :pan "Panjabi" :pap "Papiamento" :pau "Palauan" :peo "Persian, Old (ca.600-400 B.C.)" :per "Persian" :phi "Philippine languages" :phn "Phoenician" :pli "Pali" :pol "Polish" :pon "Pohnpeian" :por "Portuguese" :pra "Prakrit languages" :pro "Provençal, Old (to 1500)" :pus "Pushto" :qaa-"qtz Reserved for local use" :que "Quechua" :raj "Rajasthani" :rap "Rapanui" :rar "Rarotongan" :roa "Romance languages" :roh "Romansh" :rom "Romany" :rum "Romanian" :run "Rundi" :rup "Aromanian" :rus "Russian" :sad "Sandawe" :sag "Sango" :sah "Yakut" :sai "South American Indian languages" :sal "Salishan languages" :sam "Samaritan Aramaic" :san "Sanskrit" :sas "Sasak" :sat "Santali" :scn "Sicilian" :sco "Scots" :sel "Selkup" :sem "Semitic languages" :sga "Irish, Old (to 900)" :sgn "Sign Languages" :shn "Shan" 
:sid "Sidamo" :sin "Sinhala" :sio "Siouan languages" :sit "Sino-Tibetan languages" :sla "Slavic languages" :slo "Slovak" :slv "Slovenian" :sma "Southern Sami" :sme "Northern Sami" :smi "Sami languages" :smj "Lule Sami" :smn "Inari Sami" :smo "Samoan" :sms "Skolt Sami" :sna "Shona" :snd "Sindhi" :snk "Soninke" :sog "Sogdian" :som "Somali" :son "Songhai languages" :sot "Sotho, Southern" :spa "Spanish" :srd "Sardinian" :srn "Sranan Tongo" :srp "Serbian" :srr "Serer" :ssa "Nilo-Saharan languages" :ssw "Swati" :suk "Sukuma" :sun "Sundanese" :sus "Susu" :sux "Sumerian" :swa "Swahili" :swe "Swedish" :syc "Classical Syriac" :syr "Syriac" :tah "Tahitian" :tai "Tai languages" :tam "Tamil" :tat "Tatar" :tel "Telugu" :tem "Timne" :ter "Tereno" :tet "Tetum" :tgk "Tajik" :tgl "Tagalog" :tha "Thai" :tib "Tibetan" :tig "Tigre" :tir "Tigrinya" :tiv "Tiv" :tkl "Tokelau" :tlh "Klingon" :tli "Tlingit" :tmh "Tamashek" :tog "Tonga (Nyasa)" :ton "Tonga (Tonga Islands)" :tpi "Tok Pisin" :tsi "Tsimshian" :tsn "Tswana" :tso "Tsonga" :tuk "Turkmen" :tum "Tumbuka" :tup "Tupi languages" :tur "Turkish" :tut "Altaic languages" :tvl "Tuvalu" :twi "Twi" :tyv "Tuvinian" :udm "Udmurt" :uga "Ugaritic" :uig "Uighur" :ukr "Ukrainian" :umb "Umbundu" :und "Undetermined" :urd "Urdu" :uzb "Uzbek" :vai "Vai" :ven "Venda" :vie "Vietnamese" :vol "Volapük" :vot "Votic" :wak "Wakashan languages" :wal "Wolaitta" :war "Waray" :was "Washo" :wel "Welsh" :wen "Sorbian languages" :wln "Walloon" :wol "Wolof" :xal "Kalmyk" :xho "Xhosa" :yao "Yao" :yap "Yapese" :yid "Yiddish" :yor "Yoruba" :ypk "Yupik languages" :zap "Zapotec" :zbl "Blissymbols" :zen "Zenaga" :zgh "Standard Moroccan Tamazight" :zha "Zhuang" :znd "Zande languages" :zul "Zulu" :zun "Zuni" :zxx "No linguistic content" :zza "Zaza" :XXX "Not Used")) (defun get-iso-639-2-language (l) "Convert an ISO-639-2 language tag into a readable language." (declare #.utils:*standard-optimize-settings*) (let* ((lang (getf *langs* (make-keyword (string-upcase l))))) (if lang lang "Bad ISO-639-2 language")))
null
https://raw.githubusercontent.com/mv2devnul/taglib/915f669dbb3e14f67e7ed79869e9a4ad2859f86a/iso-639-2.lisp
lisp
show - trailing - whitespace : t ; Base : 10 ; indent - tabs : nil ; Syntax : ANSI - Common - Lisp ; Package : ISO-639 - 2 ; -*-
Copyright ( c ) 2013 , . All rights reserved . (in-package #:iso-639-2) (defparameter *langs* '(:aar "Afar" :abk "Abkhazian" :ace "Achinese" :ach "Acoli" :ada "Adangme" :ady "Adyghe" :afa "Afro-Asiatic languages" :afh "Afrihili" :afr "Afrikaans" :ain "Ainu" :aka "Akan" :akk "Akkadian" :alb "Albanian" :ale "Aleut" :alg "Algonquian languages" :alt "Southern Altai" :amh "Amharic" :ang "English, Old (ca.450-1100)" :anp "Angika" :apa "Apache languages" :ara "Arabic" :arc "Official Aramaic (700-300 BCE)" :arg "Aragonese" :arm "Armenian" :arn "Mapudungun" :arp "Arapaho" :art "Artificial languages" :arw "Arawak" :asm "Assamese" :ast "Asturian" :ath "Athapascan languages" :aus "Australian languages" :ava "Avaric" :ave "Avestan" :awa "Awadhi" :aym "Aymara" :aze "Azerbaijani" :bad "Banda languages" :bai "Bamileke languages" :bak "Bashkir" :bal "Baluchi" :bam "Bambara" :ban "Balinese" :baq "Basque" :bas "Basa" :bat "Baltic languages" :bej "Beja" :bel "Belarusian" :bem "Bemba" :ben "Bengali" :ber "Berber languages" :bho "Bhojpuri" :bih "Bihari languages" :bik "Bikol" :bin "Bini" :bis "Bislama" :bla "Siksika" :bnt "Bantu languages" :bos "Bosnian" :bra "Braj" :bre "Breton" :btk "Batak languages" :bua "Buriat" :bug "Buginese" :bul "Bulgarian" :bur "Burmese" :byn "Blin" :cad "Caddo" :cai "Central American Indian languages" :car "Galibi Carib" :cat "Catalan" :cau "Caucasian languages" :ceb "Cebuano" :cel "Celtic languages" :cha "Chamorro" :chb "Chibcha" :che "Chechen" :chg "Chagatai" :chi "Chinese" :chk "Chuukese" :chm "Mari" :chn "Chinook jargon" :cho "Choctaw" :chp "Chipewyan" :chr "Cherokee" :chu "Church Slavic" :chv "Chuvash" :chy "Cheyenne" :cmc "Chamic languages" :cop "Coptic" :cor "Cornish" :cos "Corsican" :cpe "Creoles and pidgins, English based" :cpf "Creoles and pidgins, French-based" :cpp "Creoles and pidgins, Portuguese-based" :cre "Cree" :crh "Crimean Tatar" :crp "Creoles and pidgins" :csb "Kashubian" :cus "Cushitic languages" :cze "Czech" :dak "Dakota" :dan "Danish" :dar "Dargwa" :day "Land Dayak languages" :del "Delaware" :den "Slave (Athapascan)" :dgr "Dogrib" :din "Dinka" :div "Divehi" :doi "Dogri" :dra "Dravidian languages" :dsb "Lower Sorbian" :dua "Duala" :dum "Dutch, Middle (ca.1050-1350)" :dut "Dutch" :dyu "Dyula" :dzo "Dzongkha" :efi "Efik" :egy "Egyptian (Ancient)" :eka "Ekajuk" :elx "Elamite" :eng "English" :enm "English, Middle (1100-1500)" :epo "Esperanto" :est "Estonian" :ewe "Ewe" :ewo "Ewondo" :fan "Fang" :fao "Faroese" :fat "Fanti" :fij "Fijian" :fil "Filipino" :fin "Finnish" :fiu "Finno-Ugrian languages" :fon "Fon" :fre "French" :frm "French, Middle (ca.1400-1600)" :fro "French, Old (842-ca.1400)" :frr "Northern Frisian" :frs "Eastern Frisian" :fry "Western Frisian" :ful "Fulah" :fur "Friulian" :gaa "Ga" :gay "Gayo" :gba "Gbaya" :gem "Germanic languages" :geo "Georgian" :ger "German" :gez "Geez" :gil "Gilbertese" :gla "Gaelic" :gle "Irish" :glg "Galician" :glv "Manx" :gmh "German, Middle High (ca.1050-1500)" :goh "German, Old High (ca.750-1050)" :gon "Gondi" :gor "Gorontalo" :got "Gothic" :grb "Grebo" :grc "Greek, Ancient (to 1453)" :gre "Greek, Modern (1453-)" :grn "Guarani" :gsw "Swiss German" :guj "Gujarati" :gwi "Gwich'in" :hai "Haida" :hat "Haitian" :hau "Hausa" :haw "Hawaiian" :heb "Hebrew" :her "Herero" :hil "Hiligaynon" :him "Himachali languages" :hin "Hindi" :hit "Hittite" :hmn "Hmong" :hmo "Hiri Motu" :hrv "Croatian" :hsb "Upper Sorbian" :hun "Hungarian" :hup "Hupa" :iba "Iban" :ibo "Igbo" :ice "Icelandic" :ido "Ido" :iii "Sichuan Yi" :ijo "Ijo languages" :iku 
"Inuktitut" :ile "Interlingue" :ilo "Iloko" :ina "Interlingua (International Auxiliary Language Association)" :inc "Indic languages" :ind "Indonesian" :ine "Indo-European languages" :inh "Ingush" :ipk "Inupiaq" :ira "Iranian languages" :iro "Iroquoian languages" :ita "Italian" :jav "Javanese" :jbo "Lojban" :jpn "Japanese" :jpr "Judeo-Persian" :jrb "Judeo-Arabic" :kaa "Kara-Kalpak" :kab "Kabyle" :kac "Kachin" :kal "Kalaallisut" :kam "Kamba" :kan "Kannada" :kar "Karen languages" :kas "Kashmiri" :kau "Kanuri" :kaw "Kawi" :kaz "Kazakh" :kbd "Kabardian" :kha "Khasi" :khi "Khoisan languages" :khm "Central Khmer" :kho "Khotanese" :kik "Kikuyu" :kin "Kinyarwanda" :kir "Kirghiz" :kmb "Kimbundu" :kok "Konkani" :kom "Komi" :kon "Kongo" :kor "Korean" :kos "Kosraean" :kpe "Kpelle" :krc "Karachay-Balkar" :krl "Karelian" :kro "Kru languages" :kru "Kurukh" :kua "Kuanyama" :kum "Kumyk" :kur "Kurdish" :kut "Kutenai" :lad "Ladino" :lah "Lahnda" :lam "Lamba" :lao "Lao" :lat "Latin" :lav "Latvian" :lez "Lezghian" :lim "Limburgan" :lin "Lingala" :lit "Lithuanian" :lol "Mongo" :loz "Lozi" :ltz "Luxembourgish" :lua "Luba-Lulua" :lub "Luba-Katanga" :lug "Ganda" :lui "Luiseno" :lun "Lunda" :luo "Luo (Kenya and Tanzania)" :lus "Lushai" :mac "Macedonian" :mad "Madurese" :mag "Magahi" :mah "Marshallese" :mai "Maithili" :mak "Makasar" :mal "Malayalam" :man "Mandingo" :mao "Maori" :map "Austronesian languages" :mar "Marathi" :mas "Masai" :may "Malay" :mdf "Moksha" :mdr "Mandar" :men "Mende" :mga "Irish, Middle (900-1200)" :mic "Mi'kmaq" :min "Minangkabau" :mis "Uncoded languages" :mkh "Mon-Khmer languages" :mlg "Malagasy" :mlt "Maltese" :mnc "Manchu" :mni "Manipuri" :mno "Manobo languages" :moh "Mohawk" :mon "Mongolian" :mos "Mossi" :mul "Multiple languages" :mun "Munda languages" :mus "Creek" :mwl "Mirandese" :mwr "Marwari" :myn "Mayan languages" :myv "Erzya" :nah "Nahuatl languages" :nai "North American Indian languages" :nap "Neapolitan" :nau "Nauru" :nav "Navajo" :nbl "Ndebele, South" :nde "Ndebele, North" :ndo "Ndonga" :nds "Low German" :nep "Nepali" :new "Nepal Bhasa" :nia "Nias" :nic "Niger-Kordofanian languages" :niu "Niuean" :nno "Norwegian Nynorsk" :nob "Bokmål, Norwegian" :nog "Nogai" :non "Norse, Old" :nor "Norwegian" :nqo "N'Ko" :nso "Pedi" :nub "Nubian languages" :nwc "Classical Newari" :nya "Chichewa" :nym "Nyamwezi" :nyn "Nyankole" :nyo "Nyoro" :nzi "Nzima" :oci "Occitan (post 1500)" :oji "Ojibwa" :ori "Oriya" :orm "Oromo" :osa "Osage" :oss "Ossetian" :ota "Turkish, Ottoman (1500-1928)" :oto "Otomian languages" :paa "Papuan languages" :pag "Pangasinan" :pal "Pahlavi" :pam "Pampanga" :pan "Panjabi" :pap "Papiamento" :pau "Palauan" :peo "Persian, Old (ca.600-400 B.C.)" :per "Persian" :phi "Philippine languages" :phn "Phoenician" :pli "Pali" :pol "Polish" :pon "Pohnpeian" :por "Portuguese" :pra "Prakrit languages" :pro "Provençal, Old (to 1500)" :pus "Pushto" :qaa-"qtz Reserved for local use" :que "Quechua" :raj "Rajasthani" :rap "Rapanui" :rar "Rarotongan" :roa "Romance languages" :roh "Romansh" :rom "Romany" :rum "Romanian" :run "Rundi" :rup "Aromanian" :rus "Russian" :sad "Sandawe" :sag "Sango" :sah "Yakut" :sai "South American Indian languages" :sal "Salishan languages" :sam "Samaritan Aramaic" :san "Sanskrit" :sas "Sasak" :sat "Santali" :scn "Sicilian" :sco "Scots" :sel "Selkup" :sem "Semitic languages" :sga "Irish, Old (to 900)" :sgn "Sign Languages" :shn "Shan" :sid "Sidamo" :sin "Sinhala" :sio "Siouan languages" :sit "Sino-Tibetan languages" :sla "Slavic languages" :slo "Slovak" :slv "Slovenian" 
:sma "Southern Sami" :sme "Northern Sami" :smi "Sami languages" :smj "Lule Sami" :smn "Inari Sami" :smo "Samoan" :sms "Skolt Sami" :sna "Shona" :snd "Sindhi" :snk "Soninke" :sog "Sogdian" :som "Somali" :son "Songhai languages" :sot "Sotho, Southern" :spa "Spanish" :srd "Sardinian" :srn "Sranan Tongo" :srp "Serbian" :srr "Serer" :ssa "Nilo-Saharan languages" :ssw "Swati" :suk "Sukuma" :sun "Sundanese" :sus "Susu" :sux "Sumerian" :swa "Swahili" :swe "Swedish" :syc "Classical Syriac" :syr "Syriac" :tah "Tahitian" :tai "Tai languages" :tam "Tamil" :tat "Tatar" :tel "Telugu" :tem "Timne" :ter "Tereno" :tet "Tetum" :tgk "Tajik" :tgl "Tagalog" :tha "Thai" :tib "Tibetan" :tig "Tigre" :tir "Tigrinya" :tiv "Tiv" :tkl "Tokelau" :tlh "Klingon" :tli "Tlingit" :tmh "Tamashek" :tog "Tonga (Nyasa)" :ton "Tonga (Tonga Islands)" :tpi "Tok Pisin" :tsi "Tsimshian" :tsn "Tswana" :tso "Tsonga" :tuk "Turkmen" :tum "Tumbuka" :tup "Tupi languages" :tur "Turkish" :tut "Altaic languages" :tvl "Tuvalu" :twi "Twi" :tyv "Tuvinian" :udm "Udmurt" :uga "Ugaritic" :uig "Uighur" :ukr "Ukrainian" :umb "Umbundu" :und "Undetermined" :urd "Urdu" :uzb "Uzbek" :vai "Vai" :ven "Venda" :vie "Vietnamese" :vol "Volapük" :vot "Votic" :wak "Wakashan languages" :wal "Wolaitta" :war "Waray" :was "Washo" :wel "Welsh" :wen "Sorbian languages" :wln "Walloon" :wol "Wolof" :xal "Kalmyk" :xho "Xhosa" :yao "Yao" :yap "Yapese" :yid "Yiddish" :yor "Yoruba" :ypk "Yupik languages" :zap "Zapotec" :zbl "Blissymbols" :zen "Zenaga" :zgh "Standard Moroccan Tamazight" :zha "Zhuang" :znd "Zande languages" :zul "Zulu" :zun "Zuni" :zxx "No linguistic content" :zza "Zaza" :XXX "Not Used")) (defun get-iso-639-2-language (l) "Convert an ISO-639-2 language tag into a readable language." (declare #.utils:*standard-optimize-settings*) (let* ((lang (getf *langs* (make-keyword (string-upcase l))))) (if lang lang "Bad ISO-639-2 language")))
cf905f7919eb52532374c505a46c74c22fd7566d4f17d9c0865adcab362ba7b4
WorksHub/client
github.cljc
(ns wh.components.github (:require #?(:clj [wh.config :as config]) #?(:cljs [re-frame.core :refer [dispatch]]) #?(:cljs [wh.events]) #?(:cljs [wh.subs :as subs :refer [<sub]]) [wh.common.subs] [wh.components.icons :refer [icon]])) (defn app-name [] #?(:clj (config/get-in [:github :app :name])) #?(:cljs (<sub [:wh.subs/github-app-name]))) (defn state-query-param [] #?(:cljs (let [env (<sub [:wh/env]) pr-number (some-> (re-find #"-\d+" js/window.location.href) (subs 1))] (when (and (= :stage env) pr-number) (str "?state=" pr-number))))) (defn install-gh-app-url [] (str "/" (app-name) "/installations/new" (state-query-param))) (defn install-github-app [{:keys [class label id] :or {label "Integrate with GitHub"}}] #?(:cljs [:a.button.button--public.button--github (merge {:class class :href (install-gh-app-url) :on-click #(dispatch [:company/track-install-gh-clicked])} (when id {:id id})) [icon "github" :class "button__icon"] [:span label]]))
null
https://raw.githubusercontent.com/WorksHub/client/77e4212a69dad049a9e784143915058acd918982/common/src/wh/components/github.cljc
clojure
(ns wh.components.github (:require #?(:clj [wh.config :as config]) #?(:cljs [re-frame.core :refer [dispatch]]) #?(:cljs [wh.events]) #?(:cljs [wh.subs :as subs :refer [<sub]]) [wh.common.subs] [wh.components.icons :refer [icon]])) (defn app-name [] #?(:clj (config/get-in [:github :app :name])) #?(:cljs (<sub [:wh.subs/github-app-name]))) (defn state-query-param [] #?(:cljs (let [env (<sub [:wh/env]) pr-number (some-> (re-find #"-\d+" js/window.location.href) (subs 1))] (when (and (= :stage env) pr-number) (str "?state=" pr-number))))) (defn install-gh-app-url [] (str "/" (app-name) "/installations/new" (state-query-param))) (defn install-github-app [{:keys [class label id] :or {label "Integrate with GitHub"}}] #?(:cljs [:a.button.button--public.button--github (merge {:class class :href (install-gh-app-url) :on-click #(dispatch [:company/track-install-gh-clicked])} (when id {:id id})) [icon "github" :class "button__icon"] [:span label]]))
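For orientation, a hedged usage sketch of the install-github-app component defined above, assuming it is rendered from a Reagent-style ClojureScript view; the example namespace, view name, CSS class, and id are hypothetical, not part of the record.
(ns example.views
  (:require [wh.components.github :as github]))

(defn integrations-panel []
  ;; renders an anchor styled as a button that links to the GitHub App install URL
  [:div.integrations
   [github/install-github-app {:class "integrations__github"   ;; hypothetical class
                               :label "Connect GitHub"
                               :id    "install-gh-app"}]])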
eb210326aa9c0151b019bc9c980cc2fed62e0b805c00cd5975056b024fbeeaa0
xedin/gremlin-xpath
util_test.clj
(ns gremlin.contrib.xpath.util-test (:use [clojure.test] [gremlin.contrib.xpath.util]) (:import [org.apache.commons.jxpath.ri Parser] [org.apache.commons.jxpath.ri.compiler Step Constant TreeCompiler])) (deftest get-axis-test (let [compiler (TreeCompiler.) xpath (Parser/parseExpression "./outE/@name" compiler) steps (seq (.getSteps xpath))] (is (= (map get-axis steps) '("self" "child" "attribute"))))) (deftest skip-quotes-test (is (= (skip-quotes "'friend'") "friend"))) (deftest skip-spaces-test (is (= (skip-spaces "no spaces") "nospaces"))) (deftest xpath-constant-value-test (let [num-constant (Constant. 10.5) str-constant (Constant. "'hello'") str-num-constant (Constant. "'10.5'")] (is (= (xpath-constant-value num-constant) 10.5)) (is (= (xpath-constant-value str-constant) "hello")) (is (= (xpath-constant-value str-num-constant) "10.5"))))
null
https://raw.githubusercontent.com/xedin/gremlin-xpath/983e17617d8f4358c4b369946aa56bb9fe6e2e8c/test/gremlin/contrib/xpath/util_test.clj
clojure
(ns gremlin.contrib.xpath.util-test (:use [clojure.test] [gremlin.contrib.xpath.util]) (:import [org.apache.commons.jxpath.ri Parser] [org.apache.commons.jxpath.ri.compiler Step Constant TreeCompiler])) (deftest get-axis-test (let [compiler (TreeCompiler.) xpath (Parser/parseExpression "./outE/@name" compiler) steps (seq (.getSteps xpath))] (is (= (map get-axis steps) '("self" "child" "attribute"))))) (deftest skip-quotes-test (is (= (skip-quotes "'friend'") "friend"))) (deftest skip-spaces-test (is (= (skip-spaces "no spaces") "nospaces"))) (deftest xpath-constant-value-test (let [num-constant (Constant. 10.5) str-constant (Constant. "'hello'") str-num-constant (Constant. "'10.5'")] (is (= (xpath-constant-value num-constant) 10.5)) (is (= (xpath-constant-value str-constant) "hello")) (is (= (xpath-constant-value str-num-constant) "10.5"))))
b310665089cd35fe26d99ed0089dfa539bbdcbb3feb57f0e866a8cb3b4c12131
kappelmann/eidi2_repetitorium_tum
SparseVector.ml
type v = (int * int) list let empty = [] let sb_vektor xs = let rec create i = function | [] -> [] | x::xs when x<>0 -> (i,x)::create (i+1) xs | x::xs -> create (i+1) xs in create 0 xs let rec set i v = function [] -> if v<>0 then [(i,v)] else empty | (i',v')::xs -> if i'=i then if v<>0 then (i,v)::xs else xs else if i<i' then if v<>0 then (i,v)::(i',v')::xs else (i',v')::xs else (i',v')::set i v xs let rec mul s v = if s=0 then empty else match v with | [] -> [] | (i,v)::xs -> (i,s*v)::mul s xs let rec add_sb_vektor v w = match (v,w) with | (_,[]) -> v | ([],_) -> w | ((vi,vv)::vt, (wi,wv)::wt) -> if(vi<wi) then (vi,vv)::add_sb_vektor vt w else if(vi>wi) then (wi,wv)::add_sb_vektor v wt else let r = vv+wv in if(r=0) then add_sb_vektor vt wt else (vi,r)::add_sb_vektor vt wt let rec mul_sb_vektor v w = match (v,w) with | (_,[]) -> 0 | ([],_) -> 0 | ((vi,vv)::vt, (wi,wv)::wt) -> if(vi<wi) then mul_sb_vektor vt w else if(vi>wi) then mul_sb_vektor v wt else vv*wv+(mul_sb_vektor vt wt)
null
https://raw.githubusercontent.com/kappelmann/eidi2_repetitorium_tum/1d16bbc498487a85960e0d83152249eb13944611/2016/sparse_vector/solutions/SparseVector.ml
ocaml
type v = (int * int) list let empty = [] let sb_vektor xs = let rec create i = function | [] -> [] | x::xs when x<>0 -> (i,x)::create (i+1) xs | x::xs -> create (i+1) xs in create 0 xs let rec set i v = function [] -> if v<>0 then [(i,v)] else empty | (i',v')::xs -> if i'=i then if v<>0 then (i,v)::xs else xs else if i<i' then if v<>0 then (i,v)::(i',v')::xs else (i',v')::xs else (i',v')::set i v xs let rec mul s v = if s=0 then empty else match v with | [] -> [] | (i,v)::xs -> (i,s*v)::mul s xs let rec add_sb_vektor v w = match (v,w) with | (_,[]) -> v | ([],_) -> w | ((vi,vv)::vt, (wi,wv)::wt) -> if(vi<wi) then (vi,vv)::add_sb_vektor vt w else if(vi>wi) then (wi,wv)::add_sb_vektor v wt else let r = vv+wv in if(r=0) then add_sb_vektor vt wt else (vi,r)::add_sb_vektor vt wt let rec mul_sb_vektor v w = match (v,w) with | (_,[]) -> 0 | ([],_) -> 0 | ((vi,vv)::vt, (wi,wv)::wt) -> if(vi<wi) then mul_sb_vektor vt w else if(vi>wi) then mul_sb_vektor v wt else vv*wv+(mul_sb_vektor vt wt)
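To illustrate the data layout used above (a sparse vector stored as index/value pairs with zeros omitted), here is a small Clojure sketch of the sb_vektor construction; the Clojure name sb-vektor is illustrative only.
(defn sb-vektor [xs]
  ;; pair each element with its index and keep only the non-zero entries
  (into [] (comp (map-indexed vector) (remove (comp zero? second))) xs))

;; (sb-vektor [0 5 0 7]) ;=> [[1 5] [3 7]]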
096785689904434efa80944e4278ac3594b99aaf1039a78980fe66e8525aa4a5
lambdalille/history
path.mli
val target : Yocaml.Filepath.t val history_target : Yocaml.Filepath.t val css_repository : Yocaml.Filepath.t val talks_repository : Yocaml.Filepath.t val speakers_repository : Yocaml.Filepath.t val companies_repository : Yocaml.Filepath.t val places_repository : Yocaml.Filepath.t val events_repository : Yocaml.Filepath.t val talks_target : Yocaml.Filepath.t val events_target : Yocaml.Filepath.t val talk_file : string -> Yocaml.Filepath.t val event_file : string -> Yocaml.Filepath.t val company_file : string -> Yocaml.Filepath.t val place_file : string -> Yocaml.Filepath.t val speaker_file : string -> Yocaml.Filepath.t val talk_target : string -> Yocaml.Filepath.t val event_target : string -> Yocaml.Filepath.t val template : ?extension:string -> string -> Yocaml.Filepath.t val css_target : Yocaml.Filepath.t
null
https://raw.githubusercontent.com/lambdalille/history/6b5dd76b80d095d3a54e8860a7568b115ed747e8/bin/path.mli
ocaml
val target : Yocaml.Filepath.t val history_target : Yocaml.Filepath.t val css_repository : Yocaml.Filepath.t val talks_repository : Yocaml.Filepath.t val speakers_repository : Yocaml.Filepath.t val companies_repository : Yocaml.Filepath.t val places_repository : Yocaml.Filepath.t val events_repository : Yocaml.Filepath.t val talks_target : Yocaml.Filepath.t val events_target : Yocaml.Filepath.t val talk_file : string -> Yocaml.Filepath.t val event_file : string -> Yocaml.Filepath.t val company_file : string -> Yocaml.Filepath.t val place_file : string -> Yocaml.Filepath.t val speaker_file : string -> Yocaml.Filepath.t val talk_target : string -> Yocaml.Filepath.t val event_target : string -> Yocaml.Filepath.t val template : ?extension:string -> string -> Yocaml.Filepath.t val css_target : Yocaml.Filepath.t
6837978f2c879b157a460445dba76574844119850c99734ade386b52c0e87855
replikativ/datahike
tuples_test.cljc
(ns datahike.test.tuples-test (:require #?(:cljs [cljs.test :as t :refer-macros [is are deftest testing]] :clj [clojure.test :as t :refer [is are deftest testing]]) [datahike.api :as d] [datahike.db :as db]) #?(:clj (:import [clojure.lang ExceptionInfo]))) (deftest test-schema-declaration (testing "composite tuple" (is (db/empty-db {:reg/semester+course+student {:db/valueType :db.type/tuple :db/tupleAttrs [:reg/course :reg/semester :reg/student]}}))) (testing "heterogeneous tuples" (is (db/empty-db {:player/location {:db/valueType :db.type/tuple :db/tupleTypes [:db.type/long :db.type/long]}}))) (testing "homogeneous tuples" (is (db/empty-db {:db/tupleAttrs {:db/valueType :db.type/tuple :db/tupleType :db.type/keyword}})))) (defn connect [] (d/delete-database) ;; deletes the 'default' db (d/create-database {:schema-flexibility :write}) (d/connect)) (deftest test-transaction (testing "homogeneous tuple" (let [conn (connect)] (d/transact conn [{:db/ident :prices :db/valueType :db.type/tuple :db/tupleType :db.type/number :db/cardinality :db.cardinality/one}]) (testing "of less than 9 values" (is (d/transact conn [{:prices [1 2 3 4 5 6 7 8]}])) (testing "are of different types" (is (thrown-with-msg? ExceptionInfo #".*Cannot store homogeneous tuple with values of different type.*" (d/transact conn [{:prices [1 2 3 4 5 6 "fdsfdsf"]}])))) (testing "are of wrong type" (is (thrown-with-msg? ExceptionInfo #".*Cannot store homogeneous tuple. Values are of wrong type.*" (d/transact conn [{:prices ["a" "b" "fdsfdsf"]}]))))) (testing "of more than 8 values" (is (thrown-with-msg? ExceptionInfo #".*Cannot store more than 8 values .*" (d/transact conn [{:prices [1 2 3 4 5 6 7 8 9]}])))))) (testing "heterogeneous tuple" (let [conn (connect)] (d/transact conn [{:db/ident :coord :db/valueType :db.type/tuple :db/tupleTypes [:db.type/long :db.type/keyword] :db/cardinality :db.cardinality/one}]) (is (d/transact conn [{:coord [100 :coord/west]}])) (testing "with wrong number of values" (is (thrown-with-msg? ExceptionInfo #".*Cannot store heterogeneous tuple: expecting 2 values, got 3.*" (d/transact conn [{:coord [100 :coord/west 9]}])))) (testing "with type mismatch" (is (thrown-with-msg? 
ExceptionInfo #".*Cannot store heterogeneous tuple: there is a mismatch between values.* and their types.*" (d/transact conn [{:coord [100 9]}])))))) (testing "composite tuple" (let [conn (connect) reg-schema [{:db/ident :reg/course :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :reg/semester :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :reg/student :db/valueType :db.type/string :db/cardinality :db.cardinality/one}]] (d/transact conn reg-schema) (is (d/transact conn [{:db/ident :reg/semester+course+student :db/valueType :db.type/tuple :db/tupleAttrs [:reg/course :reg/semester :reg/student] :db/cardinality :db.cardinality/one}])) (is (d/transact conn [{:reg/course "BIO-101" :reg/semester "2018-fall" :reg/student ""}]))))) (deftest test-transact-and-query-non-composite (testing "heterogeneous" (let [conn (connect)] (d/transact conn [{:db/ident :coord :db/valueType :db.type/tuple :db/tupleTypes [:db.type/long :db.type/keyword] :db/cardinality :db.cardinality/one}]) (d/transact conn [[:db/add 100 :coord [100 :coord/west]]]) (is (= #{[[100 :coord/west]]} (d/q '[:find ?v :where [_ :coord ?v]] @conn))))) (testing "homogeneous" (let [conn (connect)] (d/transact conn [{:db/ident :coord :db/valueType :db.type/tuple :db/tupleType :db.type/long :db/cardinality :db.cardinality/one}]) (d/transact conn [[:db/add 100 :coord [100 200 300]]]) (is (= #{[[100 200 300]]} (d/q '[:find ?v :where [_ :coord ?v]] @conn)))))) (deftest test-transact-and-query-composite (let [conn (connect)] (d/transact conn [{:db/ident :a :db/valueType :db.type/long :db/cardinality :db.cardinality/one} {:db/ident :a+b+c :db/valueType :db.type/tuple :db/tupleAttrs [:a :b :c] :db/cardinality :db.cardinality/one}]) (is (d/transact conn [[:db/add 100 :a 123]])) (is (= #{[123]} (d/q '[:find ?v :where [100 :a ?v]] @conn))) (is (= #{[100 [123 nil nil]]} (d/q '[:find ?e ?v :where [?e :a+b+c ?v]] @conn))) (is (= #{[[123 nil nil]]} (d/q '[:find ?v :where [100 :a+b+c ?v]] @conn))))) (defn some-datoms [db es] (into #{} (map (juxt :e :a :v)) (mapcat #(d/datoms db {:index :eavt :components [%]}) es))) (deftest test-more-composite-transaction (let [conn (connect) e 100] (d/transact conn [{:db/ident :a :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :b :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :c :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :d :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :a+b :db/valueType :db.type/tuple :db/tupleAttrs [:a :b] :db/cardinality :db.cardinality/one} {:db/ident :a+c+d :db/valueType :db.type/tuple :db/tupleAttrs [:a :c :d] :db/cardinality :db.cardinality/one}]) (are [tx datoms] (= datoms (some-datoms (:db-after (d/transact conn tx)) [e])) [[:db/add e :a "a"]] #{[e :a "a"] [e :a+b ["a" nil]] [e :a+c+d ["a" nil nil]]} [[:db/add e :b "b"]] #{[e :a "a"] [e :b "b"] [e :a+b ["a" "b"]] [e :a+c+d ["a" nil nil]]} [[:db/add e :a "A"]] #{[e :a "A"] [e :b "b"] [e :a+b ["A" "b"]] [e :a+c+d ["A" nil nil]]} [[:db/add e :c "c"] [:db/add e :d "d"]] #{[e :a "A"] [e :b "b"] [e :a+b ["A" "b"]] [e :c "c"] [e :d "d"] [e :a+c+d ["A" "c" "d"]]} [[:db/add e :a "a"]] #{[e :a "a"] [e :b "b"] [e :a+b ["a" "b"]] [e :c "c"] [e :d "d"] [e :a+c+d ["a" "c" "d"]]} [[:db/add e :a "A"] [:db/add e :b "B"] [:db/add e :c "C"] [:db/add e :d "D"]] #{[e :a "A"] [e :b "B"] [e :a+b ["A" "B"]] [e :c "C"] [e :d "D"] [e :a+c+d ["A" "C" "D"]]} [[:db/retract e :a "A"]] #{[e :b 
"B"] [e :a+b [nil "B"]] [e :c "C"] [e :d "D"] [e :a+c+d [nil "C" "D"]]} [[:db/retract e :b "B"]] #{[e :c "C"] [e :d "D"] [e :a+c+d [nil "C" "D"]]}) (is (thrown-with-msg? ExceptionInfo #"Can’t modify tuple attrs directly:.*" (d/transact conn [{:db/id 100 :a+b ["A" "B"]}]))))) (deftest test-queries (let [conn (connect)] (d/transact conn [{:db/ident :a :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :b :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :a+b :db/valueType :db.type/tuple :db/tupleAttrs [:a :b] :db/cardinality :db.cardinality/one :db/unique :db.unique/value}]) (d/transact conn [{:db/id 1 :a "A" :b "B"} {:db/id 2 :a "A" :b "b"} {:db/id 3 :a "a" :b "B"} {:db/id 4 :a "a" :b "b"}]) (is (= #{[3]} (d/q '[:find ?e :where [?e :a+b ["a" "B"]]] @conn))) (is (= #{[["a" "B"]]} (d/q '[:find ?a+b :where [[:a+b ["a" "B"]] :a+b ?a+b]] @conn))) (is (= #{[["A" "B"]] [["A" "b"]] [["a" "B"]] [["a" "b"]]} (d/q '[:find ?a+b :where [?e :a ?a] [?e :b ?b] [(tuple ?a ?b) ?a+b]] @conn))) (is (= #{["A" "B"] ["A" "b"] ["a" "B"] ["a" "b"]} (d/q '[:find ?a ?b :where [?e :a+b ?a+b] [(untuple ?a+b) [?a ?b]]] @conn))))) (deftest test-lookup-refs (let [conn (connect)] (d/transact conn [{:db/ident :a :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :b :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :c :db/valueType :db.type/string :db/cardinality :db.cardinality/one :db/unique :db.unique/identity} {:db/ident :d :db/valueType :db.type/string :db/cardinality :db.cardinality/one :db/unique :db.unique/identity} {:db/ident :a+b :db/valueType :db.type/tuple :db/tupleAttrs [:a :b] :db/cardinality :db.cardinality/one :db/unique :db.unique/identity}]) (d/transact conn [{:db/id 100 :a "A" :b "B"} {:db/id 200 :a "a" :b "b"}]) (d/transact conn [[:db/add [:a+b ["A" "B"]] :c "C"] {:db/id [:a+b ["a" "b"]] :c "c"}]) (is (= #{[100 :a "A"] [100 :b "B"] [100 :a+b ["A" "B"]] [100 :c "C"] [200 :a "a"] [200 :b "b"] [200 :a+b ["a" "b"]] [200 :c "c"]} (some-datoms (d/db conn) [100 200]))) (is (thrown-with-msg? ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add [:a+b ["A" "B"]] :c "c"]]))) (is (thrown-with-msg? ExceptionInfo #".*Conflicting upsert: \[\:c \"c\"] .*" (d/transact conn [{:db/id [:a+b ["A" "B"]] :c "c"}]))) ;; change tuple + upsert (d/transact conn [{:db/id [:a+b ["A" "B"]] :b "b" :d "D"}]) (is (= #{[100 :a "A"] [100 :b "b"] [100 :a+b ["A" "b"]] [100 :c "C"] [100 :d "D"] [200 :a "a"] [200 :b "b"] [200 :a+b ["a" "b"]] [200 :c "c"]} (some-datoms (d/db conn) [100 200]))) (is (= {:db/id 200 :a "a" :b "b" :a+b ["a" "b"] :c "c"} (d/pull (d/db conn) '[*] [:a+b ["a" "b"]]))))) (deftest test-unique (let [conn (connect)] (d/transact conn [{:db/ident :a :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :b :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :a+b :db/valueType :db.type/tuple :db/tupleAttrs [:a :b] :db/cardinality :db.cardinality/one :db/unique :db.unique/identity}]) (d/transact conn [[:db/add 100 :a "a"]]) (d/transact conn [[:db/add 200 :a "A"]]) (is (thrown-with-msg? 
ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add 100 :a "A"]]))) (d/transact conn [[:db/add 100 :b "b"] [:db/add 200 :b "b"] {:db/id 300 :a "a" :b "B"}]) (is (= #{[100 :a "a"] [100 :b "b"] [100 :a+b ["a" "b"]] [200 :a "A"] [200 :b "b"] [200 :a+b ["A" "b"]] [300 :a "a"] [300 :b "B"] [300 :a+b ["a" "B"]]} (some-datoms (d/db conn) [100 200 300]))) (is (thrown-with-msg? ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add 100 :a "A"]]))) (is (thrown-with-msg? ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add 100 :b "B"]]))) (is (thrown-with-msg? ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add 100 :a "A"] [:db/add 100 :b "B"]]))) (testing "multiple tuple updates" ;; changing both tuple components in a single operation (d/transact conn [{:db/id 100 :a "A" :b "B"}]) (is (= {:db/id 100 :a "A" :b "B" :a+b ["A" "B"]} (d/pull (d/db conn) '[*] 100))) adding entity with two tuple components in a single operation (d/transact conn [{:db/id 4 :a "a" :b "c"}]) (is (= {:db/id 4 :a "a" :b "c" :a+b ["a" "c"]} (d/pull (d/db conn) '[*] 4)))))) (deftest test-validation (let [db (db/empty-db {:a+b {:db/valueType :db.type/tuple :db/tupleAttrs [:a :b]}}) db1 (d/db-with db [[:db/add 100 :a "a"]]) err-msg #"Can’t modify tuple attrs directly:.*"] (is (thrown-with-msg? ExceptionInfo err-msg (d/db-with db [[:db/add 100 :a+b [nil nil]]]))) (is (thrown-with-msg? ExceptionInfo err-msg (d/db-with db1 [[:db/add 100 :a+b ["a" nil]]]))) (is (thrown-with-msg? ExceptionInfo err-msg (d/db-with db [[:db/add 100 :a "a"] [:db/add 100 :a+b ["a" nil]]]))) (is (thrown-with-msg? ExceptionInfo err-msg (d/db-with db1 [[:db/retract 100 :a+b ["a" nil]]]))))) (deftest test-indexes (let [db (-> (db/empty-db {:a+b+c {:db/tupleAttrs [:a :b :c] :db/valueType :db.type/tuple :db/index true}}) (d/db-with [{:db/id 1 :a "a" :b "b" :c "c"} {:db/id 2 :a "A" :b "b" :c "c"} {:db/id 3 :a "a" :b "B" :c "c"} {:db/id 4 :a "A" :b "B" :c "c"} {:db/id 5 :a "a" :b "b" :c "C"} {:db/id 6 :a "A" :b "b" :c "C"} {:db/id 7 :a "a" :b "B" :c "C"} {:db/id 8 :a "A" :b "B" :c "C"}]))] (is (= [6] (mapv :e (d/datoms db :avet :a+b+c ["A" "b" "C"])))) (is (= [] (mapv :e (d/datoms db :avet :a+b+c ["A" "b" nil])))) (is (= [8 4 6 2] (mapv :e (d/index-range db {:attrid :a+b+c :start ["A" "B" "C"] :end ["A" "b" "c"]})))) (is (= [8 4] (mapv :e (d/index-range db {:attrid :a+b+c :start ["A" "B" nil] :end ["A" "b" nil]}))))))
null
https://raw.githubusercontent.com/replikativ/datahike/408cb0f538a837267e44b098df895fa00348fe10/test/datahike/test/tuples_test.cljc
clojure
deletes the 'default' db change tuple + upsert changing both tuple components in a single operation
(ns datahike.test.tuples-test (:require #?(:cljs [cljs.test :as t :refer-macros [is are deftest testing]] :clj [clojure.test :as t :refer [is are deftest testing]]) [datahike.api :as d] [datahike.db :as db]) #?(:clj (:import [clojure.lang ExceptionInfo]))) (deftest test-schema-declaration (testing "composite tuple" (is (db/empty-db {:reg/semester+course+student {:db/valueType :db.type/tuple :db/tupleAttrs [:reg/course :reg/semester :reg/student]}}))) (testing "heterogeneous tuples" (is (db/empty-db {:player/location {:db/valueType :db.type/tuple :db/tupleTypes [:db.type/long :db.type/long]}}))) (testing "homogeneous tuples" (is (db/empty-db {:db/tupleAttrs {:db/valueType :db.type/tuple :db/tupleType :db.type/keyword}})))) (defn connect [] (d/create-database {:schema-flexibility :write}) (d/connect)) (deftest test-transaction (testing "homogeneous tuple" (let [conn (connect)] (d/transact conn [{:db/ident :prices :db/valueType :db.type/tuple :db/tupleType :db.type/number :db/cardinality :db.cardinality/one}]) (testing "of less than 9 values" (is (d/transact conn [{:prices [1 2 3 4 5 6 7 8]}])) (testing "are of different types" (is (thrown-with-msg? ExceptionInfo #".*Cannot store homogeneous tuple with values of different type.*" (d/transact conn [{:prices [1 2 3 4 5 6 "fdsfdsf"]}])))) (testing "are of wrong type" (is (thrown-with-msg? ExceptionInfo #".*Cannot store homogeneous tuple. Values are of wrong type.*" (d/transact conn [{:prices ["a" "b" "fdsfdsf"]}]))))) (testing "of more than 8 values" (is (thrown-with-msg? ExceptionInfo #".*Cannot store more than 8 values .*" (d/transact conn [{:prices [1 2 3 4 5 6 7 8 9]}])))))) (testing "heterogeneous tuple" (let [conn (connect)] (d/transact conn [{:db/ident :coord :db/valueType :db.type/tuple :db/tupleTypes [:db.type/long :db.type/keyword] :db/cardinality :db.cardinality/one}]) (is (d/transact conn [{:coord [100 :coord/west]}])) (testing "with wrong number of values" (is (thrown-with-msg? ExceptionInfo #".*Cannot store heterogeneous tuple: expecting 2 values, got 3.*" (d/transact conn [{:coord [100 :coord/west 9]}])))) (testing "with type mismatch" (is (thrown-with-msg? 
ExceptionInfo #".*Cannot store heterogeneous tuple: there is a mismatch between values.* and their types.*" (d/transact conn [{:coord [100 9]}])))))) (testing "composite tuple" (let [conn (connect) reg-schema [{:db/ident :reg/course :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :reg/semester :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :reg/student :db/valueType :db.type/string :db/cardinality :db.cardinality/one}]] (d/transact conn reg-schema) (is (d/transact conn [{:db/ident :reg/semester+course+student :db/valueType :db.type/tuple :db/tupleAttrs [:reg/course :reg/semester :reg/student] :db/cardinality :db.cardinality/one}])) (is (d/transact conn [{:reg/course "BIO-101" :reg/semester "2018-fall" :reg/student ""}]))))) (deftest test-transact-and-query-non-composite (testing "heterogeneous" (let [conn (connect)] (d/transact conn [{:db/ident :coord :db/valueType :db.type/tuple :db/tupleTypes [:db.type/long :db.type/keyword] :db/cardinality :db.cardinality/one}]) (d/transact conn [[:db/add 100 :coord [100 :coord/west]]]) (is (= #{[[100 :coord/west]]} (d/q '[:find ?v :where [_ :coord ?v]] @conn))))) (testing "homogeneous" (let [conn (connect)] (d/transact conn [{:db/ident :coord :db/valueType :db.type/tuple :db/tupleType :db.type/long :db/cardinality :db.cardinality/one}]) (d/transact conn [[:db/add 100 :coord [100 200 300]]]) (is (= #{[[100 200 300]]} (d/q '[:find ?v :where [_ :coord ?v]] @conn)))))) (deftest test-transact-and-query-composite (let [conn (connect)] (d/transact conn [{:db/ident :a :db/valueType :db.type/long :db/cardinality :db.cardinality/one} {:db/ident :a+b+c :db/valueType :db.type/tuple :db/tupleAttrs [:a :b :c] :db/cardinality :db.cardinality/one}]) (is (d/transact conn [[:db/add 100 :a 123]])) (is (= #{[123]} (d/q '[:find ?v :where [100 :a ?v]] @conn))) (is (= #{[100 [123 nil nil]]} (d/q '[:find ?e ?v :where [?e :a+b+c ?v]] @conn))) (is (= #{[[123 nil nil]]} (d/q '[:find ?v :where [100 :a+b+c ?v]] @conn))))) (defn some-datoms [db es] (into #{} (map (juxt :e :a :v)) (mapcat #(d/datoms db {:index :eavt :components [%]}) es))) (deftest test-more-composite-transaction (let [conn (connect) e 100] (d/transact conn [{:db/ident :a :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :b :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :c :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :d :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :a+b :db/valueType :db.type/tuple :db/tupleAttrs [:a :b] :db/cardinality :db.cardinality/one} {:db/ident :a+c+d :db/valueType :db.type/tuple :db/tupleAttrs [:a :c :d] :db/cardinality :db.cardinality/one}]) (are [tx datoms] (= datoms (some-datoms (:db-after (d/transact conn tx)) [e])) [[:db/add e :a "a"]] #{[e :a "a"] [e :a+b ["a" nil]] [e :a+c+d ["a" nil nil]]} [[:db/add e :b "b"]] #{[e :a "a"] [e :b "b"] [e :a+b ["a" "b"]] [e :a+c+d ["a" nil nil]]} [[:db/add e :a "A"]] #{[e :a "A"] [e :b "b"] [e :a+b ["A" "b"]] [e :a+c+d ["A" nil nil]]} [[:db/add e :c "c"] [:db/add e :d "d"]] #{[e :a "A"] [e :b "b"] [e :a+b ["A" "b"]] [e :c "c"] [e :d "d"] [e :a+c+d ["A" "c" "d"]]} [[:db/add e :a "a"]] #{[e :a "a"] [e :b "b"] [e :a+b ["a" "b"]] [e :c "c"] [e :d "d"] [e :a+c+d ["a" "c" "d"]]} [[:db/add e :a "A"] [:db/add e :b "B"] [:db/add e :c "C"] [:db/add e :d "D"]] #{[e :a "A"] [e :b "B"] [e :a+b ["A" "B"]] [e :c "C"] [e :d "D"] [e :a+c+d ["A" "C" "D"]]} [[:db/retract e :a "A"]] #{[e :b 
"B"] [e :a+b [nil "B"]] [e :c "C"] [e :d "D"] [e :a+c+d [nil "C" "D"]]} [[:db/retract e :b "B"]] #{[e :c "C"] [e :d "D"] [e :a+c+d [nil "C" "D"]]}) (is (thrown-with-msg? ExceptionInfo #"Can’t modify tuple attrs directly:.*" (d/transact conn [{:db/id 100 :a+b ["A" "B"]}]))))) (deftest test-queries (let [conn (connect)] (d/transact conn [{:db/ident :a :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :b :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :a+b :db/valueType :db.type/tuple :db/tupleAttrs [:a :b] :db/cardinality :db.cardinality/one :db/unique :db.unique/value}]) (d/transact conn [{:db/id 1 :a "A" :b "B"} {:db/id 2 :a "A" :b "b"} {:db/id 3 :a "a" :b "B"} {:db/id 4 :a "a" :b "b"}]) (is (= #{[3]} (d/q '[:find ?e :where [?e :a+b ["a" "B"]]] @conn))) (is (= #{[["a" "B"]]} (d/q '[:find ?a+b :where [[:a+b ["a" "B"]] :a+b ?a+b]] @conn))) (is (= #{[["A" "B"]] [["A" "b"]] [["a" "B"]] [["a" "b"]]} (d/q '[:find ?a+b :where [?e :a ?a] [?e :b ?b] [(tuple ?a ?b) ?a+b]] @conn))) (is (= #{["A" "B"] ["A" "b"] ["a" "B"] ["a" "b"]} (d/q '[:find ?a ?b :where [?e :a+b ?a+b] [(untuple ?a+b) [?a ?b]]] @conn))))) (deftest test-lookup-refs (let [conn (connect)] (d/transact conn [{:db/ident :a :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :b :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :c :db/valueType :db.type/string :db/cardinality :db.cardinality/one :db/unique :db.unique/identity} {:db/ident :d :db/valueType :db.type/string :db/cardinality :db.cardinality/one :db/unique :db.unique/identity} {:db/ident :a+b :db/valueType :db.type/tuple :db/tupleAttrs [:a :b] :db/cardinality :db.cardinality/one :db/unique :db.unique/identity}]) (d/transact conn [{:db/id 100 :a "A" :b "B"} {:db/id 200 :a "a" :b "b"}]) (d/transact conn [[:db/add [:a+b ["A" "B"]] :c "C"] {:db/id [:a+b ["a" "b"]] :c "c"}]) (is (= #{[100 :a "A"] [100 :b "B"] [100 :a+b ["A" "B"]] [100 :c "C"] [200 :a "a"] [200 :b "b"] [200 :a+b ["a" "b"]] [200 :c "c"]} (some-datoms (d/db conn) [100 200]))) (is (thrown-with-msg? ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add [:a+b ["A" "B"]] :c "c"]]))) (is (thrown-with-msg? ExceptionInfo #".*Conflicting upsert: \[\:c \"c\"] .*" (d/transact conn [{:db/id [:a+b ["A" "B"]] :c "c"}]))) (d/transact conn [{:db/id [:a+b ["A" "B"]] :b "b" :d "D"}]) (is (= #{[100 :a "A"] [100 :b "b"] [100 :a+b ["A" "b"]] [100 :c "C"] [100 :d "D"] [200 :a "a"] [200 :b "b"] [200 :a+b ["a" "b"]] [200 :c "c"]} (some-datoms (d/db conn) [100 200]))) (is (= {:db/id 200 :a "a" :b "b" :a+b ["a" "b"] :c "c"} (d/pull (d/db conn) '[*] [:a+b ["a" "b"]]))))) (deftest test-unique (let [conn (connect)] (d/transact conn [{:db/ident :a :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :b :db/valueType :db.type/string :db/cardinality :db.cardinality/one} {:db/ident :a+b :db/valueType :db.type/tuple :db/tupleAttrs [:a :b] :db/cardinality :db.cardinality/one :db/unique :db.unique/identity}]) (d/transact conn [[:db/add 100 :a "a"]]) (d/transact conn [[:db/add 200 :a "A"]]) (is (thrown-with-msg? 
ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add 100 :a "A"]]))) (d/transact conn [[:db/add 100 :b "b"] [:db/add 200 :b "b"] {:db/id 300 :a "a" :b "B"}]) (is (= #{[100 :a "a"] [100 :b "b"] [100 :a+b ["a" "b"]] [200 :a "A"] [200 :b "b"] [200 :a+b ["A" "b"]] [300 :a "a"] [300 :b "B"] [300 :a+b ["a" "B"]]} (some-datoms (d/db conn) [100 200 300]))) (is (thrown-with-msg? ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add 100 :a "A"]]))) (is (thrown-with-msg? ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add 100 :b "B"]]))) (is (thrown-with-msg? ExceptionInfo #"Cannot add .* because of unique constraint: .*" (d/transact conn [[:db/add 100 :a "A"] [:db/add 100 :b "B"]]))) (testing "multiple tuple updates" (d/transact conn [{:db/id 100 :a "A" :b "B"}]) (is (= {:db/id 100 :a "A" :b "B" :a+b ["A" "B"]} (d/pull (d/db conn) '[*] 100))) adding entity with two tuple components in a single operation (d/transact conn [{:db/id 4 :a "a" :b "c"}]) (is (= {:db/id 4 :a "a" :b "c" :a+b ["a" "c"]} (d/pull (d/db conn) '[*] 4)))))) (deftest test-validation (let [db (db/empty-db {:a+b {:db/valueType :db.type/tuple :db/tupleAttrs [:a :b]}}) db1 (d/db-with db [[:db/add 100 :a "a"]]) err-msg #"Can’t modify tuple attrs directly:.*"] (is (thrown-with-msg? ExceptionInfo err-msg (d/db-with db [[:db/add 100 :a+b [nil nil]]]))) (is (thrown-with-msg? ExceptionInfo err-msg (d/db-with db1 [[:db/add 100 :a+b ["a" nil]]]))) (is (thrown-with-msg? ExceptionInfo err-msg (d/db-with db [[:db/add 100 :a "a"] [:db/add 100 :a+b ["a" nil]]]))) (is (thrown-with-msg? ExceptionInfo err-msg (d/db-with db1 [[:db/retract 100 :a+b ["a" nil]]]))))) (deftest test-indexes (let [db (-> (db/empty-db {:a+b+c {:db/tupleAttrs [:a :b :c] :db/valueType :db.type/tuple :db/index true}}) (d/db-with [{:db/id 1 :a "a" :b "b" :c "c"} {:db/id 2 :a "A" :b "b" :c "c"} {:db/id 3 :a "a" :b "B" :c "c"} {:db/id 4 :a "A" :b "B" :c "c"} {:db/id 5 :a "a" :b "b" :c "C"} {:db/id 6 :a "A" :b "b" :c "C"} {:db/id 7 :a "a" :b "B" :c "C"} {:db/id 8 :a "A" :b "B" :c "C"}]))] (is (= [6] (mapv :e (d/datoms db :avet :a+b+c ["A" "b" "C"])))) (is (= [] (mapv :e (d/datoms db :avet :a+b+c ["A" "b" nil])))) (is (= [8 4 6 2] (mapv :e (d/index-range db {:attrid :a+b+c :start ["A" "B" "C"] :end ["A" "b" "c"]})))) (is (= [8 4] (mapv :e (d/index-range db {:attrid :a+b+c :start ["A" "B" nil] :end ["A" "b" nil]}))))))
5cbe44ba43cddee0b1860208be32a86326e16abb005fae918bacc6eff04f9ac5
HaskellZhangSong/Introduction_to_Haskell_2ed_source
Maybe.hs
{-# LANGUAGE DeriveFunctor #-} import Prelude hiding (Maybe, Just, Nothing) data Identity a = Identity a data Maybe a = Just a | Nothing deriving Functor instance Applicative Maybe where pure = Just Just f <*> Just a = Just (f a) _ <*> _ = Nothing instance Monad Maybe where return = Just (Just a) >>= f = f a Nothing >>= _ = Nothing fail _ = Nothing data Exp = Lit Integer | Add Exp Exp | Sub Exp Exp | Mul Exp Exp | Div Exp Exp deriving (Show) safeEval (Add e1 e2) = do n1 <- safeEval e1 n2 <- safeEval e2 return (n1+n2)
null
https://raw.githubusercontent.com/HaskellZhangSong/Introduction_to_Haskell_2ed_source/140c50fdccfe608fe499ecf2d8a3732f531173f5/C10/Maybe.hs
haskell
# LANGUAGE DeriveFunctor # import Prelude hiding (Maybe, Just, Nothing) data Identity a = Identity a data Maybe a = Just a | Nothing deriving Functor instance Applicative Maybe where pure = Just Just f <*> Just a = Just (f a) _ <*> _ = Nothing instance Monad Maybe where return = Just (Just a) >>= f = f a Nothing >>= _ = Nothing fail _ = Nothing data Exp = Lit Integer | Add Exp Exp | Sub Exp Exp | Mul Exp Exp | Div Exp Exp deriving (Show) safeEval (Add e1 e2) = do n1 <- safeEval e1 n2 <- safeEval e2 return (n1+n2)
278c8b1554e573a6721a83cb3c7826fc7ab84ccc9ad0f1f7b50aa65ab396c703
spawnfest/eep49ers
io_lib_format.erl
%% %% %CopyrightBegin% %% Copyright Ericsson AB 1996 - 2019 . All Rights Reserved . %% Licensed under the Apache License , Version 2.0 ( the " License " ) ; %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% %CopyrightEnd% %% -module(io_lib_format). %% Formatting functions of io library. -export([fwrite/2,fwrite/3,fwrite_g/1,indentation/2,scan/2,unscan/1, build/1, build/2]). Format the arguments in after string Format . Just generate %% an error if there is an error in the arguments. %% %% To do the printing command correctly we need to calculate the %% current indentation for everything before it. This may be very expensive , especially when it is not needed , so we first determine %% if, and for how long, we need to calculate the indentations. We do this by first collecting all the control sequences and %% corresponding arguments, then counting the print sequences and %% then building the output. This method has some drawbacks, it does two passes over the format string and creates more temporary data , and it also splits the handling of the control characters into two %% parts. -spec fwrite(Format, Data) -> io_lib:chars() when Format :: io:format(), Data :: [term()]. fwrite(Format, Args) -> build(scan(Format, Args)). -spec fwrite(Format, Data, Options) -> io_lib:chars() when Format :: io:format(), Data :: [term()], Options :: [Option], Option :: {'chars_limit', CharsLimit}, CharsLimit :: io_lib:chars_limit(). fwrite(Format, Args, Options) -> build(scan(Format, Args), Options). %% Build the output text for a pre-parsed format list. -spec build(FormatList) -> io_lib:chars() when FormatList :: [char() | io_lib:format_spec()]. build(Cs) -> build(Cs, []). -spec build(FormatList, Options) -> io_lib:chars() when FormatList :: [char() | io_lib:format_spec()], Options :: [Option], Option :: {'chars_limit', CharsLimit}, CharsLimit :: io_lib:chars_limit(). build(Cs, Options) -> CharsLimit = get_option(chars_limit, Options, -1), Res1 = build_small(Cs), {P, S, W, Other} = count_small(Res1), case P + S + W of 0 -> Res1; NumOfLimited -> RemainingChars = sub(CharsLimit, Other), build_limited(Res1, P, NumOfLimited, RemainingChars, 0) end. Parse all control sequences in the format string . -spec scan(Format, Data) -> FormatList when Format :: io:format(), Data :: [term()], FormatList :: [char() | io_lib:format_spec()]. scan(Format, Args) when is_atom(Format) -> scan(atom_to_list(Format), Args); scan(Format, Args) when is_binary(Format) -> scan(binary_to_list(Format), Args); scan(Format, Args) -> collect(Format, Args). %% Revert a pre-parsed format list to a plain character list and a %% list of arguments. -spec unscan(FormatList) -> {Format, Data} when FormatList :: [char() | io_lib:format_spec()], Format :: io:format(), Data :: [term()]. unscan(Cs) -> {print(Cs), args(Cs)}. args([#{args := As} | Cs]) -> As ++ args(Cs); args([_C | Cs]) -> args(Cs); args([]) -> []. print([#{control_char := C, width := F, adjust := Ad, precision := P, pad_char := Pad, encoding := Encoding, strings := Strings} | Cs]) -> print(C, F, Ad, P, Pad, Encoding, Strings) ++ print(Cs); print([C | Cs]) -> [C | print(Cs)]; print([]) -> []. 
print(C, F, Ad, P, Pad, Encoding, Strings) -> [$~] ++ print_field_width(F, Ad) ++ print_precision(P, Pad) ++ print_pad_char(Pad) ++ print_encoding(Encoding) ++ print_strings(Strings) ++ [C]. print_field_width(none, _Ad) -> ""; print_field_width(F, left) -> integer_to_list(-F); print_field_width(F, right) -> integer_to_list(F). print_precision(none, $\s) -> ""; pad must be second dot print_precision(P, _Pad) -> [$. | integer_to_list(P)]. print_pad_char($\s) -> ""; % default, no need to make explicit print_pad_char(Pad) -> [$., Pad]. print_encoding(unicode) -> "t"; print_encoding(latin1) -> "". print_strings(false) -> "l"; print_strings(true) -> "". collect([$~|Fmt0], Args0) -> {C,Fmt1,Args1} = collect_cseq(Fmt0, Args0), [C|collect(Fmt1, Args1)]; collect([C|Fmt], Args) -> [C|collect(Fmt, Args)]; collect([], []) -> []. collect_cseq(Fmt0, Args0) -> {F,Ad,Fmt1,Args1} = field_width(Fmt0, Args0), {P,Fmt2,Args2} = precision(Fmt1, Args1), {Pad,Fmt3,Args3} = pad_char(Fmt2, Args2), Spec0 = #{width => F, adjust => Ad, precision => P, pad_char => Pad, encoding => latin1, strings => true}, {Spec1,Fmt4} = modifiers(Fmt3, Spec0), {C,As,Fmt5,Args4} = collect_cc(Fmt4, Args3), Spec2 = Spec1#{control_char => C, args => As}, {Spec2,Fmt5,Args4}. modifiers([$t|Fmt], Spec) -> modifiers(Fmt, Spec#{encoding => unicode}); modifiers([$l|Fmt], Spec) -> modifiers(Fmt, Spec#{strings => false}); modifiers(Fmt, Spec) -> {Spec, Fmt}. field_width([$-|Fmt0], Args0) -> {F,Fmt,Args} = field_value(Fmt0, Args0), field_width(-F, Fmt, Args); field_width(Fmt0, Args0) -> {F,Fmt,Args} = field_value(Fmt0, Args0), field_width(F, Fmt, Args). field_width(F, Fmt, Args) when F < 0 -> {-F,left,Fmt,Args}; field_width(F, Fmt, Args) when F >= 0 -> {F,right,Fmt,Args}. precision([$.|Fmt], Args) -> field_value(Fmt, Args); precision(Fmt, Args) -> {none,Fmt,Args}. field_value([$*|Fmt], [A|Args]) when is_integer(A) -> {A,Fmt,Args}; field_value([C|Fmt], Args) when is_integer(C), C >= $0, C =< $9 -> field_value([C|Fmt], Args, 0); field_value(Fmt, Args) -> {none,Fmt,Args}. field_value([C|Fmt], Args, F) when is_integer(C), C >= $0, C =< $9 -> field_value(Fmt, Args, 10*F + (C - $0)); field_value(Fmt, Args, F) -> %Default case {F,Fmt,Args}. pad_char([$.,$*|Fmt], [Pad|Args]) -> {Pad,Fmt,Args}; pad_char([$.,Pad|Fmt], Args) -> {Pad,Fmt,Args}; pad_char(Fmt, Args) -> {$\s,Fmt,Args}. %% collect_cc([FormatChar], [Argument]) -> %% {Control,[ControlArg],[FormatChar],[Arg]}. %% Here we collect the argments for each control character. %% Be explicit to cause failure early. 
collect_cc([$w|Fmt], [A|Args]) -> {$w,[A],Fmt,Args}; collect_cc([$p|Fmt], [A|Args]) -> {$p,[A],Fmt,Args}; collect_cc([$W|Fmt], [A,Depth|Args]) -> {$W,[A,Depth],Fmt,Args}; collect_cc([$P|Fmt], [A,Depth|Args]) -> {$P,[A,Depth],Fmt,Args}; collect_cc([$s|Fmt], [A|Args]) -> {$s,[A],Fmt,Args}; collect_cc([$e|Fmt], [A|Args]) -> {$e,[A],Fmt,Args}; collect_cc([$f|Fmt], [A|Args]) -> {$f,[A],Fmt,Args}; collect_cc([$g|Fmt], [A|Args]) -> {$g,[A],Fmt,Args}; collect_cc([$b|Fmt], [A|Args]) -> {$b,[A],Fmt,Args}; collect_cc([$B|Fmt], [A|Args]) -> {$B,[A],Fmt,Args}; collect_cc([$x|Fmt], [A,Prefix|Args]) -> {$x,[A,Prefix],Fmt,Args}; collect_cc([$X|Fmt], [A,Prefix|Args]) -> {$X,[A,Prefix],Fmt,Args}; collect_cc([$+|Fmt], [A|Args]) -> {$+,[A],Fmt,Args}; collect_cc([$#|Fmt], [A|Args]) -> {$#,[A],Fmt,Args}; collect_cc([$c|Fmt], [A|Args]) -> {$c,[A],Fmt,Args}; collect_cc([$~|Fmt], Args) when is_list(Args) -> {$~,[],Fmt,Args}; collect_cc([$n|Fmt], Args) when is_list(Args) -> {$n,[],Fmt,Args}; collect_cc([$i|Fmt], [A|Args]) -> {$i,[A],Fmt,Args}. %% count_small([ControlC]) -> Count. %% Count the number of big (pPwWsS) print requests and %% number of characters of other print (small) requests. count_small(Cs) -> count_small(Cs, #{p => 0, s => 0, w => 0, other => 0}). count_small([#{control_char := $p}|Cs], #{p := P} = Cnts) -> count_small(Cs, Cnts#{p := P + 1}); count_small([#{control_char := $P}|Cs], #{p := P} = Cnts) -> count_small(Cs, Cnts#{p := P + 1}); count_small([#{control_char := $w}|Cs], #{w := W} = Cnts) -> count_small(Cs, Cnts#{w := W + 1}); count_small([#{control_char := $W}|Cs], #{w := W} = Cnts) -> count_small(Cs, Cnts#{w := W + 1}); count_small([#{control_char := $s}|Cs], #{w := W} = Cnts) -> count_small(Cs, Cnts#{w := W + 1}); count_small([S|Cs], #{other := Other} = Cnts) when is_list(S); is_binary(S) -> count_small(Cs, Cnts#{other := Other + io_lib:chars_length(S)}); count_small([C|Cs], #{other := Other} = Cnts) when is_integer(C) -> count_small(Cs, Cnts#{other := Other + 1}); count_small([], #{p := P, s := S, w := W, other := Other}) -> {P, S, W, Other}. %% build_small([Control]) -> io_lib:chars(). %% Interpret the control structures, but only the small ones. %% The big ones are saved for later. build_limited([Control ] , NumberOfPps , NumberOfLimited , %% CharsLimit, Indentation) %% Interpret the control structures. Count the number of print %% remaining and only calculate indentation when necessary. Must also %% be smart when calculating indentation for characters in format. build_small([#{control_char := C, args := As, width := F, adjust := Ad, precision := P, pad_char := Pad, encoding := Enc}=CC | Cs]) -> case control_small(C, As, F, Ad, P, Pad, Enc) of not_small -> [CC | build_small(Cs)]; S -> lists:flatten(S) ++ build_small(Cs) end; build_small([C|Cs]) -> [C|build_small(Cs)]; build_small([]) -> []. 
build_limited([#{control_char := C, args := As, width := F, adjust := Ad, precision := P, pad_char := Pad, encoding := Enc, strings := Str} | Cs], NumOfPs0, Count0, MaxLen0, I) -> MaxChars = if MaxLen0 < 0 -> MaxLen0; true -> MaxLen0 div Count0 end, S = control_limited(C, As, F, Ad, P, Pad, Enc, Str, MaxChars, I), NumOfPs = decr_pc(C, NumOfPs0), Count = Count0 - 1, MaxLen = if MaxLen0 < 0 -> % optimization MaxLen0; true -> Len = io_lib:chars_length(S), sub(MaxLen0, Len) end, if NumOfPs > 0 -> [S|build_limited(Cs, NumOfPs, Count, MaxLen, indentation(S, I))]; true -> [S|build_limited(Cs, NumOfPs, Count, MaxLen, I)] end; build_limited([$\n|Cs], NumOfPs, Count, MaxLen, _I) -> [$\n|build_limited(Cs, NumOfPs, Count, MaxLen, 0)]; build_limited([$\t|Cs], NumOfPs, Count, MaxLen, I) -> [$\t|build_limited(Cs, NumOfPs, Count, MaxLen, ((I + 8) div 8) * 8)]; build_limited([C|Cs], NumOfPs, Count, MaxLen, I) -> [C|build_limited(Cs, NumOfPs, Count, MaxLen, I+1)]; build_limited([], _, _, _, _) -> []. decr_pc($p, Pc) -> Pc - 1; decr_pc($P, Pc) -> Pc - 1; decr_pc(_, Pc) -> Pc. %% Calculate the indentation of the end of a string given its start indentation . We assume tabs at 8 cols . -spec indentation(String, StartIndent) -> integer() when String :: io_lib:chars(), StartIndent :: integer(). indentation([$\n|Cs], _I) -> indentation(Cs, 0); indentation([$\t|Cs], I) -> indentation(Cs, ((I + 8) div 8) * 8); indentation([C|Cs], I) when is_integer(C) -> indentation(Cs, I+1); indentation([C|Cs], I) -> indentation(Cs, indentation(C, I)); indentation([], I) -> I. %% control_small(FormatChar, [Argument], FieldWidth, Adjust, Precision, PadChar , Encoding ) - > String , [ Argument ] , FieldWidth , Adjust , Precision , PadChar , Encoding , StringP , , Indentation ) - > String %% These are the dispatch functions for the various formatting controls. 
control_small($s, [A], F, Adj, P, Pad, latin1=Enc) when is_atom(A) -> L = iolist_to_chars(atom_to_list(A)), string(L, F, Adj, P, Pad, Enc); control_small($s, [A], F, Adj, P, Pad, unicode=Enc) when is_atom(A) -> string(atom_to_list(A), F, Adj, P, Pad, Enc); control_small($e, [A], F, Adj, P, Pad, _Enc) when is_float(A) -> fwrite_e(A, F, Adj, P, Pad); control_small($f, [A], F, Adj, P, Pad, _Enc) when is_float(A) -> fwrite_f(A, F, Adj, P, Pad); control_small($g, [A], F, Adj, P, Pad, _Enc) when is_float(A) -> fwrite_g(A, F, Adj, P, Pad); control_small($b, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> unprefixed_integer(A, F, Adj, base(P), Pad, true); control_small($B, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> unprefixed_integer(A, F, Adj, base(P), Pad, false); control_small($x, [A,Prefix], F, Adj, P, Pad, _Enc) when is_integer(A), is_atom(Prefix) -> prefixed_integer(A, F, Adj, base(P), Pad, atom_to_list(Prefix), true); control_small($x, [A,Prefix], F, Adj, P, Pad, _Enc) when is_integer(A) -> true = io_lib:deep_char_list(Prefix), %Check if Prefix a character list prefixed_integer(A, F, Adj, base(P), Pad, Prefix, true); control_small($X, [A,Prefix], F, Adj, P, Pad, _Enc) when is_integer(A), is_atom(Prefix) -> prefixed_integer(A, F, Adj, base(P), Pad, atom_to_list(Prefix), false); control_small($X, [A,Prefix], F, Adj, P, Pad, _Enc) when is_integer(A) -> true = io_lib:deep_char_list(Prefix), %Check if Prefix a character list prefixed_integer(A, F, Adj, base(P), Pad, Prefix, false); control_small($+, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> Base = base(P), Prefix = [integer_to_list(Base), $#], prefixed_integer(A, F, Adj, Base, Pad, Prefix, true); control_small($#, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> Base = base(P), Prefix = [integer_to_list(Base), $#], prefixed_integer(A, F, Adj, Base, Pad, Prefix, false); control_small($c, [A], F, Adj, P, Pad, unicode) when is_integer(A) -> char(A, F, Adj, P, Pad); control_small($c, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> char(A band 255, F, Adj, P, Pad); control_small($~, [], F, Adj, P, Pad, _Enc) -> char($~, F, Adj, P, Pad); control_small($n, [], F, Adj, P, Pad, _Enc) -> newline(F, Adj, P, Pad); control_small($i, [_A], _F, _Adj, _P, _Pad, _Enc) -> []; control_small(_C, _As, _F, _Adj, _P, _Pad, _Enc) -> not_small. control_limited($s, [L0], F, Adj, P, Pad, latin1=Enc, _Str, CL, _I) -> L = iolist_to_chars(L0, F, CL), string(L, limit_field(F, CL), Adj, P, Pad, Enc); control_limited($s, [L0], F, Adj, P, Pad, unicode=Enc, _Str, CL, _I) -> L = cdata_to_chars(L0, F, CL), uniconv(string(L, limit_field(F, CL), Adj, P, Pad, Enc)); control_limited($w, [A], F, Adj, P, Pad, Enc, _Str, CL, _I) -> Chars = io_lib:write(A, [{depth, -1}, {encoding, Enc}, {chars_limit, CL}]), term(Chars, F, Adj, P, Pad); control_limited($p, [A], F, Adj, P, Pad, Enc, Str, CL, I) -> print(A, -1, F, Adj, P, Pad, Enc, Str, CL, I); control_limited($W, [A,Depth], F, Adj, P, Pad, Enc, _Str, CL, _I) when is_integer(Depth) -> Chars = io_lib:write(A, [{depth, Depth}, {encoding, Enc}, {chars_limit, CL}]), term(Chars, F, Adj, P, Pad); control_limited($P, [A,Depth], F, Adj, P, Pad, Enc, Str, CL, I) when is_integer(Depth) -> print(A, Depth, F, Adj, P, Pad, Enc, Str, CL, I). -ifdef(UNICODE_AS_BINARIES). uniconv(C) -> unicode:characters_to_binary(C,unicode). -else. uniconv(C) -> C. -endif. %% Default integer base base(none) -> 10; base(B) when is_integer(B) -> B. term(TermList , Field , Adjust , Precision , PadChar ) %% Output the characters in a term. 
Adjust the characters within the field if length less than padding %% with PadChar. term(T, none, _Adj, none, _Pad) -> T; term(T, none, Adj, P, Pad) -> term(T, P, Adj, P, Pad); term(T, F, Adj, P0, Pad) -> L = io_lib:chars_length(T), P = erlang:min(L, case P0 of none -> F; _ -> min(P0, F) end), if L > P -> adjust(chars($*, P), chars(Pad, F-P), Adj); F >= P -> adjust(T, chars(Pad, F-L), Adj) end. print(Term , Depth , Field , Adjust , Precision , PadChar , Encoding , %% Indentation) %% Print a term. Field width sets maximum line length, Precision sets %% initial indentation. print(T, D, none, Adj, P, Pad, E, Str, ChLim, I) -> print(T, D, 80, Adj, P, Pad, E, Str, ChLim, I); print(T, D, F, Adj, none, Pad, E, Str, ChLim, I) -> print(T, D, F, Adj, I+1, Pad, E, Str, ChLim, I); print(T, D, F, right, P, _Pad, Enc, Str, ChLim, _I) -> Options = [{chars_limit, ChLim}, {column, P}, {line_length, F}, {depth, D}, {encoding, Enc}, {strings, Str}], io_lib_pretty:print(T, Options). fwrite_e(Float , Field , Adjust , Precision , PadChar ) fwrite_e(Fl, none, Adj, none, Pad) -> %Default values fwrite_e(Fl, none, Adj, 6, Pad); fwrite_e(Fl, none, _Adj, P, _Pad) when P >= 2 -> float_e(Fl, float_data(Fl), P); fwrite_e(Fl, F, Adj, none, Pad) -> fwrite_e(Fl, F, Adj, 6, Pad); fwrite_e(Fl, F, Adj, P, Pad) when P >= 2 -> term(float_e(Fl, float_data(Fl), P), F, Adj, F, Pad). float_e(Fl, Fd, P) -> signbit(Fl) ++ abs_float_e(abs(Fl), Fd, P). abs_float_e(_Fl, {Ds,E}, P) -> case float_man(Ds, 1, P-1) of {[$0|Fs],true} -> [[$1|Fs]|float_exp(E)]; {Fs,false} -> [Fs|float_exp(E-1)] end. float_man([Digit ] , Icount , Dcount ) - > { [ Char],CarryFlag } . Generate the characters in the mantissa from the digits with Icount characters before the ' . ' and Dcount decimals . Handle carry and let %% caller decide what to do at top. float_man(Ds, 0, Dc) -> {Cs,C} = float_man(Ds, Dc), {[$.|Cs],C}; float_man([D|Ds], I, Dc) -> case float_man(Ds, I-1, Dc) of {Cs,true} when D =:= $9 -> {[$0|Cs],true}; {Cs,true} -> {[D+1|Cs],false}; {Cs,false} -> {[D|Cs],false} end; float_man([], I, Dc) -> %Pad with 0's {lists:duplicate(I, $0) ++ [$.|lists:duplicate(Dc, $0)],false}. float_man([D|_], 0) when D >= $5 -> {[],true}; float_man([_|_], 0) -> {[],false}; float_man([D|Ds], Dc) -> case float_man(Ds, Dc-1) of {Cs,true} when D =:= $9 -> {[$0|Cs],true}; {Cs,true} -> {[D+1|Cs],false}; {Cs,false} -> {[D|Cs],false} end; float_man([], Dc) -> {lists:duplicate(Dc, $0),false}. %Pad with 0's float_exp(Exponent ) - > [ ] . %% Generate the exponent of a floating point number. Always include sign. float_exp(E) when E >= 0 -> [$e,$+|integer_to_list(E)]; float_exp(E) -> [$e|integer_to_list(E)]. fwrite_f(FloatData , Field , Adjust , Precision , PadChar ) fwrite_f(Fl, none, Adj, none, Pad) -> %Default values fwrite_f(Fl, none, Adj, 6, Pad); fwrite_f(Fl, none, _Adj, P, _Pad) when P >= 1 -> float_f(Fl, float_data(Fl), P); fwrite_f(Fl, F, Adj, none, Pad) -> fwrite_f(Fl, F, Adj, 6, Pad); fwrite_f(Fl, F, Adj, P, Pad) when P >= 1 -> term(float_f(Fl, float_data(Fl), P), F, Adj, F, Pad). float_f(Fl, Fd, P) -> signbit(Fl) ++ abs_float_f(abs(Fl), Fd, P). abs_float_f(Fl, {Ds,E}, P) when E =< 0 -> abs_float_f(Fl, {lists:duplicate(-E+1, $0)++Ds,1}, P); %Prepend enough 0's abs_float_f(_Fl, {Ds,E}, P) -> case float_man(Ds, E, P) of {Fs,true} -> "1" ++ Fs; %Handle carry {Fs,false} -> Fs end. %% signbit(Float) -> [$-] | [] signbit(Fl) when Fl < 0.0 -> [$-]; signbit(Fl) when Fl > 0.0 -> []; signbit(Fl) -> case <<Fl/float>> of <<1:1,_:63>> -> [$-]; _ -> [] end. 
%% float_data([FloatChar]) -> {[Digit],Exponent} float_data(Fl) -> float_data(float_to_list(Fl), []). float_data([$e|E], Ds) -> {lists:reverse(Ds),list_to_integer(E)+1}; float_data([D|Cs], Ds) when D >= $0, D =< $9 -> float_data(Cs, [D|Ds]); float_data([_|Cs], Ds) -> float_data(Cs, Ds). %% Returns a correctly rounded string that converts to Float when %% read back with list_to_float/1. -spec fwrite_g(float()) -> string(). fwrite_g(Float) -> float_to_list(Float, [short]). fwrite_g(Float , Field , Adjust , Precision , PadChar ) Use the f form if Float is > = 0.1 and < 1.0e4 , %% and the prints correctly in the f form, else the e form. %% Precision always means the # of significant digits. fwrite_g(Fl, F, Adj, none, Pad) -> fwrite_g(Fl, F, Adj, 6, Pad); fwrite_g(Fl, F, Adj, P, Pad) when P >= 1 -> A = abs(Fl), E = if A < 1.0e-1 -> -2; A < 1.0e0 -> -1; A < 1.0e1 -> 0; A < 1.0e2 -> 1; A < 1.0e3 -> 2; A < 1.0e4 -> 3; true -> fwrite_f end, if P =< 1, E =:= -1; P-1 > E, E >= -1 -> fwrite_f(Fl, F, Adj, P-1-E, Pad); P =< 1 -> fwrite_e(Fl, F, Adj, 2, Pad); true -> fwrite_e(Fl, F, Adj, P, Pad) end. iolist_to_chars(Cs, F, CharsLimit) when CharsLimit < 0; CharsLimit >= F -> iolist_to_chars(Cs); iolist_to_chars(Cs, _, CharsLimit) -> three dots iolist_to_chars([C|Cs]) when is_integer(C), C >= $\000, C =< $\377 -> [C | iolist_to_chars(Cs)]; iolist_to_chars([I|Cs]) -> [iolist_to_chars(I) | iolist_to_chars(Cs)]; iolist_to_chars([]) -> []; iolist_to_chars(B) when is_binary(B) -> binary_to_list(B). limit_iolist_to_chars(Cs, 0, S, normal) -> L = limit_iolist_to_chars(Cs, 4, S, final), case iolist_size(L) of N when N < 4 -> L; 4 -> "..." end; limit_iolist_to_chars(_Cs, 0, _S, final) -> []; limit_iolist_to_chars([C|Cs], Limit, S, Mode) when C >= $\000, C =< $\377 -> [C | limit_iolist_to_chars(Cs, Limit - 1, S, Mode)]; limit_iolist_to_chars([I|Cs], Limit, S, Mode) -> limit_iolist_to_chars(I, Limit, [Cs|S], Mode); limit_iolist_to_chars([], _Limit, [], _Mode) -> []; limit_iolist_to_chars([], Limit, [Cs|S], Mode) -> limit_iolist_to_chars(Cs, Limit, S, Mode); limit_iolist_to_chars(B, Limit, S, Mode) when is_binary(B) -> case byte_size(B) of Sz when Sz > Limit -> {B1, B2} = split_binary(B, Limit), [binary_to_list(B1) | limit_iolist_to_chars(B2, 0, S, Mode)]; Sz -> [binary_to_list(B) | limit_iolist_to_chars([], Limit-Sz, S, Mode)] end. cdata_to_chars(Cs, F, CharsLimit) when CharsLimit < 0; CharsLimit >= F -> cdata_to_chars(Cs); cdata_to_chars(Cs, _, CharsLimit) -> three dots cdata_to_chars([C|Cs]) when is_integer(C), C >= $\000 -> [C | cdata_to_chars(Cs)]; cdata_to_chars([I|Cs]) -> [cdata_to_chars(I) | cdata_to_chars(Cs)]; cdata_to_chars([]) -> []; cdata_to_chars(B) when is_binary(B) -> case catch unicode:characters_to_list(B) of L when is_list(L) -> L; _ -> binary_to_list(B) end. limit_cdata_to_chars(Cs, 0, normal) -> L = limit_cdata_to_chars(Cs, 4, final), case string:length(L) of N when N < 4 -> L; 4 -> "..." end; limit_cdata_to_chars(_Cs, 0, final) -> []; limit_cdata_to_chars(Cs, Limit, Mode) -> case string:next_grapheme(Cs) of {error, <<C,Cs1/binary>>} -> %% This is how ~ts handles Latin1 binaries with option %% chars_limit. [C | limit_cdata_to_chars(Cs1, Limit - 1, Mode)]; {error, [C|Cs1]} -> % not all versions of module string return this [C | limit_cdata_to_chars(Cs1, Limit - 1, Mode)]; [] -> []; [GC|Cs1] -> [GC | limit_cdata_to_chars(Cs1, Limit - 1, Mode)] end. limit_field(F, CharsLimit) when CharsLimit < 0; F =:= none -> F; limit_field(F, CharsLimit) -> max(3, min(F, CharsLimit)). 
string(String , Field , Adjust , Precision , PadChar ) string(S, none, _Adj, none, _Pad, _Enc) -> S; string(S, F, Adj, none, Pad, Enc) -> string_field(S, F, Adj, io_lib:chars_length(S), Pad, Enc); string(S, none, _Adj, P, Pad, Enc) -> string_field(S, P, left, io_lib:chars_length(S), Pad, Enc); string(S, F, Adj, P, Pad, Enc) when F >= P -> N = io_lib:chars_length(S), if F > P -> if N > P -> adjust(flat_trunc(S, P, Enc), chars(Pad, F-P), Adj); N < P -> adjust([S|chars(Pad, P-N)], chars(Pad, F-P), Adj); true -> % N == P adjust(S, chars(Pad, F-P), Adj) end; true -> % F == P string_field(S, F, Adj, N, Pad, Enc) end. string_field(S, F, _Adj, N, _Pad, Enc) when N > F -> flat_trunc(S, F, Enc); string_field(S, F, Adj, N, Pad, _Enc) when N < F -> adjust(S, chars(Pad, F-N), Adj); string_field(S, _, _, _, _, _) -> % N == F S. unprefixed_integer(Int , Field , Adjust , Base , PadChar , Lowercase ) - > [ ] . unprefixed_integer(Int, F, Adj, Base, Pad, Lowercase) when Base >= 2, Base =< 1+$Z-$A+10 -> if Int < 0 -> S = cond_lowercase(erlang:integer_to_list(-Int, Base), Lowercase), term([$-|S], F, Adj, none, Pad); true -> S = cond_lowercase(erlang:integer_to_list(Int, Base), Lowercase), term(S, F, Adj, none, Pad) end. prefixed_integer(Int , Field , Adjust , Base , PadChar , Prefix , Lowercase ) - > [ ] . prefixed_integer(Int, F, Adj, Base, Pad, Prefix, Lowercase) when Base >= 2, Base =< 1+$Z-$A+10 -> if Int < 0 -> S = cond_lowercase(erlang:integer_to_list(-Int, Base), Lowercase), term([$-,Prefix|S], F, Adj, none, Pad); true -> S = cond_lowercase(erlang:integer_to_list(Int, Base), Lowercase), term([Prefix|S], F, Adj, none, Pad) end. char(Char , Field , Adjust , Precision , PadChar ) - > chars ( ) . char(C, none, _Adj, none, _Pad) -> [C]; char(C, F, _Adj, none, _Pad) -> chars(C, F); char(C, none, _Adj, P, _Pad) -> chars(C, P); char(C, F, Adj, P, Pad) when F >= P -> adjust(chars(C, P), chars(Pad, F - P), Adj). newline(Field , Adjust , Precision , PadChar ) - > [ ] . newline(none, _Adj, _P, _Pad) -> "\n"; newline(F, right, _P, _Pad) -> chars($\n, F). %% Utilities %% adjust(Data, [], _) -> Data; adjust(Data, Pad, left) -> [Data|Pad]; adjust(Data, Pad, right) -> [Pad|Data]. %% Flatten and truncate a deep list to at most N elements. flat_trunc(List, N, latin1) when is_integer(N), N >= 0 -> {S, _} = lists:split(N, lists:flatten(List)), S; flat_trunc(List, N, unicode) when is_integer(N), N >= 0 -> string:slice(List, 0, N). A deep version of lists : duplicate/2 chars(_C, 0) -> []; chars(C, 1) -> [C]; chars(C, 2) -> [C,C]; chars(C, 3) -> [C,C,C]; chars(C, N) when is_integer(N), (N band 1) =:= 0 -> S = chars(C, N bsr 1), [S|S]; chars(C, N) when is_integer(N) -> S = chars(C, N bsr 1), [C,S|S]. %chars(C, N, Tail) -> [ chars(C , N)|Tail ] . %% Lowercase conversion cond_lowercase(String, true) -> lowercase(String); cond_lowercase(String,false) -> String. lowercase([H|T]) when is_integer(H), H >= $A, H =< $Z -> [(H-$A+$a)|lowercase(T)]; lowercase([H|T]) -> [H|lowercase(T)]; lowercase([]) -> []. %% Make sure T does change sign. sub(T, _) when T < 0 -> T; sub(T, E) when T >= E -> T - E; sub(_, _) -> 0. get_option(Key, TupleList, Default) -> case lists:keyfind(Key, 1, TupleList) of false -> Default; {Key, Value} -> Value; _ -> Default end.
null
https://raw.githubusercontent.com/spawnfest/eep49ers/d1020fd625a0bbda8ab01caf0e1738eb1cf74886/lib/stdlib/src/io_lib_format.erl
erlang
%CopyrightBegin% you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. %CopyrightEnd% Formatting functions of io library. an error if there is an error in the arguments. To do the printing command correctly we need to calculate the current indentation for everything before it. This may be very if, and for how long, we need to calculate the indentations. We do corresponding arguments, then counting the print sequences and then building the output. This method has some drawbacks, it does parts. Build the output text for a pre-parsed format list. Revert a pre-parsed format list to a plain character list and a list of arguments. default, no need to make explicit Default case collect_cc([FormatChar], [Argument]) -> {Control,[ControlArg],[FormatChar],[Arg]}. Here we collect the argments for each control character. Be explicit to cause failure early. count_small([ControlC]) -> Count. Count the number of big (pPwWsS) print requests and number of characters of other print (small) requests. build_small([Control]) -> io_lib:chars(). Interpret the control structures, but only the small ones. The big ones are saved for later. CharsLimit, Indentation) Interpret the control structures. Count the number of print remaining and only calculate indentation when necessary. Must also be smart when calculating indentation for characters in format. optimization Calculate the indentation of the end of a string given its start control_small(FormatChar, [Argument], FieldWidth, Adjust, Precision, These are the dispatch functions for the various formatting controls. Check if Prefix a character list Check if Prefix a character list Default integer base Output the characters in a term. with PadChar. Indentation) Print a term. Field width sets maximum line length, Precision sets initial indentation. Default values caller decide what to do at top. Pad with 0's Pad with 0's Generate the exponent of a floating point number. Always include sign. Default values Prepend enough 0's Handle carry signbit(Float) -> [$-] | [] float_data([FloatChar]) -> {[Digit],Exponent} Returns a correctly rounded string that converts to Float when read back with list_to_float/1. and the prints correctly in the f form, else the e form. Precision always means the # of significant digits. This is how ~ts handles Latin1 binaries with option chars_limit. not all versions of module string return this N == P F == P N == F Flatten and truncate a deep list to at most N elements. chars(C, N, Tail) -> Lowercase conversion Make sure T does change sign.
Copyright Ericsson AB 1996 - 2019 . All Rights Reserved . Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , -module(io_lib_format). -export([fwrite/2,fwrite/3,fwrite_g/1,indentation/2,scan/2,unscan/1, build/1, build/2]). Format the arguments in after string Format . Just generate expensive , especially when it is not needed , so we first determine this by first collecting all the control sequences and two passes over the format string and creates more temporary data , and it also splits the handling of the control characters into two -spec fwrite(Format, Data) -> io_lib:chars() when Format :: io:format(), Data :: [term()]. fwrite(Format, Args) -> build(scan(Format, Args)). -spec fwrite(Format, Data, Options) -> io_lib:chars() when Format :: io:format(), Data :: [term()], Options :: [Option], Option :: {'chars_limit', CharsLimit}, CharsLimit :: io_lib:chars_limit(). fwrite(Format, Args, Options) -> build(scan(Format, Args), Options). -spec build(FormatList) -> io_lib:chars() when FormatList :: [char() | io_lib:format_spec()]. build(Cs) -> build(Cs, []). -spec build(FormatList, Options) -> io_lib:chars() when FormatList :: [char() | io_lib:format_spec()], Options :: [Option], Option :: {'chars_limit', CharsLimit}, CharsLimit :: io_lib:chars_limit(). build(Cs, Options) -> CharsLimit = get_option(chars_limit, Options, -1), Res1 = build_small(Cs), {P, S, W, Other} = count_small(Res1), case P + S + W of 0 -> Res1; NumOfLimited -> RemainingChars = sub(CharsLimit, Other), build_limited(Res1, P, NumOfLimited, RemainingChars, 0) end. Parse all control sequences in the format string . -spec scan(Format, Data) -> FormatList when Format :: io:format(), Data :: [term()], FormatList :: [char() | io_lib:format_spec()]. scan(Format, Args) when is_atom(Format) -> scan(atom_to_list(Format), Args); scan(Format, Args) when is_binary(Format) -> scan(binary_to_list(Format), Args); scan(Format, Args) -> collect(Format, Args). -spec unscan(FormatList) -> {Format, Data} when FormatList :: [char() | io_lib:format_spec()], Format :: io:format(), Data :: [term()]. unscan(Cs) -> {print(Cs), args(Cs)}. args([#{args := As} | Cs]) -> As ++ args(Cs); args([_C | Cs]) -> args(Cs); args([]) -> []. print([#{control_char := C, width := F, adjust := Ad, precision := P, pad_char := Pad, encoding := Encoding, strings := Strings} | Cs]) -> print(C, F, Ad, P, Pad, Encoding, Strings) ++ print(Cs); print([C | Cs]) -> [C | print(Cs)]; print([]) -> []. print(C, F, Ad, P, Pad, Encoding, Strings) -> [$~] ++ print_field_width(F, Ad) ++ print_precision(P, Pad) ++ print_pad_char(Pad) ++ print_encoding(Encoding) ++ print_strings(Strings) ++ [C]. print_field_width(none, _Ad) -> ""; print_field_width(F, left) -> integer_to_list(-F); print_field_width(F, right) -> integer_to_list(F). print_precision(none, $\s) -> ""; pad must be second dot print_precision(P, _Pad) -> [$. | integer_to_list(P)]. print_pad_char(Pad) -> [$., Pad]. print_encoding(unicode) -> "t"; print_encoding(latin1) -> "". print_strings(false) -> "l"; print_strings(true) -> "". collect([$~|Fmt0], Args0) -> {C,Fmt1,Args1} = collect_cseq(Fmt0, Args0), [C|collect(Fmt1, Args1)]; collect([C|Fmt], Args) -> [C|collect(Fmt, Args)]; collect([], []) -> []. 
collect_cseq(Fmt0, Args0) -> {F,Ad,Fmt1,Args1} = field_width(Fmt0, Args0), {P,Fmt2,Args2} = precision(Fmt1, Args1), {Pad,Fmt3,Args3} = pad_char(Fmt2, Args2), Spec0 = #{width => F, adjust => Ad, precision => P, pad_char => Pad, encoding => latin1, strings => true}, {Spec1,Fmt4} = modifiers(Fmt3, Spec0), {C,As,Fmt5,Args4} = collect_cc(Fmt4, Args3), Spec2 = Spec1#{control_char => C, args => As}, {Spec2,Fmt5,Args4}. modifiers([$t|Fmt], Spec) -> modifiers(Fmt, Spec#{encoding => unicode}); modifiers([$l|Fmt], Spec) -> modifiers(Fmt, Spec#{strings => false}); modifiers(Fmt, Spec) -> {Spec, Fmt}. field_width([$-|Fmt0], Args0) -> {F,Fmt,Args} = field_value(Fmt0, Args0), field_width(-F, Fmt, Args); field_width(Fmt0, Args0) -> {F,Fmt,Args} = field_value(Fmt0, Args0), field_width(F, Fmt, Args). field_width(F, Fmt, Args) when F < 0 -> {-F,left,Fmt,Args}; field_width(F, Fmt, Args) when F >= 0 -> {F,right,Fmt,Args}. precision([$.|Fmt], Args) -> field_value(Fmt, Args); precision(Fmt, Args) -> {none,Fmt,Args}. field_value([$*|Fmt], [A|Args]) when is_integer(A) -> {A,Fmt,Args}; field_value([C|Fmt], Args) when is_integer(C), C >= $0, C =< $9 -> field_value([C|Fmt], Args, 0); field_value(Fmt, Args) -> {none,Fmt,Args}. field_value([C|Fmt], Args, F) when is_integer(C), C >= $0, C =< $9 -> field_value(Fmt, Args, 10*F + (C - $0)); {F,Fmt,Args}. pad_char([$.,$*|Fmt], [Pad|Args]) -> {Pad,Fmt,Args}; pad_char([$.,Pad|Fmt], Args) -> {Pad,Fmt,Args}; pad_char(Fmt, Args) -> {$\s,Fmt,Args}. collect_cc([$w|Fmt], [A|Args]) -> {$w,[A],Fmt,Args}; collect_cc([$p|Fmt], [A|Args]) -> {$p,[A],Fmt,Args}; collect_cc([$W|Fmt], [A,Depth|Args]) -> {$W,[A,Depth],Fmt,Args}; collect_cc([$P|Fmt], [A,Depth|Args]) -> {$P,[A,Depth],Fmt,Args}; collect_cc([$s|Fmt], [A|Args]) -> {$s,[A],Fmt,Args}; collect_cc([$e|Fmt], [A|Args]) -> {$e,[A],Fmt,Args}; collect_cc([$f|Fmt], [A|Args]) -> {$f,[A],Fmt,Args}; collect_cc([$g|Fmt], [A|Args]) -> {$g,[A],Fmt,Args}; collect_cc([$b|Fmt], [A|Args]) -> {$b,[A],Fmt,Args}; collect_cc([$B|Fmt], [A|Args]) -> {$B,[A],Fmt,Args}; collect_cc([$x|Fmt], [A,Prefix|Args]) -> {$x,[A,Prefix],Fmt,Args}; collect_cc([$X|Fmt], [A,Prefix|Args]) -> {$X,[A,Prefix],Fmt,Args}; collect_cc([$+|Fmt], [A|Args]) -> {$+,[A],Fmt,Args}; collect_cc([$#|Fmt], [A|Args]) -> {$#,[A],Fmt,Args}; collect_cc([$c|Fmt], [A|Args]) -> {$c,[A],Fmt,Args}; collect_cc([$~|Fmt], Args) when is_list(Args) -> {$~,[],Fmt,Args}; collect_cc([$n|Fmt], Args) when is_list(Args) -> {$n,[],Fmt,Args}; collect_cc([$i|Fmt], [A|Args]) -> {$i,[A],Fmt,Args}. count_small(Cs) -> count_small(Cs, #{p => 0, s => 0, w => 0, other => 0}). count_small([#{control_char := $p}|Cs], #{p := P} = Cnts) -> count_small(Cs, Cnts#{p := P + 1}); count_small([#{control_char := $P}|Cs], #{p := P} = Cnts) -> count_small(Cs, Cnts#{p := P + 1}); count_small([#{control_char := $w}|Cs], #{w := W} = Cnts) -> count_small(Cs, Cnts#{w := W + 1}); count_small([#{control_char := $W}|Cs], #{w := W} = Cnts) -> count_small(Cs, Cnts#{w := W + 1}); count_small([#{control_char := $s}|Cs], #{w := W} = Cnts) -> count_small(Cs, Cnts#{w := W + 1}); count_small([S|Cs], #{other := Other} = Cnts) when is_list(S); is_binary(S) -> count_small(Cs, Cnts#{other := Other + io_lib:chars_length(S)}); count_small([C|Cs], #{other := Other} = Cnts) when is_integer(C) -> count_small(Cs, Cnts#{other := Other + 1}); count_small([], #{p := P, s := S, w := W, other := Other}) -> {P, S, W, Other}. 
build_limited([Control ] , NumberOfPps , NumberOfLimited , build_small([#{control_char := C, args := As, width := F, adjust := Ad, precision := P, pad_char := Pad, encoding := Enc}=CC | Cs]) -> case control_small(C, As, F, Ad, P, Pad, Enc) of not_small -> [CC | build_small(Cs)]; S -> lists:flatten(S) ++ build_small(Cs) end; build_small([C|Cs]) -> [C|build_small(Cs)]; build_small([]) -> []. build_limited([#{control_char := C, args := As, width := F, adjust := Ad, precision := P, pad_char := Pad, encoding := Enc, strings := Str} | Cs], NumOfPs0, Count0, MaxLen0, I) -> MaxChars = if MaxLen0 < 0 -> MaxLen0; true -> MaxLen0 div Count0 end, S = control_limited(C, As, F, Ad, P, Pad, Enc, Str, MaxChars, I), NumOfPs = decr_pc(C, NumOfPs0), Count = Count0 - 1, MaxLen = if MaxLen0 < 0 -> MaxLen0; true -> Len = io_lib:chars_length(S), sub(MaxLen0, Len) end, if NumOfPs > 0 -> [S|build_limited(Cs, NumOfPs, Count, MaxLen, indentation(S, I))]; true -> [S|build_limited(Cs, NumOfPs, Count, MaxLen, I)] end; build_limited([$\n|Cs], NumOfPs, Count, MaxLen, _I) -> [$\n|build_limited(Cs, NumOfPs, Count, MaxLen, 0)]; build_limited([$\t|Cs], NumOfPs, Count, MaxLen, I) -> [$\t|build_limited(Cs, NumOfPs, Count, MaxLen, ((I + 8) div 8) * 8)]; build_limited([C|Cs], NumOfPs, Count, MaxLen, I) -> [C|build_limited(Cs, NumOfPs, Count, MaxLen, I+1)]; build_limited([], _, _, _, _) -> []. decr_pc($p, Pc) -> Pc - 1; decr_pc($P, Pc) -> Pc - 1; decr_pc(_, Pc) -> Pc. indentation . We assume tabs at 8 cols . -spec indentation(String, StartIndent) -> integer() when String :: io_lib:chars(), StartIndent :: integer(). indentation([$\n|Cs], _I) -> indentation(Cs, 0); indentation([$\t|Cs], I) -> indentation(Cs, ((I + 8) div 8) * 8); indentation([C|Cs], I) when is_integer(C) -> indentation(Cs, I+1); indentation([C|Cs], I) -> indentation(Cs, indentation(C, I)); indentation([], I) -> I.
PadChar , Encoding ) - > String , [ Argument ] , FieldWidth , Adjust , Precision , PadChar , Encoding , StringP , , Indentation ) - > String control_small($s, [A], F, Adj, P, Pad, latin1=Enc) when is_atom(A) -> L = iolist_to_chars(atom_to_list(A)), string(L, F, Adj, P, Pad, Enc); control_small($s, [A], F, Adj, P, Pad, unicode=Enc) when is_atom(A) -> string(atom_to_list(A), F, Adj, P, Pad, Enc); control_small($e, [A], F, Adj, P, Pad, _Enc) when is_float(A) -> fwrite_e(A, F, Adj, P, Pad); control_small($f, [A], F, Adj, P, Pad, _Enc) when is_float(A) -> fwrite_f(A, F, Adj, P, Pad); control_small($g, [A], F, Adj, P, Pad, _Enc) when is_float(A) -> fwrite_g(A, F, Adj, P, Pad); control_small($b, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> unprefixed_integer(A, F, Adj, base(P), Pad, true); control_small($B, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> unprefixed_integer(A, F, Adj, base(P), Pad, false); control_small($x, [A,Prefix], F, Adj, P, Pad, _Enc) when is_integer(A), is_atom(Prefix) -> prefixed_integer(A, F, Adj, base(P), Pad, atom_to_list(Prefix), true); control_small($x, [A,Prefix], F, Adj, P, Pad, _Enc) when is_integer(A) -> prefixed_integer(A, F, Adj, base(P), Pad, Prefix, true); control_small($X, [A,Prefix], F, Adj, P, Pad, _Enc) when is_integer(A), is_atom(Prefix) -> prefixed_integer(A, F, Adj, base(P), Pad, atom_to_list(Prefix), false); control_small($X, [A,Prefix], F, Adj, P, Pad, _Enc) when is_integer(A) -> prefixed_integer(A, F, Adj, base(P), Pad, Prefix, false); control_small($+, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> Base = base(P), Prefix = [integer_to_list(Base), $#], prefixed_integer(A, F, Adj, Base, Pad, Prefix, true); control_small($#, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> Base = base(P), Prefix = [integer_to_list(Base), $#], prefixed_integer(A, F, Adj, Base, Pad, Prefix, false); control_small($c, [A], F, Adj, P, Pad, unicode) when is_integer(A) -> char(A, F, Adj, P, Pad); control_small($c, [A], F, Adj, P, Pad, _Enc) when is_integer(A) -> char(A band 255, F, Adj, P, Pad); control_small($~, [], F, Adj, P, Pad, _Enc) -> char($~, F, Adj, P, Pad); control_small($n, [], F, Adj, P, Pad, _Enc) -> newline(F, Adj, P, Pad); control_small($i, [_A], _F, _Adj, _P, _Pad, _Enc) -> []; control_small(_C, _As, _F, _Adj, _P, _Pad, _Enc) -> not_small. control_limited($s, [L0], F, Adj, P, Pad, latin1=Enc, _Str, CL, _I) -> L = iolist_to_chars(L0, F, CL), string(L, limit_field(F, CL), Adj, P, Pad, Enc); control_limited($s, [L0], F, Adj, P, Pad, unicode=Enc, _Str, CL, _I) -> L = cdata_to_chars(L0, F, CL), uniconv(string(L, limit_field(F, CL), Adj, P, Pad, Enc)); control_limited($w, [A], F, Adj, P, Pad, Enc, _Str, CL, _I) -> Chars = io_lib:write(A, [{depth, -1}, {encoding, Enc}, {chars_limit, CL}]), term(Chars, F, Adj, P, Pad); control_limited($p, [A], F, Adj, P, Pad, Enc, Str, CL, I) -> print(A, -1, F, Adj, P, Pad, Enc, Str, CL, I); control_limited($W, [A,Depth], F, Adj, P, Pad, Enc, _Str, CL, _I) when is_integer(Depth) -> Chars = io_lib:write(A, [{depth, Depth}, {encoding, Enc}, {chars_limit, CL}]), term(Chars, F, Adj, P, Pad); control_limited($P, [A,Depth], F, Adj, P, Pad, Enc, Str, CL, I) when is_integer(Depth) -> print(A, Depth, F, Adj, P, Pad, Enc, Str, CL, I). -ifdef(UNICODE_AS_BINARIES). uniconv(C) -> unicode:characters_to_binary(C,unicode). -else. uniconv(C) -> C. -endif. base(none) -> 10; base(B) when is_integer(B) -> B. 
term(TermList , Field , Adjust , Precision , PadChar ) Adjust the characters within the field if length less than padding term(T, none, _Adj, none, _Pad) -> T; term(T, none, Adj, P, Pad) -> term(T, P, Adj, P, Pad); term(T, F, Adj, P0, Pad) -> L = io_lib:chars_length(T), P = erlang:min(L, case P0 of none -> F; _ -> min(P0, F) end), if L > P -> adjust(chars($*, P), chars(Pad, F-P), Adj); F >= P -> adjust(T, chars(Pad, F-L), Adj) end. print(Term , Depth , Field , Adjust , Precision , PadChar , Encoding , print(T, D, none, Adj, P, Pad, E, Str, ChLim, I) -> print(T, D, 80, Adj, P, Pad, E, Str, ChLim, I); print(T, D, F, Adj, none, Pad, E, Str, ChLim, I) -> print(T, D, F, Adj, I+1, Pad, E, Str, ChLim, I); print(T, D, F, right, P, _Pad, Enc, Str, ChLim, _I) -> Options = [{chars_limit, ChLim}, {column, P}, {line_length, F}, {depth, D}, {encoding, Enc}, {strings, Str}], io_lib_pretty:print(T, Options). fwrite_e(Float , Field , Adjust , Precision , PadChar ) fwrite_e(Fl, none, Adj, 6, Pad); fwrite_e(Fl, none, _Adj, P, _Pad) when P >= 2 -> float_e(Fl, float_data(Fl), P); fwrite_e(Fl, F, Adj, none, Pad) -> fwrite_e(Fl, F, Adj, 6, Pad); fwrite_e(Fl, F, Adj, P, Pad) when P >= 2 -> term(float_e(Fl, float_data(Fl), P), F, Adj, F, Pad). float_e(Fl, Fd, P) -> signbit(Fl) ++ abs_float_e(abs(Fl), Fd, P). abs_float_e(_Fl, {Ds,E}, P) -> case float_man(Ds, 1, P-1) of {[$0|Fs],true} -> [[$1|Fs]|float_exp(E)]; {Fs,false} -> [Fs|float_exp(E-1)] end. float_man([Digit ] , Icount , Dcount ) - > { [ Char],CarryFlag } . Generate the characters in the mantissa from the digits with Icount characters before the ' . ' and Dcount decimals . Handle carry and let float_man(Ds, 0, Dc) -> {Cs,C} = float_man(Ds, Dc), {[$.|Cs],C}; float_man([D|Ds], I, Dc) -> case float_man(Ds, I-1, Dc) of {Cs,true} when D =:= $9 -> {[$0|Cs],true}; {Cs,true} -> {[D+1|Cs],false}; {Cs,false} -> {[D|Cs],false} end; {lists:duplicate(I, $0) ++ [$.|lists:duplicate(Dc, $0)],false}. float_man([D|_], 0) when D >= $5 -> {[],true}; float_man([_|_], 0) -> {[],false}; float_man([D|Ds], Dc) -> case float_man(Ds, Dc-1) of {Cs,true} when D =:= $9 -> {[$0|Cs],true}; {Cs,true} -> {[D+1|Cs],false}; {Cs,false} -> {[D|Cs],false} end; float_exp(Exponent ) - > [ ] . float_exp(E) when E >= 0 -> [$e,$+|integer_to_list(E)]; float_exp(E) -> [$e|integer_to_list(E)]. fwrite_f(FloatData , Field , Adjust , Precision , PadChar ) fwrite_f(Fl, none, Adj, 6, Pad); fwrite_f(Fl, none, _Adj, P, _Pad) when P >= 1 -> float_f(Fl, float_data(Fl), P); fwrite_f(Fl, F, Adj, none, Pad) -> fwrite_f(Fl, F, Adj, 6, Pad); fwrite_f(Fl, F, Adj, P, Pad) when P >= 1 -> term(float_f(Fl, float_data(Fl), P), F, Adj, F, Pad). float_f(Fl, Fd, P) -> signbit(Fl) ++ abs_float_f(abs(Fl), Fd, P). abs_float_f(Fl, {Ds,E}, P) when E =< 0 -> abs_float_f(_Fl, {Ds,E}, P) -> case float_man(Ds, E, P) of {Fs,false} -> Fs end. signbit(Fl) when Fl < 0.0 -> [$-]; signbit(Fl) when Fl > 0.0 -> []; signbit(Fl) -> case <<Fl/float>> of <<1:1,_:63>> -> [$-]; _ -> [] end. float_data(Fl) -> float_data(float_to_list(Fl), []). float_data([$e|E], Ds) -> {lists:reverse(Ds),list_to_integer(E)+1}; float_data([D|Cs], Ds) when D >= $0, D =< $9 -> float_data(Cs, [D|Ds]); float_data([_|Cs], Ds) -> float_data(Cs, Ds). -spec fwrite_g(float()) -> string(). fwrite_g(Float) -> float_to_list(Float, [short]). 
fwrite_g(Float , Field , Adjust , Precision , PadChar ) Use the f form if Float is > = 0.1 and < 1.0e4 , fwrite_g(Fl, F, Adj, none, Pad) -> fwrite_g(Fl, F, Adj, 6, Pad); fwrite_g(Fl, F, Adj, P, Pad) when P >= 1 -> A = abs(Fl), E = if A < 1.0e-1 -> -2; A < 1.0e0 -> -1; A < 1.0e1 -> 0; A < 1.0e2 -> 1; A < 1.0e3 -> 2; A < 1.0e4 -> 3; true -> fwrite_f end, if P =< 1, E =:= -1; P-1 > E, E >= -1 -> fwrite_f(Fl, F, Adj, P-1-E, Pad); P =< 1 -> fwrite_e(Fl, F, Adj, 2, Pad); true -> fwrite_e(Fl, F, Adj, P, Pad) end. iolist_to_chars(Cs, F, CharsLimit) when CharsLimit < 0; CharsLimit >= F -> iolist_to_chars(Cs); iolist_to_chars(Cs, _, CharsLimit) -> three dots iolist_to_chars([C|Cs]) when is_integer(C), C >= $\000, C =< $\377 -> [C | iolist_to_chars(Cs)]; iolist_to_chars([I|Cs]) -> [iolist_to_chars(I) | iolist_to_chars(Cs)]; iolist_to_chars([]) -> []; iolist_to_chars(B) when is_binary(B) -> binary_to_list(B). limit_iolist_to_chars(Cs, 0, S, normal) -> L = limit_iolist_to_chars(Cs, 4, S, final), case iolist_size(L) of N when N < 4 -> L; 4 -> "..." end; limit_iolist_to_chars(_Cs, 0, _S, final) -> []; limit_iolist_to_chars([C|Cs], Limit, S, Mode) when C >= $\000, C =< $\377 -> [C | limit_iolist_to_chars(Cs, Limit - 1, S, Mode)]; limit_iolist_to_chars([I|Cs], Limit, S, Mode) -> limit_iolist_to_chars(I, Limit, [Cs|S], Mode); limit_iolist_to_chars([], _Limit, [], _Mode) -> []; limit_iolist_to_chars([], Limit, [Cs|S], Mode) -> limit_iolist_to_chars(Cs, Limit, S, Mode); limit_iolist_to_chars(B, Limit, S, Mode) when is_binary(B) -> case byte_size(B) of Sz when Sz > Limit -> {B1, B2} = split_binary(B, Limit), [binary_to_list(B1) | limit_iolist_to_chars(B2, 0, S, Mode)]; Sz -> [binary_to_list(B) | limit_iolist_to_chars([], Limit-Sz, S, Mode)] end. cdata_to_chars(Cs, F, CharsLimit) when CharsLimit < 0; CharsLimit >= F -> cdata_to_chars(Cs); cdata_to_chars(Cs, _, CharsLimit) -> three dots cdata_to_chars([C|Cs]) when is_integer(C), C >= $\000 -> [C | cdata_to_chars(Cs)]; cdata_to_chars([I|Cs]) -> [cdata_to_chars(I) | cdata_to_chars(Cs)]; cdata_to_chars([]) -> []; cdata_to_chars(B) when is_binary(B) -> case catch unicode:characters_to_list(B) of L when is_list(L) -> L; _ -> binary_to_list(B) end. limit_cdata_to_chars(Cs, 0, normal) -> L = limit_cdata_to_chars(Cs, 4, final), case string:length(L) of N when N < 4 -> L; 4 -> "..." end; limit_cdata_to_chars(_Cs, 0, final) -> []; limit_cdata_to_chars(Cs, Limit, Mode) -> case string:next_grapheme(Cs) of {error, <<C,Cs1/binary>>} -> [C | limit_cdata_to_chars(Cs1, Limit - 1, Mode)]; [C | limit_cdata_to_chars(Cs1, Limit - 1, Mode)]; [] -> []; [GC|Cs1] -> [GC | limit_cdata_to_chars(Cs1, Limit - 1, Mode)] end. limit_field(F, CharsLimit) when CharsLimit < 0; F =:= none -> F; limit_field(F, CharsLimit) -> max(3, min(F, CharsLimit)). string(String , Field , Adjust , Precision , PadChar ) string(S, none, _Adj, none, _Pad, _Enc) -> S; string(S, F, Adj, none, Pad, Enc) -> string_field(S, F, Adj, io_lib:chars_length(S), Pad, Enc); string(S, none, _Adj, P, Pad, Enc) -> string_field(S, P, left, io_lib:chars_length(S), Pad, Enc); string(S, F, Adj, P, Pad, Enc) when F >= P -> N = io_lib:chars_length(S), if F > P -> if N > P -> adjust(flat_trunc(S, P, Enc), chars(Pad, F-P), Adj); N < P -> adjust([S|chars(Pad, P-N)], chars(Pad, F-P), Adj); adjust(S, chars(Pad, F-P), Adj) end; string_field(S, F, Adj, N, Pad, Enc) end. string_field(S, F, _Adj, N, _Pad, Enc) when N > F -> flat_trunc(S, F, Enc); string_field(S, F, Adj, N, Pad, _Enc) when N < F -> adjust(S, chars(Pad, F-N), Adj); S. 
unprefixed_integer(Int , Field , Adjust , Base , PadChar , Lowercase ) - > [ ] . unprefixed_integer(Int, F, Adj, Base, Pad, Lowercase) when Base >= 2, Base =< 1+$Z-$A+10 -> if Int < 0 -> S = cond_lowercase(erlang:integer_to_list(-Int, Base), Lowercase), term([$-|S], F, Adj, none, Pad); true -> S = cond_lowercase(erlang:integer_to_list(Int, Base), Lowercase), term(S, F, Adj, none, Pad) end. prefixed_integer(Int , Field , Adjust , Base , PadChar , Prefix , Lowercase ) - > [ ] . prefixed_integer(Int, F, Adj, Base, Pad, Prefix, Lowercase) when Base >= 2, Base =< 1+$Z-$A+10 -> if Int < 0 -> S = cond_lowercase(erlang:integer_to_list(-Int, Base), Lowercase), term([$-,Prefix|S], F, Adj, none, Pad); true -> S = cond_lowercase(erlang:integer_to_list(Int, Base), Lowercase), term([Prefix|S], F, Adj, none, Pad) end. char(Char , Field , Adjust , Precision , PadChar ) - > chars ( ) . char(C, none, _Adj, none, _Pad) -> [C]; char(C, F, _Adj, none, _Pad) -> chars(C, F); char(C, none, _Adj, P, _Pad) -> chars(C, P); char(C, F, Adj, P, Pad) when F >= P -> adjust(chars(C, P), chars(Pad, F - P), Adj). newline(Field , Adjust , Precision , PadChar ) - > [ ] . newline(none, _Adj, _P, _Pad) -> "\n"; newline(F, right, _P, _Pad) -> chars($\n, F). Utilities adjust(Data, [], _) -> Data; adjust(Data, Pad, left) -> [Data|Pad]; adjust(Data, Pad, right) -> [Pad|Data]. flat_trunc(List, N, latin1) when is_integer(N), N >= 0 -> {S, _} = lists:split(N, lists:flatten(List)), S; flat_trunc(List, N, unicode) when is_integer(N), N >= 0 -> string:slice(List, 0, N). A deep version of lists : duplicate/2 chars(_C, 0) -> []; chars(C, 1) -> [C]; chars(C, 2) -> [C,C]; chars(C, 3) -> [C,C,C]; chars(C, N) when is_integer(N), (N band 1) =:= 0 -> S = chars(C, N bsr 1), [S|S]; chars(C, N) when is_integer(N) -> S = chars(C, N bsr 1), [C,S|S]. [ chars(C , N)|Tail ] . cond_lowercase(String, true) -> lowercase(String); cond_lowercase(String,false) -> String. lowercase([H|T]) when is_integer(H), H >= $A, H =< $Z -> [(H-$A+$a)|lowercase(T)]; lowercase([H|T]) -> [H|lowercase(T)]; lowercase([]) -> []. sub(T, _) when T < 0 -> T; sub(T, E) when T >= E -> T - E; sub(_, _) -> 0. get_option(Key, TupleList, Default) -> case lists:keyfind(Key, 1, TupleList) of false -> Default; {Key, Value} -> Value; _ -> Default end.
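The pad-character helper chars/2 in the Erlang source above duplicates a character by recursing on half the count and reusing the result. A minimal Clojure sketch of the same halving/doubling recursion, for illustration only (not part of OTP; it returns a flat vector instead of a deep iolist):

(defn chars [c n]
  ;; duplicate character c n times by recursing on n/2 and reusing the half
  (cond
    (zero? n) []
    (even? n) (let [s (chars c (quot n 2))] (into s s))
    :else     (let [s (chars c (quot n 2))] (into [c] (concat s s)))))

(apply str (chars \* 7)) ;; => "*******"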
d13cd19061ddd4e0ac2f560738cf3b0d71374c626b9486865b05d2c1113f0589
esb-lwb/lwb
eval.clj
lwb Logic WorkBench -- Linear Temporal Logic : Examples Evaluation of LTL Copyright ( c ) 2016 , THM . All rights reserved . ; The use and distribution terms for this software are covered by the Eclipse Public License 1.0 ( -1.0.php ) . ; By using this software in any fashion, you are agreeing to be bound by ; the terms of this license. (ns lwb.ltl.examples.eval (:require [lwb.ltl :refer :all] ; needed for macroexpand-1 of xor etc !! [lwb.ltl.eval :refer :all] [lwb.ltl.kripke :as ks] ; needed for instrument [clojure.spec.test.alpha :as stest] [clojure.spec.alpha :as s])) (stest/instrument `eval-phi) Examples of structures (def ks1 {:atoms '#{P} :nodes {:s_1 '#{P}} :initial :s_1 :edges #{[:s_1 :s_1]}}) (s/conform ::ks/model ks1) (comment (ks/texify ks1 "eval") ) (eval-phi '(always P) ks1) ; => true (eval-phi 'P ks1) ; => true (eval-phi '(atnext P) ks1) ; => true (eval-phi '(finally P) ks1) ; => true (eval-phi '(impl (always P) (finally P)) ks1) ; => true (eval-phi '(atnext (not P)) ks1) ; => false (eval-phi '(atnext (not P)) ks1 :counterexample) = > s_1 : s_1 ] (eval-phi '(not (finally P)) ks1 :counterexample) = > s_1 : s_1 ] ; Example from lecture notes (def ks2 {:atoms '#{P Q} :nodes {:s_1 '#{P Q} :s_2 '#{P Q} :s_3 '#{P}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_2 :s_1] [:s_2 :s_3] [:s_3 :s_3]}}) (comment (ks/texify ks2 "eval") ) (eval-phi '(always P) ks2) ; => true (eval-phi '(atnext (or P Q)) ks2) ; => true (eval-phi '(atnext (and P Q)) ks2) ; => true (eval-phi '(atnext (atnext (atnext P))) ks2) ; => true (eval-phi '(atnext (atnext (atnext Q))) ks2) ; => false (eval-phi '(atnext (atnext (atnext Q))) ks2 :counterexample) = > s_2 : s_3 : s_3 : s_3 ] (eval-phi '(always (impl (not Q) (always (and P (not Q))))) ks2) ; => true (def ks3 {:atoms '#{P} :nodes {:s_0 '#{} :s_1 '#{P} :s_2 '#{}} :initial :s_0 :edges #{[:s_0 :s_1] [:s_1 :s_1] [:s_0 :s_2] [:s_2 :s_2]}}) (comment (ks/texify ks3 "eval") ) (eval-phi '(finally P) ks3) ; => false (eval-phi '(finally P) ks3 :counterexample) = > (eval-phi '(not (finally P)) ks3) ; => false (eval-phi '(not (finally P)) ks3 :counterexample) = > s_1 : s_1 : s_1 ] Microwave oven example from Clarke et al Model Checking p.39 (def oven {:atoms '#{Start Close Heat Error} :nodes {:s_1 '#{} :s_2 '#{Start Error} :s_3 '#{Close} :s_4 '#{Close Heat} :s_5 '#{Start Close Error} :s_6 '#{Start Close} :s_7 '#{Start Close Heat}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_1 :s_3] [:s_2 :s_5] [:s_3 :s_1] [:s_3 :s_6] [:s_4 :s_1] [:s_4 :s_3] [:s_4 :s_4] [:s_5 :s_2] [:s_5 :s_3] [:s_6 :s_7] [:s_7 :s_4]}}) (comment (ks/texify oven "eval") ; not really well arranged! 
) ; p.45 ; it's impossible for the oven to be hot with the door open (eval-phi '(always (until (not Heat) Close)) oven) ; => true p.47 ; Whenever an illegal sequence of steps occurs, the oven will never heat or will eventually reset (eval-phi '(impl (always (and (not Close) Start)) (or (always (not Heat)) (finally (not Error)))) oven) ; => true ; Traffic lights (def tl {:atoms '#{Green Yellow Red} :nodes {:s_1 '#{Red} :s_2 '#{Green} :s_3 '#{Yellow}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_2 :s_3] [:s_3 :s_1]}}) (comment (ks/texify tl "eval") ) The traffic light is infinitely often Green (eval-phi '(always (finally Green)) tl) ; true ; Never the lamp is Green and Red (eval-phi '(always (not (and Green Red))) tl) ; true ; Finally the lamp is Green and Yellow (eval-phi '(finally (and Green Yellow)) tl) ; => false (eval-phi '(finally (and Green Yellow)) tl :counterexample) = > s_2 : s_3 : s_1 ] ; Alternating states (def alt {:atoms '#{P Q} :nodes {:s_1 '#{P} :s_2 '#{Q}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_2 :s_1]}}) (comment (ks/texify alt "eval") ) (eval-phi '(always (impl P (atnext Q))) alt) ; => true (eval-phi '(always (not (and P Q))) alt) ; => true (eval-phi '(always (finally P)) alt) ; => true (def ks4 {:atoms '#{P Q R} :nodes {:s_1 '#{P} :s_2 '#{Q} :s_3 '#{R}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_1 :s_3] [:s_2 :s_3] [:s_2 :s_2] [:s_3 :s_3]}}) (comment (ks/texify ks4 "eval") ) (eval-phi '(atnext P) ks4) ; => false (eval-phi '(atnext Q) ks4) ; => false, since it must be true for all paths (eval-phi '(atnext Q) ks4 :counterexample) = > s_3 : s_3 ] (eval-phi '(atnext (or Q R)) ks4) ; => true (eval-phi '(impl Q (atnext R)) ks4) ; => true (eval-phi '(impl P (atnext (atnext (or Q R)))) ks4) ; => true (eval-phi '(impl P (atnext (atnext R))) ks4) ; => false
null
https://raw.githubusercontent.com/esb-lwb/lwb/bba51ada7f7316341733d37b0dc4848c4891ef3a/src/lwb/ltl/examples/eval.clj
clojure
The use and distribution terms for this software are covered by the By using this software in any fashion, you are agreeing to be bound by the terms of this license. needed for macroexpand-1 of xor etc !! needed for instrument => true => true => true => true => true => false Example from lecture notes => true => true => true => true => false => true => false => false not really well arranged! p.45 it's impossible for the oven to be hot with the door open => true Whenever an illegal sequence of steps occurs, the oven will never heat or will eventually reset => true Traffic lights true Never the lamp is Green and Red true Finally the lamp is Green and Yellow => false Alternating states => true => true => true => false => false, since it must be true for all paths => true => true => true => false
lwb Logic WorkBench -- Linear Temporal Logic : Examples Evaluation of LTL Copyright ( c ) 2016 , THM . All rights reserved . Eclipse Public License 1.0 ( -1.0.php ) . (ns lwb.ltl.examples.eval [lwb.ltl.eval :refer :all] [clojure.spec.test.alpha :as stest] [clojure.spec.alpha :as s])) (stest/instrument `eval-phi) Examples of structures (def ks1 {:atoms '#{P} :nodes {:s_1 '#{P}} :initial :s_1 :edges #{[:s_1 :s_1]}}) (s/conform ::ks/model ks1) (comment (ks/texify ks1 "eval") ) (eval-phi '(always P) ks1) (eval-phi 'P ks1) (eval-phi '(atnext P) ks1) (eval-phi '(finally P) ks1) (eval-phi '(impl (always P) (finally P)) ks1) (eval-phi '(atnext (not P)) ks1) (eval-phi '(atnext (not P)) ks1 :counterexample) = > s_1 : s_1 ] (eval-phi '(not (finally P)) ks1 :counterexample) = > s_1 : s_1 ] (def ks2 {:atoms '#{P Q} :nodes {:s_1 '#{P Q} :s_2 '#{P Q} :s_3 '#{P}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_2 :s_1] [:s_2 :s_3] [:s_3 :s_3]}}) (comment (ks/texify ks2 "eval") ) (eval-phi '(always P) ks2) (eval-phi '(atnext (or P Q)) ks2) (eval-phi '(atnext (and P Q)) ks2) (eval-phi '(atnext (atnext (atnext P))) ks2) (eval-phi '(atnext (atnext (atnext Q))) ks2) (eval-phi '(atnext (atnext (atnext Q))) ks2 :counterexample) = > s_2 : s_3 : s_3 : s_3 ] (eval-phi '(always (impl (not Q) (always (and P (not Q))))) ks2) (def ks3 {:atoms '#{P} :nodes {:s_0 '#{} :s_1 '#{P} :s_2 '#{}} :initial :s_0 :edges #{[:s_0 :s_1] [:s_1 :s_1] [:s_0 :s_2] [:s_2 :s_2]}}) (comment (ks/texify ks3 "eval") ) (eval-phi '(finally P) ks3) (eval-phi '(finally P) ks3 :counterexample) = > (eval-phi '(not (finally P)) ks3) (eval-phi '(not (finally P)) ks3 :counterexample) = > s_1 : s_1 : s_1 ] Microwave oven example from Clarke et al Model Checking p.39 (def oven {:atoms '#{Start Close Heat Error} :nodes {:s_1 '#{} :s_2 '#{Start Error} :s_3 '#{Close} :s_4 '#{Close Heat} :s_5 '#{Start Close Error} :s_6 '#{Start Close} :s_7 '#{Start Close Heat}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_1 :s_3] [:s_2 :s_5] [:s_3 :s_1] [:s_3 :s_6] [:s_4 :s_1] [:s_4 :s_3] [:s_4 :s_4] [:s_5 :s_2] [:s_5 :s_3] [:s_6 :s_7] [:s_7 :s_4]}}) (comment (ks/texify oven "eval") ) (eval-phi '(always (until (not Heat) Close)) oven) p.47 (eval-phi '(impl (always (and (not Close) Start)) (or (always (not Heat)) (finally (not Error)))) oven) (def tl {:atoms '#{Green Yellow Red} :nodes {:s_1 '#{Red} :s_2 '#{Green} :s_3 '#{Yellow}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_2 :s_3] [:s_3 :s_1]}}) (comment (ks/texify tl "eval") ) The traffic light is infinitely often Green (eval-phi '(always (finally Green)) tl) (eval-phi '(always (not (and Green Red))) tl) (eval-phi '(finally (and Green Yellow)) tl) (eval-phi '(finally (and Green Yellow)) tl :counterexample) = > s_2 : s_3 : s_1 ] (def alt {:atoms '#{P Q} :nodes {:s_1 '#{P} :s_2 '#{Q}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_2 :s_1]}}) (comment (ks/texify alt "eval") ) (eval-phi '(always (impl P (atnext Q))) alt) (eval-phi '(always (not (and P Q))) alt) (eval-phi '(always (finally P)) alt) (def ks4 {:atoms '#{P Q R} :nodes {:s_1 '#{P} :s_2 '#{Q} :s_3 '#{R}} :initial :s_1 :edges #{[:s_1 :s_2] [:s_1 :s_3] [:s_2 :s_3] [:s_2 :s_2] [:s_3 :s_3]}}) (comment (ks/texify ks4 "eval") ) (eval-phi '(atnext P) ks4) (eval-phi '(atnext Q) ks4) (eval-phi '(atnext Q) ks4 :counterexample) = > s_3 : s_3 ] (eval-phi '(atnext (or Q R)) ks4) (eval-phi '(impl Q (atnext R)) ks4) (eval-phi '(impl P (atnext (atnext (or Q R)))) ks4) (eval-phi '(impl P (atnext (atnext R))) ks4)
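To make the eval-phi calls above a little more concrete, here is a hypothetical helper (not part of lwb) that checks a single propositional atom on every state of a finite path prefix, using the ks1 structure defined in the file:

(defn holds-on-path?
  "True when prop is in the label set of every state on path."
  [kripke prop path]
  (every? #(contains? (get-in kripke [:nodes %]) prop) path))

(holds-on-path? ks1 'P [:s_1 :s_1]) ;; => true, mirroring (eval-phi '(always P) ks1)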
f223c8346b5c989a7309faa1a3b1eda7675d2db8a1fb542f942ad432dc6b7c99
emina/rosette
infeasible-solver.rkt
#lang rosette (define-symbolic n integer?) (define xs (if (= n 0) '(1) '())) (when (= (add1 n) 1) (apply add1 xs))
null
https://raw.githubusercontent.com/emina/rosette/a64e2bccfe5876c5daaf4a17c5a28a49e2fbd501/test/trace/code/infeasible-solver.rkt
racket
#lang rosette (define-symbolic n integer?) (define xs (if (= n 0) '(1) '())) (when (= (add1 n) 1) (apply add1 xs))
7a0bafa41bf6b4053cd572a80f1ceccc065c1ff26f574311c3c2eea1651674dc
bradparker/servant-beam-realworld-example-app
Profile.hs
module RealWorld.Conduit.Users.Profile ( Profile(..) ) where import Data.Aeson (ToJSON, FromJSON) import Data.Swagger (ToSchema) data Profile = Profile { id :: Int , username :: Text , bio :: Text , image :: Maybe Text , following :: Bool } deriving (Generic) deriving instance ToJSON Profile deriving instance FromJSON Profile deriving instance ToSchema Profile
null
https://raw.githubusercontent.com/bradparker/servant-beam-realworld-example-app/d4a0d79d9dbc7e5b8987b367ac32f99ac3cc20e7/src/RealWorld/Conduit/Users/Profile.hs
haskell
module RealWorld.Conduit.Users.Profile ( Profile(..) ) where import Data.Aeson (ToJSON, FromJSON) import Data.Swagger (ToSchema) data Profile = Profile { id :: Int , username :: Text , bio :: Text , image :: Maybe Text , following :: Bool } deriving (Generic) deriving instance ToJSON Profile deriving instance FromJSON Profile deriving instance ToSchema Profile
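For orientation only, a Clojure sketch (not part of this Haskell project) of the JSON shape a Profile value with these fields would serialize to; it assumes the cheshire library is on the classpath and that the derived instances use the record field names as keys:

(require '[cheshire.core :as json]) ; assumption: cheshire is available

(def profile {:id 1 :username "jane" :bio "Hi" :image nil :following false})

(json/generate-string profile)
;; => a JSON object like {"id":1,"username":"jane","bio":"Hi","image":null,"following":false}

(json/parse-string (json/generate-string profile) true) ;; round-trips back to the map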
4b42f5f15786151911e5549f0f62227f5568eb19dd82ead4b4633c3aea6cee46
jumarko/clojure-experiments
csv.clj
(ns clojure-experiments.csv "see `clojure-experiments.stats.techml` for more tech.ml.dataset related experiments" (:require [clojure.data.csv :as csv] [clojure.java.io :as io] requires the ` --illegal - access = permit ` workaround on JDK 16 : -goes-fast/clj-memory-meter/issues/8 [clj-memory-meter.core :as mm] [charred.api :as charred])) ;;; See also - semantic.csv : -csv , -semantic-csv ;;; - tech.ml.dataset: - > Walkthrough : - For a one - stop data exploration pathway that should work well for you : / ;;; Using CSV to quickly visualize data can be very handy ;;; and an easy way how to explore, observe trends and find errors (with-open [writer (io/writer "out-file.csv")] (csv/write-csv writer [(map name [:a :c]) ["abc" "def"] ["ghi" "jkl"]])) ;; Parsing into maps: /#parsing-into-maps (defn csv-data->maps [csv-data] (map zipmap (->> (first csv-data) ;; First row is the header (map keyword) ;; Drop if you want string keys instead repeat) (rest csv-data))) ;;; Charred - blazing-fast CSV & JSON parsing library with minimal dependencies ;;; new JSON & CSV parsing library with zero dependencies and very fast : chrisn : Introducing Charred - fast json / csv encode and decode . This library finalizes my research into csv and parsing and is a complete drop - in replacement for clojure.data.csv and clojure.data.json . Same API , much better ( 5 - 10x ) performance . This library gets as good performance for those tasks as anything on the JVM and avoids the hairball entirely . ;; * You can find my previous post on fast csv parsing for the reasons why the system is fast or just read the source code. All the files are pretty short. I moved the code from dtype-next into a stand-alone library and added encoding (writing) to the mix so you don't need any other dependencies. Finally this library has the same conformance suite as the libraries it replaces so you can feel at least somewhat confident it will handle your data with respect. * This is the same story as the fast CSV parser in the same library - do n't use pushback reader and write tight loops in java . In any case , here is a profile project . ;; * Former announcement about CSV parsing: * By my tests in jdk-17 read ( ) if a 1.7 GB file for a pushback reader is 51sec vs 500ms for a tight loop reading into a character array . ;; * fancy methods of reading character data from a file, such as memory mapping it and even potentially io_uring on linux are unlikely to get any faster for CSV parsing specifically unless you know your data doesn't have quoted sections. * At the end of the day - do n't use csv . Use arrow or parquet if you need any performance at all as a parquet file of a 1.7 GB test csv set was ~240 MB ;; maybe try this: "" (comment (def parsed-json (charred/read-json (io/file "resources/cars.json"))) (def parsed-csv (charred/read-csv (slurp "-covid-19.github.io/data/v2/latest/master.csv"))) ) ;;; - tech.ml.dataset: ;;; ;;; chrisn: ;;;; - tech.ml.dataset can load that file I believe. It is far more efficient with memory. in general. - For a one - stop data exploration pathway that should work well for you : / (comment (require '[clj-memory-meter.core :as mm]) (def csv-ds (csv/read-csv (slurp "-covid-19.github.io/data/v2/latest/master.csv"))) do n't be fooled by lazy seqs when measuring memory - > use vector ;; UPDATE: doesn't work with JDK16 out of the box (mm/measure (vec csv-ds)) = > " 27.8 MB " ( JDK 11 ! - 13.8.2021 ) = > " 13.3 MB " ( JDK 16 ! - 13.8.2021 ) = > " 23.1 MB " ( JDK 11 ? 
- a long time ago) (mm/measure (vec (csv-data->maps csv-ds))) ;; => "36.9 MB" (JDK 11! - 13.8.2021) ;; => "22.5 MB" (JDK16! - 13.8.2021) ;; => "31.8 MB" (JDK 11? - a long time ago) ;; can take a while to load (require '[tech.v3.dataset :as ds]) (def ds (ds/->dataset "-covid-19.github.io/data/v2/latest/master.csv")) (mm/measure ds) ;; => "6.8 MB" (JDK 11 - 13.8.2021) ;; => "6.8 MB" (JDK 16! - 13.8.2021) ;; => "5.1 MB" (JDK 11? - a long time ago) ;; "clone" makes it more memory efficient ( suggested by ) ;; see also -reference.md#forcing-lazy-evaluation (mm/measure (tech.v3.datatype/clone ds)) ;; => "3.7 MB" ;; dataset is logically a sequence of columns when treated like a sequence: (first ds) ;; => #tech.ml.dataset.column<string>[5001] ;; key [AD, AE, AF, AF_BAL, , AF_BDG, AF_BDS, AF_BGL, AF_DAY, AF_FRA, , AF_GHA, AF_GHO, AF_HEL, AF_HER, AF_JOW, AF_KAB, AF_KAN, AF_KAP, , ... ] ;; ) ;;; newer version of tech.ml.dataset ;;; #mini-walkthrough ;;; => see `clojure-experiments.stats.techml` for more (comment ;; can take a while to load (require '[tech.v3.dataset :as ds]) (def csv-data (ds/->dataset "")) (ds/head csv-data) ;; => [5 3]: ;; | symbol | date | price | ;; |--------|------------|------:| ;; | MSFT | 2000-01-01 | 39.81 | ;; | MSFT | 2000-02-01 | 36.35 | ;; | MSFT | 2000-03-01 | 43.22 | ;; | MSFT | 2000-04-01 | 28.37 | ;; | MSFT | 2000-05-01 | 25.45 | (def airports (ds/->dataset "" {:header-row? false :file-type :csv})) (ds/head airports) ,)
null
https://raw.githubusercontent.com/jumarko/clojure-experiments/a87098fe69044ad65813a68cb870d824c2c2d18f/src/clojure_experiments/csv.clj
clojure
See also - tech.ml.dataset: Using CSV to quickly visualize data can be very handy and an easy way how to explore, observe trends and find errors Parsing into maps: /#parsing-into-maps First row is the header Drop if you want string keys instead Charred - blazing-fast CSV & JSON parsing library with minimal dependencies * You can find my previous post on fast csv parsing for the reasons why the system is fast or just read the source code. All the files are pretty short. I moved the code from dtype-next into a stand-alone library and added encoding (writing) to the mix so you don't need any other dependencies. Finally this library has the same conformance suite as the libraries it replaces so you can feel at least somewhat confident it will handle your data with respect. * Former announcement about CSV parsing: * fancy methods of reading character data from a file, such as memory mapping it and even potentially io_uring on linux are unlikely to get any faster for CSV parsing specifically unless you know your data doesn't have quoted sections. maybe try this: "" - tech.ml.dataset: chrisn: - tech.ml.dataset can load that file I believe. It is far more efficient with memory. in general. UPDATE: doesn't work with JDK16 out of the box can take a while to load "clone" makes it more memory efficient see also -reference.md#forcing-lazy-evaluation dataset is logically a sequence of columens when treated like a sequence: => #tech.ml.dataset.column<string>[5001] key newer version of tech.ml.dataset #mini-walkthrough => see `clojure-experiments.stats.techml` for more can take a while to load | symbol | date | price | |--------|------------|------:|
(ns clojure-experiments.csv "see `clojure-experiments.stats.techml` for more tech.ml.dataset related experiments" (:require [clojure.data.csv :as csv] [clojure.java.io :as io] requires the ` --illegal - access = permit ` workaround on JDK 16 : -goes-fast/clj-memory-meter/issues/8 [clj-memory-meter.core :as mm] [charred.api :as charred])) - semantic.csv : -csv , -semantic-csv - > Walkthrough : - For a one - stop data exploration pathway that should work well for you : / (with-open [writer (io/writer "out-file.csv")] (csv/write-csv writer [(map name [:a :c]) ["abc" "def"] ["ghi" "jkl"]])) (defn csv-data->maps [csv-data] (map zipmap repeat) (rest csv-data))) new JSON & CSV parsing library with zero dependencies and very fast : chrisn : Introducing Charred - fast json / csv encode and decode . This library finalizes my research into csv and parsing and is a complete drop - in replacement for clojure.data.csv and clojure.data.json . Same API , much better ( 5 - 10x ) performance . This library gets as good performance for those tasks as anything on the JVM and avoids the hairball entirely . * This is the same story as the fast CSV parser in the same library - do n't use pushback reader and write tight loops in java . In any case , here is a profile project . * By my tests in jdk-17 read ( ) if a 1.7 GB file for a pushback reader is 51sec vs 500ms for a tight loop reading into a character array . * At the end of the day - do n't use csv . Use arrow or parquet if you need any performance at all as a parquet file of a 1.7 GB test csv set was ~240 MB (comment (def parsed-json (charred/read-json (io/file "resources/cars.json"))) (def parsed-csv (charred/read-csv (slurp "-covid-19.github.io/data/v2/latest/master.csv"))) ) - For a one - stop data exploration pathway that should work well for you : / (comment (require '[clj-memory-meter.core :as mm]) (def csv-ds (csv/read-csv (slurp "-covid-19.github.io/data/v2/latest/master.csv"))) do n't be fooled by lazy seqs when measuring memory - > use vector (mm/measure (vec csv-ds)) = > " 27.8 MB " ( JDK 11 ! - 13.8.2021 ) = > " 13.3 MB " ( JDK 16 ! - 13.8.2021 ) = > " 23.1 MB " ( JDK 11 ? - a long time ago ) (mm/measure (vec (csv-data->maps csv-ds))) = > " 36.9 MB " ( JDK 11 ! - 13.8.2021 ) = > " 22.5 MB " ( JDK16 ! - 13.8.2021 ) = > " 31.8 MB " ( JDK 11 ? - a long time ago ) (require '[tech.v3.dataset :as ds]) (def ds (ds/->dataset "-covid-19.github.io/data/v2/latest/master.csv")) (mm/measure ds) = > " 6.8 MB " ( JDK 11 - 13.8.2021 ) = > " 6.8 MB " ( JDK 16 ! - 13.8.2021 ) = > " 5.1 MB " ( JDK 11 ? - a long time ago ) ( suggested by ) (mm/measure (tech.v3.datatype/clone ds)) = > " 3.7 MB " (first ds) [ AD , AE , AF , AF_BAL , , AF_BDG , AF_BDS , AF_BGL , AF_DAY , AF_FRA , , AF_GHA , AF_GHO , AF_HEL , AF_HER , AF_JOW , AF_KAB , AF_KAN , AF_KAP , , ... ] ) (comment (require '[tech.v3.dataset :as ds]) (def csv-data (ds/->dataset "")) (ds/head csv-data) = > [ 5 3 ] : | MSFT | 2000 - 01 - 01 | 39.81 | | MSFT | 2000 - 02 - 01 | 36.35 | | MSFT | 2000 - 03 - 01 | 43.22 | | MSFT | 2000 - 04 - 01 | 28.37 | | MSFT | 2000 - 05 - 01 | 25.45 | (def airports (ds/->dataset "" {:header-row? false :file-type :csv})) (ds/head airports) ,)
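A minimal usage sketch of the csv-data->maps helper defined above, applied to an in-memory vector of rows (the same shape clojure.data.csv/read-csv returns) instead of a parsed file:

(csv-data->maps [["symbol" "price"]
                 ["MSFT" "39.81"]
                 ["AAPL" "25.94"]])
;; => ({:symbol "MSFT", :price "39.81"} {:symbol "AAPL", :price "25.94"})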
54c90ecdcd7d4596ccc0dfde22ebff90828ce72d15e5af5efbd031f7fe4154af
arttuka/reagent-material-ui
box.cljs
(ns reagent-mui.material.box "Imports @mui/material/Box as a Reagent component. Original documentation is at -ui/api/box/ ." (:require [reagent.core :as r] ["@mui/material/Box" :as MuiBox])) (def box (r/adapt-react-class (.-default MuiBox)))
null
https://raw.githubusercontent.com/arttuka/reagent-material-ui/14103a696c41c0eb67fc07fc67cd8799efd88cb9/src/core/reagent_mui/material/box.cljs
clojure
(ns reagent-mui.material.box "Imports @mui/material/Box as a Reagent component. Original documentation is at -ui/api/box/ ." (:require [reagent.core :as r] ["@mui/material/Box" :as MuiBox])) (def box (r/adapt-react-class (.-default MuiBox)))
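A minimal Reagent usage sketch for the adapted box component above; the namespace and view function are hypothetical, and the :sx map follows the MUI Box API:

(ns example.views ; hypothetical app namespace
  (:require [reagent-mui.material.box :refer [box]]))

(defn greeting-box []
  ;; renders an MUI <Box component="section" sx={{p: 2, border: 1}}>
  [box {:component "section" :sx {:p 2 :border 1}}
   "Hello from MUI Box"])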
075f968659a24c0e963b1ee0da8b5ec2159187667881a237d89be2cddd982f40
xapi-project/ocaml-qmp
cli.ml
(* * Copyright (C) 2013 Citrix Systems Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation; version 2.1 only. with the special * exception on linking described in file LICENSE. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. *) let project_url = "-qmp" let default_path = ref "/tmp/qmp" open Cmdliner (* Help sections common to all commands *) let _common_options = "COMMON OPTIONS" let help = [ `S _common_options; `P "These options are common to all commands."; `S "MORE HELP"; `P "Use `$(mname) $(i,COMMAND) --help' for help on a single command."; `Noblank; `S "BUGS"; `P (Printf.sprintf "Check bug reports at %s" project_url); ] (* Options common to all commands *) let common_options_t = let docs = _common_options in let debug = let doc = "Give only debug output." in Arg.(value & flag & info ["debug"] ~docs ~doc) in let verb = let doc = "Give verbose output." in let verbose = true, Arg.info ["v"; "verbose"] ~docs ~doc in Arg.(last & vflag_all [false] [verbose]) in let socket = let doc = Printf.sprintf "Specify path to the server Unix domain socket." in Arg.(value & opt file !default_path & info ["socket"] ~docs ~doc) in Term.(pure Common.make $ debug $ verb $ socket) let default_cmd = let doc = "interact with a running qemu via QMP" in let man = help in Term.(ret (pure (fun _ -> `Help (`Pager, None)) $ common_options_t)), Term.info "qmp-cli" ~version:"1.0.0" ~sdocs:_common_options ~doc ~man let watch_cmd = let doc = "watch for asynchronous events" in let man = [ `S "DESCRIPTION"; `P "Watches for and prints asynchronous events generated by the VM."; ] @ help in Term.(pure Client.watch $ common_options_t), Term.info "watch" ~sdocs:_common_options ~doc ~man let stop_cmd = let doc = "Immediately freeze execution" in let man = [ `S "DESCRIPTION"; `P "Stop the VM running but leave the state intact.
Use 'cont' to start the VM executing again."; ] @ help in Term.(pure Client.stop $ common_options_t), Term.info "stop" ~sdocs:_common_options ~doc ~man let cont_cmd = let doc = "Continue a frozen VM" in let man = [ `S "DESCRIPTION"; `P "If a VM has been frozen with 'stop', then 'cont' will cause it to start executing again."; ] @ help in Term.(pure Client.cont $ common_options_t), Term.info "cont" ~sdocs:_common_options ~doc ~man let powerdown_cmd = let doc = "Press the system powerdown button" in let man = [ `S "DESCRIPTION"; `P "Press the system powerdown button which requests that the OS shuts itself down cleanly."; ] @ help in Term.(pure Client.system_powerdown $ common_options_t), Term.info "powerdown" ~sdocs:_common_options ~doc ~man let cmds = [ watch_cmd; stop_cmd; cont_cmd; powerdown_cmd ] let _ = match Term.eval_choice default_cmd cmds with | `Error _ -> exit 1 | _ -> exit 0
null
https://raw.githubusercontent.com/xapi-project/ocaml-qmp/1036d976dc0ef8ca75507fed195a1c03e4091fe9/cli/cli.ml
ocaml
Help sections common to all commands Options common to all commands
* Copyright ( C ) 2013 Citrix Systems Inc. * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation ; version 2.1 only . with the special * exception on linking described in file LICENSE . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU Lesser General Public License for more details . * Copyright (C) 2013 Citrix Systems Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation; version 2.1 only. with the special * exception on linking described in file LICENSE. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. *) let project_url = "-qmp" let default_path = ref "/tmp/qmp" open Cmdliner let _common_options = "COMMON OPTIONS" let help = [ `S _common_options; `P "These options are common to all commands."; `S "MORE HELP"; `P "Use `$(mname) $(i,COMMAND) --help' for help on a single command."; `Noblank; `S "BUGS"; `P (Printf.sprintf "Check bug reports at %s" project_url); ] let common_options_t = let docs = _common_options in let debug = let doc = "Give only debug output." in Arg.(value & flag & info ["debug"] ~docs ~doc) in let verb = let doc = "Give verbose output." in let verbose = true, Arg.info ["v"; "verbose"] ~docs ~doc in Arg.(last & vflag_all [false] [verbose]) in let socket = let doc = Printf.sprintf "Specify path to the server Unix domain socket." in Arg.(value & opt file !default_path & info ["socket"] ~docs ~doc) in Term.(pure Common.make $ debug $ verb $ socket) let default_cmd = let doc = "interact with a running qemu via QMP" in let man = help in Term.(ret (pure (fun _ -> `Help (`Pager, None)) $ common_options_t)), Term.info "qmp-cli" ~version:"1.0.0" ~sdocs:_common_options ~doc ~man let watch_cmd = let doc = "watch for asynchronous events" in let man = [ `S "DESCRIPTION"; `P "Watches for and prints asynchronous events generated by the VM."; ] @ help in Term.(pure Client.watch $ common_options_t), Term.info "watch" ~sdocs:_common_options ~doc ~man let stop_cmd = let doc = "Immediately freeze execution" in let man = [ `S "DESCRIPTION"; `P "Stop the VM running but leave the state intact. Use 'cont' to start the VM executing again."; ] @ help in Term.(pure Client.stop $ common_options_t), Term.info "stop" ~sdocs:_common_options ~doc ~man let cont_cmd = let doc = "Continue a frozen VM" in let man = [ `S "DESCRIPTION"; `P "If a VM has been frozen with 'stop', then 'cont' will cause it to start executing again."; ] @ help in Term.(pure Client.cont $ common_options_t), Term.info "cont" ~sdocs:_common_options ~doc ~man let powerdown_cmd = let doc = "Press the system powerdown button" in let man = [ `S "DESCRIPTION"; `P "Press the system powerdown button which requests that the OS shuts itself down cleanly."; ] @ help in Term.(pure Client.system_powerdown $ common_options_t), Term.info "powerdown" ~sdocs:_common_options ~doc ~man let cmds = [ watch_cmd; stop_cmd; cont_cmd; powerdown_cmd ] let _ = match Term.eval_choice default_cmd cmds with | `Error _ -> exit 1 | _ -> exit 0
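Not part of ocaml-qmp: a rough Clojure sketch of the same subcommand shape (watch / stop / cont / powerdown, plus a --socket option defaulting to /tmp/qmp), assuming clojure.tools.cli is on the classpath; the println bodies stand in for real QMP calls:

(ns qmp-cli.sketch ; hypothetical namespace
  (:require [clojure.tools.cli :refer [parse-opts]]))

(def cli-options
  [["-s" "--socket PATH" "Path to the QMP Unix domain socket" :default "/tmp/qmp"]
   ["-v" "--verbose" "Give verbose output"]])

(defn -main [& args]
  (let [{:keys [options arguments]} (parse-opts args cli-options)]
    (case (first arguments)
      "watch"     (println "would watch QMP events on" (:socket options))
      "stop"      (println "would send 'stop' to" (:socket options))
      "cont"      (println "would send 'cont' to" (:socket options))
      "powerdown" (println "would send 'system_powerdown' to" (:socket options))
      (println "usage: qmp-cli [watch|stop|cont|powerdown]"))))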
792f708a9a8dec4047ccfbcd5698a1f6fe80ed34afd4e08fb46dce22579334c5
adnelson/nixfromnpm
ToNix.hs
# LANGUAGE CPP # # LANGUAGE NoImplicitPrelude # # LANGUAGE LambdaCase # {-# LANGUAGE OverloadedStrings #-} # LANGUAGE RecordWildCards # # LANGUAGE ScopedTypeVariables # # LANGUAGE FlexibleContexts # # LANGUAGE ViewPatterns # module NixFromNpm.Conversion.ToNix where import qualified Prelude as P import Data.Fix (Fix(..)) import qualified Data.HashMap.Strict as H import qualified Data.Map.Strict as M import qualified Data.ByteString.Char8 as C8 import Data.Char (isDigit) import Data.Text (Text, replace) import qualified Data.Text as T import Data.SemVer import NixFromNpm.Common hiding (replace) import Nix.Expr hiding (mkPath) import Nix.Pretty (prettyNix) import qualified Nix.Expr as Nix import Nix.Parser import NixFromNpm.Npm.Types import NixFromNpm.Npm.PackageMap #if MIN_VERSION_hnix(0,5,0) import Data.List.NonEmpty (NonEmpty(..), nonEmpty) import Data.Maybe (fromJust) import Text.Megaparsec (mkPos) import Text.Regex.TDFA ((=~)) import Text.Regex.TDFA.Text () genSourcePos :: SourcePos genSourcePos = SourcePos "generated by nixfromnpm" (mkPos 1) (mkPos 1) mkNamedVar :: NAttrPath r -> r -> Binding r mkNamedVar p r = NamedVar p r genSourcePos mkNAttrPath :: [NKeyName a] -> NAttrPath a mkNAttrPath = fromJust . nonEmpty (!.) :: NExpr -> Text -> NExpr (!.) = mkDot infixl 8 !. mkDot :: NExpr -> Text -> NExpr mkDot e key = mkDots e [key] mkDots :: NExpr -> [Text] -> NExpr mkDots e keys = Fix $ NSelect e (mkNAttrPath (toKey <$> keys)) Nothing where toKey :: Text -> NKeyName NExpr toKey k = (if isPlainSymbol k then StaticKey else dynamicKey) k -- | Make a dynamic key name that is only enclosed in double quotes -- (no antiquotes). dynamicKey :: Text -> NKeyName NExpr dynamicKey k = DynamicKey $ Plain $ DoubleQuoted [Plain k] -- | Check if it’s a valid nix symbol -- the nix lexer regex for IDs (symbols) is [a-zA-Z\_][a-zA-Z0-9\_\'\-]* isPlainSymbol :: Text -> Bool isPlainSymbol s = s =~ ("^[a-zA-Z_][a-zA-Z0-9_'-]*$" :: Text) mkParamset' :: [(Text, Maybe NExpr)] -> Params NExpr mkParamset' ps = mkParamset ps False #else mkNamedVar :: NAttrPath r -> r -> Binding r mkNamedVar p r = NamedVar p r mkNAttrPath :: [NKeyName a] -> NAttrPath a mkNAttrPath = id mkParamset' :: [(Text, Maybe NExpr)] -> Params NExpr mkParamset' ps = mkParamset ps #endif -- | This contains the same information as the .nix file that corresponds -- to the package. More or less it tells us everything that we need to build -- the package. data ResolvedPkg = ResolvedPkg { rpName :: PackageName, rpVersion :: SemVer, rpDistInfo :: Maybe DistInfo, -- ^ If a token was necessary to fetch the package, include it here. rpMeta :: PackageMeta, rpDependencies :: PRecord ResolvedDependency, rpOptionalDependencies :: PRecord ResolvedDependency, rpDevDependencies :: Maybe (PRecord ResolvedDependency) } deriving (Show, Eq) -- | True if any of the package's dependencies have namespaces. hasNamespacedDependency :: ResolvedPkg -> Bool hasNamespacedDependency rPkg = any hasNs (allDeps rPkg) where -- Get all of the dependency sets of the package. allDeps ResolvedPkg{..} = [rpDependencies, rpOptionalDependencies, maybe mempty id rpDevDependencies] -- Look at all of the package names (keys) to see if any are namespaced. hasNs = any isNamespaced . H.keys -- | Turns a string into one that can be used as an identifier. NPM package names can contain dots , so we translate these into dashes . -- Names can also start with a number; in this case prefix with an underscore. fixName :: Name -> Name fixName name = do -- Replace dots with dashes let name' = replace "." 
"-" name case T.findIndex isDigit name' of First character is a digit ; prefix with underscore Just 0 -> "_" <> name' _ -> name' | Converts a package name and semver into an Nix expression . Example : " foo " and 1.2.3 turns into " foo_1 - 2 - 3 " . Example : " foo.bar " and 1.2.3 - baz turns into " foo - bar_1 - 2 - 3 - baz " Example : " @foo / bar " and 1.2.3 turns into " namespaces.foo.bar_1 - 2 - 3 " toDepExpr :: PackageName -> SemVer -> NExpr toDepExpr (PackageName name mNamespace) (SemVer a b c (PrereleaseTags tags) _) = do let suffix = pack $ intercalate "-" $ (map show [a, b, c]) <> map show tags ident = fixName name <> "_" <> suffix case mNamespace of Nothing -> mkSym ident -- If there's a namespace, call "namespaces.namespace.pkgname" Just namespace -> mkDots "namespaces" [namespace, ident] | Converts a package name and semver into an selector , which can -- be used in a binding. This is very similar to @toDepExpr@, but it returns -- something to be used in a binding rather than an expression. toSelector :: PackageName -> SemVer -> NAttrPath NExpr toSelector (PackageName name mNamespace) (SemVer a b c (PrereleaseTags tags) _) = mkNAttrPath $ do let suffix = pack $ intercalate "-" $ (map show [a, b, c]) <> map show tags ident = fixName name <> "_" <> suffix StaticKey <$> case mNamespace of Nothing -> [ident] Just namespace -> ["namespaces", namespace, ident] -- | Same as toSelector, but doesn't append a version. toSelectorNoVersion :: PackageName -> NAttrPath NExpr toSelectorNoVersion (PackageName name mNamespace) = mkNAttrPath $ do StaticKey <$> case mNamespace of Nothing -> [fixName name] Just namespace -> ["namespaces", namespace, fixName name] -- | Converts a ResolvedDependency to a nix expression. toNixExpr :: PackageName -> ResolvedDependency -> NExpr toNixExpr name (Resolved (unpackPSC -> semver)) = toDepExpr name semver toNixExpr name (Broken reason) = "brokenPackage" @@ mkNonRecSet ["name" $= mkStr (tshow name), "reason" $= mkStr (tshow reason)] -- | Write a nix expression pretty-printed to a file. writeNix :: MonadIO io => FilePath -> NExpr -> io () writeNix path = writeFileUtf8 path . (<> "\n") . tshow . prettyNix | Gets the .nix filename of a semver . E.g. ( 0 , 1 , 2 ) - > 0.1.2.nix toDotNix :: SemVer -> FilePath toDotNix v = fromText $ tshow v <> ".nix" | Get the .nix filename relative to the nodePackages folder of a package . toRelPath :: PackageName -> SemVer -> FilePath toRelPath (PackageName name mNamespace) version = do let subPath = fromText name </> toDotNix version -- E.g. "foo/1.2.3.nix" case mNamespace of -- Simple case: package '[email protected]' -> './foo/1.2.3.nix' Nothing -> subPath Namespaced : package ' @foo / [email protected] ' - > ' ./@foo / bar/1.2.3.nix ' Just nspace -> fromText ("/@" <> nspace) </> subPath | Converts distinfo into a nix fetchurl call . distInfoToNix :: Maybe Name -- `Just` if we are fetching from a namespace. -> Maybe DistInfo -> NExpr distInfoToNix _ Nothing = Nix.mkPath False "./." distInfoToNix maybeNamespace (Just DistInfo{..}) = do let fetchurl = case maybeNamespace of Nothing -> "pkgs" !. "fetchurl" Just _ -> "fetchUrlNamespaced" (algo, hash) = case diShasum of SHA1 hash' -> ("sha1", hash') SHA256 hash' -> ("sha256", hash') authBinding = case maybeNamespace of Nothing -> [] Just namespace -> [bindTo "namespace" (mkStr namespace)] bindings = ["url" $= mkStr diUrl, algo $= mkStr hash] <> authBinding fetchurl @@ mkNonRecSet bindings -- | Converts package meta to a nix expression, if it exists. 
metaToNix :: PackageMeta -> Maybe NExpr metaToNix PackageMeta{..} = do let grab name = maybe [] (\s -> [name $= mkStr s]) homepage = grab "homepage" (map uriToText pmHomepage) description = grab "description" pmDescription author = grab "author" pmAuthor keywords = case pmKeywords of ks | null ks -> [] | otherwise -> ["keywords" $= mkList (toList (map mkStr ks))] stdenvPlatforms = mkDots "pkgs" ["stdenv", "lib", "platforms"] platforms = case map nodePlatformToText $ toList pmPlatforms of [] -> [] ps -> singleton $ "platforms" $= case ps of -- For a single one, just do pkgs.stdenv.lib.platforms.<platform> [p] -> stdenvPlatforms !. p -- For multiples, use the `with` syntax, and since each is a -- list, join with the concatenation operator. (p:ps) -> mkWith stdenvPlatforms $ foldl' ($++) (mkSym p) (mkSym <$> ps) case homepage <> description <> keywords <> author <> platforms of [] -> Nothing bindings -> Just $ mkNonRecSet bindings -- | Returns true if any of the resolved package's dependencies were broken. hasBroken :: ResolvedPkg -> Bool hasBroken ResolvedPkg{..} = case rpDevDependencies of Nothing -> any isBroken rpDependencies Just devDeps -> any isBroken rpDependencies || any isBroken devDeps where isBroken = \case {Broken _ -> True; _ -> False} -- | Given all of the versions defined in a node packages folder, create a -- default.nix which defines an object that calls out to all of those files. -- -- Essentially, given a directory structure like this: -- > foo/ -- > 0.1.2.nix > 0.2.3.nix -- > bar/ -- > 1.2.3.nix -- > @mynamespace/ -- > qux/ > 3.4.5.nix -- > default.nix -- -- We would generate a nix file that looks like this: -- -- > {callPackage}: -- > -- > { -- > foo_0-1-2 = callPackage ./foo/0.1.2.nix {}; > foo_0 - 2 - 3 = callPackage ./foo/0.2.3.nix { } ; -- > foo = callPackage ./foo/0.2.3.nix {}; > bar_1 - 2 - 3 = callPackage ./bar/1.2.3.nix { } ; -- > bar = callPackage ./bar/1.2.3.nix {}; -- > "@mynamespace-qux_3-4-5" = -- > callPackage (./. + "/@mynamespace/qux/3.4.5.nix") {}; -- > "@mynamespace-qux" = -- > callPackage (./. + "/@mynamespace/qux/3.4.5.nix") {}; -- > } -- -- Interestingly, it doesn't matter what the packagemap actually contains. We -- can derive all of the information we need (names and versions) from the keys -- of the map. packageMapToNix :: PackageMap a -> NExpr packageMapToNix pMap = do let -- Create a parameter set with no defaults given. params = mkParamset' $ [("callPackage", Nothing)] -- Create the function body as a single set which contains all of the -- packages in the set as keys, and a callPackage to their paths as values. toBindings :: (PackageName, [SemVer]) -> [Binding NExpr] toBindings (pkgName, []) = [] -- Grab the latest version and store that under a selector without a -- version. toBindings (pkgName, (latest:vs)) = binding : bindings where binding = toSelectorNoVersion pkgName `mkNamedVar` call latest -- Convert render the name and version to a nix path. It might -- contain an '@' sign, in which case we'll need to use a trick -- to get it into a valid path. path version = case T.find (== '@') textPath of -- There is no '@' in the path. Just make a path nix expression. Nothing -> mkPath renderedPath -- There is an '@'. Then use a path addition; i.e. use the syntax -- ./. + "/this/@path" Just _ -> mkPath "." $+ mkStr textPath where renderedPath = toRelPath pkgName version textPath = pathToText renderedPath -- Equiv. 
to `callPackage path {}` call v = "callPackage" @@ path v @@ mkNonRecSet [] toBinding :: SemVer -> Binding NExpr toBinding version = toSelector pkgName version `mkNamedVar` call version bindings :: [Binding NExpr] bindings = map toBinding (latest:vs) sortedPackages :: [(PackageName, [SemVer])] sortedPackages = do M.toList $ M.map (map fst . M.toDescList) $ psToMap pMap bindings :: [Binding NExpr] bindings = concatMap toBindings sortedPackages mkFunction params $ mkNonRecSet bindings -- | Converts a resolved package object into a nix expression. The expresion -- will be a function where the arguments are its dependencies, and its result -- is a call to `buildNodePackage`. resolvedPkgToNix :: ResolvedPkg -> NExpr resolvedPkgToNix rPkg@ResolvedPkg{..} = mkFunction funcParams body where --------------------------------------------------------------------------- -- Circular dependency resolution -- -- This is pretty gnarly but for now just deal with it... Step 1 : throw out the " broken " packages from the dependencies . withoutBrokens = H.fromList $ go (H.toList rpDependencies) where go [] = [] go ((_, Broken _):rest) = go rest go ((k, Resolved v):rest) = (k, v):go rest Step 2 : separate the dependencies into circular and non - circular . (noncircDepMap, circDepMap) = sepCircularMap withoutBrokens Step 3 : create lists of nix expressions for the circular and -- non-circular dependencies. deps = map (uncurry toDepExpr) $ H.toList noncircDepMap circDeps = flip map (H.toList circDepMap) $ \(name, CircularSemVer ver) -> toDepExpr name ver --------------------------------------------------------------------------- optDeps = map (uncurry toNixExpr) $ H.toList rpOptionalDependencies -- Same for dev dependencies. devDeps = map (uncurry toNixExpr) . H.toList <$> rpDevDependencies -- List of arguments that these functions will take. funcParams' = catMaybes [ Just "pkgs", Just "buildNodePackage", Just "nodePackages", -- If the package has any broken dependencies, we will need to include -- this function. maybeIf (hasBroken rPkg) "brokenPackage", -- If the package has a namespace then it will need to set headers -- when fetching. So add that function as a dependency. maybeIf (isNamespaced rpName) "fetchUrlNamespaced", maybeIf (isNamespaced rpName) "namespaceTokens", -- If any of the package's dependencies have namespaces, they will appear -- in the `namespaces` set, so we'll need that as a dependency. maybeIf (hasNamespacedDependency rPkg) "namespaces" ] -- None of these have defaults, so put them into pairs with Nothing. funcParams = mkParamset' $ map (\x -> (x, Nothing)) funcParams' Wrap an list expression in a ` with nodePackages ; ` syntax if non - empty . 
withNodePackages noneIfEmpty list = case list of [] -> if noneIfEmpty then Nothing else Just $ mkList [] _ -> Just $ mkWith "nodePackages" $ mkList list devDepBinding = case devDeps of Nothing -> Nothing Just ddeps -> bindTo "devDependencies" <$> withNodePackages False ddeps PackageName name namespace = rpName args = mkNonRecSet $ catMaybes [ Just $ "name" $= mkStr name, Just $ "version" $= (mkStr $ tshow rpVersion), Just $ "src" $= distInfoToNix (pnNamespace rpName) rpDistInfo, bindTo "namespace" <$> map mkStr namespace, bindTo "deps" <$> withNodePackages False deps, bindTo "circularDependencies" <$> withNodePackages True circDeps, bindTo "optionalDependencies" <$> withNodePackages True optDeps, devDepBinding, maybeIf (hasBroken rPkg) ("isBroken" $= mkBool True), bindTo "meta" <$> metaToNix rpMeta ] body = "buildNodePackage" @@ args -- | Convenience function to generate an `import /path {args}` expression. importWith :: Bool -- ^ True if the path is from the env, e.g. <nixpkgs> -> FilePath -- ^ Path to import -> [Binding NExpr] -- ^ Arguments to pass -> NExpr -- ^ The resulting nix expression importWith isEnv path args = do "import" @@ Nix.mkPath isEnv (pathToString path) @@ mkNonRecSet args -- | We use this a few times: `import <nixpkgs> {}` importNixpkgs :: NExpr importNixpkgs = importWith True "nixpkgs" [] -- | The default version of nodejs we are using; this should correspond -- to a key in the nixpkgs set we're importing. defaultNodeJS :: Text defaultNodeJS = "nodejs-8_x" -- | Also used a few times, these are the top-level params to the generated -- default.nix files. defaultParams :: Params NExpr defaultParams = do mkParamset' [("pkgs", Just importNixpkgs), ("nodejs", Just $ "pkgs" !. defaultNodeJS)] -- | When passing through arguments, we inherit these things. defaultInherits :: [Binding NExpr] defaultInherits = [ inherit ["pkgs", "nodejs"] #if MIN_VERSION_hnix(0,5,0) genSourcePos #endif ] -- | The name of the subfolder within the output directory that -- contains node packages. nodePackagesDir :: FilePath nodePackagesDir = "nodePackages" bindRootPath :: Binding NExpr bindRootPath = "nodePackagesPath" $= mkPath ("./" </> nodePackagesDir) -- | The root-level default.nix file. rootDefaultNix :: NExpr rootDefaultNix = mkFunction defaultParams body where lets = [ "mkNodeLib" $= importWith False "./nodeLib" ["self" $= "mkNodeLib"] , "nodeLib" $= ("mkNodeLib" @@ mkNonRecSet defaultInherits) ] genPackages = "nodeLib" !. "generatePackages" body = mkLets lets $ genPackages @@ (mkNonRecSet [bindRootPath]) -- | Create a `default.nix` file for a particular package.json; this simply -- imports the package as defined in the given path, and calls into it. packageJsonDefaultNix :: FilePath -- ^ Path to the output directory. -> NExpr packageJsonDefaultNix outputPath = do let libBind = "lib" $= importWith False outputPath defaultInherits callPkg = "lib" !. "callPackage" call = callPkg @@ mkPath "project.nix" @@ mkNonRecSet [] mkFunction defaultParams $ mkLets [libBind] call bindingsToMap :: [Binding t] -> Record t bindingsToMap = foldl' step mempty where step record binding = case binding of #if MIN_VERSION_hnix(0,5,0) NamedVar (StaticKey key :| []) obj _SourcePos #else NamedVar [StaticKey key] obj #endif -> H.insert key obj record _ -> record
null
https://raw.githubusercontent.com/adnelson/nixfromnpm/4ab773cdead920d2312e864857fabaf5f739a80e/src/NixFromNpm/Conversion/ToNix.hs
haskell
# LANGUAGE OverloadedStrings # | Make a dynamic key name that is only enclosed in double quotes (no antiquotes). | Check if it’s a valid nix symbol the nix lexer regex for IDs (symbols) is [a-zA-Z\_][a-zA-Z0-9\_\'\-]* | This contains the same information as the .nix file that corresponds to the package. More or less it tells us everything that we need to build the package. ^ If a token was necessary to fetch the package, include it here. | True if any of the package's dependencies have namespaces. Get all of the dependency sets of the package. Look at all of the package names (keys) to see if any are namespaced. | Turns a string into one that can be used as an identifier. Names can also start with a number; in this case prefix with an underscore. Replace dots with dashes If there's a namespace, call "namespaces.namespace.pkgname" be used in a binding. This is very similar to @toDepExpr@, but it returns something to be used in a binding rather than an expression. | Same as toSelector, but doesn't append a version. | Converts a ResolvedDependency to a nix expression. | Write a nix expression pretty-printed to a file. E.g. "foo/1.2.3.nix" Simple case: package '[email protected]' -> './foo/1.2.3.nix' `Just` if we are fetching from a namespace. | Converts package meta to a nix expression, if it exists. For a single one, just do pkgs.stdenv.lib.platforms.<platform> For multiples, use the `with` syntax, and since each is a list, join with the concatenation operator. | Returns true if any of the resolved package's dependencies were broken. | Given all of the versions defined in a node packages folder, create a default.nix which defines an object that calls out to all of those files. Essentially, given a directory structure like this: > foo/ > 0.1.2.nix > bar/ > 1.2.3.nix > @mynamespace/ > qux/ > default.nix We would generate a nix file that looks like this: > {callPackage}: > > { > foo_0-1-2 = callPackage ./foo/0.1.2.nix {}; > foo = callPackage ./foo/0.2.3.nix {}; > bar = callPackage ./bar/1.2.3.nix {}; > "@mynamespace-qux_3-4-5" = > callPackage (./. + "/@mynamespace/qux/3.4.5.nix") {}; > "@mynamespace-qux" = > callPackage (./. + "/@mynamespace/qux/3.4.5.nix") {}; > } Interestingly, it doesn't matter what the packagemap actually contains. We can derive all of the information we need (names and versions) from the keys of the map. Create a parameter set with no defaults given. Create the function body as a single set which contains all of the packages in the set as keys, and a callPackage to their paths as values. Grab the latest version and store that under a selector without a version. Convert render the name and version to a nix path. It might contain an '@' sign, in which case we'll need to use a trick to get it into a valid path. There is no '@' in the path. Just make a path nix expression. There is an '@'. Then use a path addition; i.e. use the syntax ./. + "/this/@path" Equiv. to `callPackage path {}` | Converts a resolved package object into a nix expression. The expresion will be a function where the arguments are its dependencies, and its result is a call to `buildNodePackage`. ------------------------------------------------------------------------- Circular dependency resolution This is pretty gnarly but for now just deal with it... non-circular dependencies. ------------------------------------------------------------------------- Same for dev dependencies. List of arguments that these functions will take. 
If the package has any broken dependencies, we will need to include this function. If the package has a namespace then it will need to set headers when fetching. So add that function as a dependency. If any of the package's dependencies have namespaces, they will appear in the `namespaces` set, so we'll need that as a dependency. None of these have defaults, so put them into pairs with Nothing. | Convenience function to generate an `import /path {args}` expression. ^ True if the path is from the env, e.g. <nixpkgs> ^ Path to import ^ Arguments to pass ^ The resulting nix expression | We use this a few times: `import <nixpkgs> {}` | The default version of nodejs we are using; this should correspond to a key in the nixpkgs set we're importing. | Also used a few times, these are the top-level params to the generated default.nix files. | When passing through arguments, we inherit these things. | The name of the subfolder within the output directory that contains node packages. | The root-level default.nix file. | Create a `default.nix` file for a particular package.json; this simply imports the package as defined in the given path, and calls into it. ^ Path to the output directory.
# LANGUAGE CPP # # LANGUAGE NoImplicitPrelude # # LANGUAGE LambdaCase # # LANGUAGE RecordWildCards # # LANGUAGE ScopedTypeVariables # # LANGUAGE FlexibleContexts # # LANGUAGE ViewPatterns # module NixFromNpm.Conversion.ToNix where import qualified Prelude as P import Data.Fix (Fix(..)) import qualified Data.HashMap.Strict as H import qualified Data.Map.Strict as M import qualified Data.ByteString.Char8 as C8 import Data.Char (isDigit) import Data.Text (Text, replace) import qualified Data.Text as T import Data.SemVer import NixFromNpm.Common hiding (replace) import Nix.Expr hiding (mkPath) import Nix.Pretty (prettyNix) import qualified Nix.Expr as Nix import Nix.Parser import NixFromNpm.Npm.Types import NixFromNpm.Npm.PackageMap #if MIN_VERSION_hnix(0,5,0) import Data.List.NonEmpty (NonEmpty(..), nonEmpty) import Data.Maybe (fromJust) import Text.Megaparsec (mkPos) import Text.Regex.TDFA ((=~)) import Text.Regex.TDFA.Text () genSourcePos :: SourcePos genSourcePos = SourcePos "generated by nixfromnpm" (mkPos 1) (mkPos 1) mkNamedVar :: NAttrPath r -> r -> Binding r mkNamedVar p r = NamedVar p r genSourcePos mkNAttrPath :: [NKeyName a] -> NAttrPath a mkNAttrPath = fromJust . nonEmpty (!.) :: NExpr -> Text -> NExpr (!.) = mkDot infixl 8 !. mkDot :: NExpr -> Text -> NExpr mkDot e key = mkDots e [key] mkDots :: NExpr -> [Text] -> NExpr mkDots e keys = Fix $ NSelect e (mkNAttrPath (toKey <$> keys)) Nothing where toKey :: Text -> NKeyName NExpr toKey k = (if isPlainSymbol k then StaticKey else dynamicKey) k dynamicKey :: Text -> NKeyName NExpr dynamicKey k = DynamicKey $ Plain $ DoubleQuoted [Plain k] isPlainSymbol :: Text -> Bool isPlainSymbol s = s =~ ("^[a-zA-Z_][a-zA-Z0-9_'-]*$" :: Text) mkParamset' :: [(Text, Maybe NExpr)] -> Params NExpr mkParamset' ps = mkParamset ps False #else mkNamedVar :: NAttrPath r -> r -> Binding r mkNamedVar p r = NamedVar p r mkNAttrPath :: [NKeyName a] -> NAttrPath a mkNAttrPath = id mkParamset' :: [(Text, Maybe NExpr)] -> Params NExpr mkParamset' ps = mkParamset ps #endif data ResolvedPkg = ResolvedPkg { rpName :: PackageName, rpVersion :: SemVer, rpDistInfo :: Maybe DistInfo, rpMeta :: PackageMeta, rpDependencies :: PRecord ResolvedDependency, rpOptionalDependencies :: PRecord ResolvedDependency, rpDevDependencies :: Maybe (PRecord ResolvedDependency) } deriving (Show, Eq) hasNamespacedDependency :: ResolvedPkg -> Bool hasNamespacedDependency rPkg = any hasNs (allDeps rPkg) where allDeps ResolvedPkg{..} = [rpDependencies, rpOptionalDependencies, maybe mempty id rpDevDependencies] hasNs = any isNamespaced . H.keys NPM package names can contain dots , so we translate these into dashes . fixName :: Name -> Name fixName name = do let name' = replace "." "-" name case T.findIndex isDigit name' of First character is a digit ; prefix with underscore Just 0 -> "_" <> name' _ -> name' | Converts a package name and semver into an Nix expression . Example : " foo " and 1.2.3 turns into " foo_1 - 2 - 3 " . 
Example : " foo.bar " and 1.2.3 - baz turns into " foo - bar_1 - 2 - 3 - baz " Example : " @foo / bar " and 1.2.3 turns into " namespaces.foo.bar_1 - 2 - 3 " toDepExpr :: PackageName -> SemVer -> NExpr toDepExpr (PackageName name mNamespace) (SemVer a b c (PrereleaseTags tags) _) = do let suffix = pack $ intercalate "-" $ (map show [a, b, c]) <> map show tags ident = fixName name <> "_" <> suffix case mNamespace of Nothing -> mkSym ident Just namespace -> mkDots "namespaces" [namespace, ident] | Converts a package name and semver into an selector , which can toSelector :: PackageName -> SemVer -> NAttrPath NExpr toSelector (PackageName name mNamespace) (SemVer a b c (PrereleaseTags tags) _) = mkNAttrPath $ do let suffix = pack $ intercalate "-" $ (map show [a, b, c]) <> map show tags ident = fixName name <> "_" <> suffix StaticKey <$> case mNamespace of Nothing -> [ident] Just namespace -> ["namespaces", namespace, ident] toSelectorNoVersion :: PackageName -> NAttrPath NExpr toSelectorNoVersion (PackageName name mNamespace) = mkNAttrPath $ do StaticKey <$> case mNamespace of Nothing -> [fixName name] Just namespace -> ["namespaces", namespace, fixName name] toNixExpr :: PackageName -> ResolvedDependency -> NExpr toNixExpr name (Resolved (unpackPSC -> semver)) = toDepExpr name semver toNixExpr name (Broken reason) = "brokenPackage" @@ mkNonRecSet ["name" $= mkStr (tshow name), "reason" $= mkStr (tshow reason)] writeNix :: MonadIO io => FilePath -> NExpr -> io () writeNix path = writeFileUtf8 path . (<> "\n") . tshow . prettyNix | Gets the .nix filename of a semver . E.g. ( 0 , 1 , 2 ) - > 0.1.2.nix toDotNix :: SemVer -> FilePath toDotNix v = fromText $ tshow v <> ".nix" | Get the .nix filename relative to the nodePackages folder of a package . toRelPath :: PackageName -> SemVer -> FilePath toRelPath (PackageName name mNamespace) version = do case mNamespace of Nothing -> subPath Namespaced : package ' @foo / [email protected] ' - > ' ./@foo / bar/1.2.3.nix ' Just nspace -> fromText ("/@" <> nspace) </> subPath | Converts distinfo into a nix fetchurl call . -> Maybe DistInfo -> NExpr distInfoToNix _ Nothing = Nix.mkPath False "./." distInfoToNix maybeNamespace (Just DistInfo{..}) = do let fetchurl = case maybeNamespace of Nothing -> "pkgs" !. "fetchurl" Just _ -> "fetchUrlNamespaced" (algo, hash) = case diShasum of SHA1 hash' -> ("sha1", hash') SHA256 hash' -> ("sha256", hash') authBinding = case maybeNamespace of Nothing -> [] Just namespace -> [bindTo "namespace" (mkStr namespace)] bindings = ["url" $= mkStr diUrl, algo $= mkStr hash] <> authBinding fetchurl @@ mkNonRecSet bindings metaToNix :: PackageMeta -> Maybe NExpr metaToNix PackageMeta{..} = do let grab name = maybe [] (\s -> [name $= mkStr s]) homepage = grab "homepage" (map uriToText pmHomepage) description = grab "description" pmDescription author = grab "author" pmAuthor keywords = case pmKeywords of ks | null ks -> [] | otherwise -> ["keywords" $= mkList (toList (map mkStr ks))] stdenvPlatforms = mkDots "pkgs" ["stdenv", "lib", "platforms"] platforms = case map nodePlatformToText $ toList pmPlatforms of [] -> [] ps -> singleton $ "platforms" $= case ps of [p] -> stdenvPlatforms !. 
p (p:ps) -> mkWith stdenvPlatforms $ foldl' ($++) (mkSym p) (mkSym <$> ps) case homepage <> description <> keywords <> author <> platforms of [] -> Nothing bindings -> Just $ mkNonRecSet bindings hasBroken :: ResolvedPkg -> Bool hasBroken ResolvedPkg{..} = case rpDevDependencies of Nothing -> any isBroken rpDependencies Just devDeps -> any isBroken rpDependencies || any isBroken devDeps where isBroken = \case {Broken _ -> True; _ -> False} > 0.2.3.nix > 3.4.5.nix > foo_0 - 2 - 3 = callPackage ./foo/0.2.3.nix { } ; > bar_1 - 2 - 3 = callPackage ./bar/1.2.3.nix { } ; packageMapToNix :: PackageMap a -> NExpr packageMapToNix pMap = do let params = mkParamset' $ [("callPackage", Nothing)] toBindings :: (PackageName, [SemVer]) -> [Binding NExpr] toBindings (pkgName, []) = [] toBindings (pkgName, (latest:vs)) = binding : bindings where binding = toSelectorNoVersion pkgName `mkNamedVar` call latest path version = case T.find (== '@') textPath of Nothing -> mkPath renderedPath Just _ -> mkPath "." $+ mkStr textPath where renderedPath = toRelPath pkgName version textPath = pathToText renderedPath call v = "callPackage" @@ path v @@ mkNonRecSet [] toBinding :: SemVer -> Binding NExpr toBinding version = toSelector pkgName version `mkNamedVar` call version bindings :: [Binding NExpr] bindings = map toBinding (latest:vs) sortedPackages :: [(PackageName, [SemVer])] sortedPackages = do M.toList $ M.map (map fst . M.toDescList) $ psToMap pMap bindings :: [Binding NExpr] bindings = concatMap toBindings sortedPackages mkFunction params $ mkNonRecSet bindings resolvedPkgToNix :: ResolvedPkg -> NExpr resolvedPkgToNix rPkg@ResolvedPkg{..} = mkFunction funcParams body where Step 1 : throw out the " broken " packages from the dependencies . withoutBrokens = H.fromList $ go (H.toList rpDependencies) where go [] = [] go ((_, Broken _):rest) = go rest go ((k, Resolved v):rest) = (k, v):go rest Step 2 : separate the dependencies into circular and non - circular . (noncircDepMap, circDepMap) = sepCircularMap withoutBrokens Step 3 : create lists of nix expressions for the circular and deps = map (uncurry toDepExpr) $ H.toList noncircDepMap circDeps = flip map (H.toList circDepMap) $ \(name, CircularSemVer ver) -> toDepExpr name ver optDeps = map (uncurry toNixExpr) $ H.toList rpOptionalDependencies devDeps = map (uncurry toNixExpr) . H.toList <$> rpDevDependencies funcParams' = catMaybes [ Just "pkgs", Just "buildNodePackage", Just "nodePackages", maybeIf (hasBroken rPkg) "brokenPackage", maybeIf (isNamespaced rpName) "fetchUrlNamespaced", maybeIf (isNamespaced rpName) "namespaceTokens", maybeIf (hasNamespacedDependency rPkg) "namespaces" ] funcParams = mkParamset' $ map (\x -> (x, Nothing)) funcParams' Wrap an list expression in a ` with nodePackages ; ` syntax if non - empty . 
withNodePackages noneIfEmpty list = case list of [] -> if noneIfEmpty then Nothing else Just $ mkList [] _ -> Just $ mkWith "nodePackages" $ mkList list devDepBinding = case devDeps of Nothing -> Nothing Just ddeps -> bindTo "devDependencies" <$> withNodePackages False ddeps PackageName name namespace = rpName args = mkNonRecSet $ catMaybes [ Just $ "name" $= mkStr name, Just $ "version" $= (mkStr $ tshow rpVersion), Just $ "src" $= distInfoToNix (pnNamespace rpName) rpDistInfo, bindTo "namespace" <$> map mkStr namespace, bindTo "deps" <$> withNodePackages False deps, bindTo "circularDependencies" <$> withNodePackages True circDeps, bindTo "optionalDependencies" <$> withNodePackages True optDeps, devDepBinding, maybeIf (hasBroken rPkg) ("isBroken" $= mkBool True), bindTo "meta" <$> metaToNix rpMeta ] body = "buildNodePackage" @@ args importWith isEnv path args = do "import" @@ Nix.mkPath isEnv (pathToString path) @@ mkNonRecSet args importNixpkgs :: NExpr importNixpkgs = importWith True "nixpkgs" [] defaultNodeJS :: Text defaultNodeJS = "nodejs-8_x" defaultParams :: Params NExpr defaultParams = do mkParamset' [("pkgs", Just importNixpkgs), ("nodejs", Just $ "pkgs" !. defaultNodeJS)] defaultInherits :: [Binding NExpr] defaultInherits = [ inherit ["pkgs", "nodejs"] #if MIN_VERSION_hnix(0,5,0) genSourcePos #endif ] nodePackagesDir :: FilePath nodePackagesDir = "nodePackages" bindRootPath :: Binding NExpr bindRootPath = "nodePackagesPath" $= mkPath ("./" </> nodePackagesDir) rootDefaultNix :: NExpr rootDefaultNix = mkFunction defaultParams body where lets = [ "mkNodeLib" $= importWith False "./nodeLib" ["self" $= "mkNodeLib"] , "nodeLib" $= ("mkNodeLib" @@ mkNonRecSet defaultInherits) ] genPackages = "nodeLib" !. "generatePackages" body = mkLets lets $ genPackages @@ (mkNonRecSet [bindRootPath]) -> NExpr packageJsonDefaultNix outputPath = do let libBind = "lib" $= importWith False outputPath defaultInherits callPkg = "lib" !. "callPackage" call = callPkg @@ mkPath "project.nix" @@ mkNonRecSet [] mkFunction defaultParams $ mkLets [libBind] call bindingsToMap :: [Binding t] -> Record t bindingsToMap = foldl' step mempty where step record binding = case binding of #if MIN_VERSION_hnix(0,5,0) NamedVar (StaticKey key :| []) obj _SourcePos #else NamedVar [StaticKey key] obj #endif -> H.insert key obj record _ -> record
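The naming rule implemented by fixName and toDepExpr above (dots become dashes, a leading digit gets an underscore prefix, and the version is appended after an underscore with dash-separated components) is easiest to see in isolation. Below is a small standalone Haskell sketch of that rule only; nixIdent is a hypothetical helper, and the real module builds an NExpr rather than Text.

{-# LANGUAGE OverloadedStrings #-}
-- Hypothetical sketch of the identifier rule used by fixName/toDepExpr.
import Data.Char (isDigit)
import Data.Text (Text)
import qualified Data.Text as T

nixIdent :: Text -> [Int] -> Text
nixIdent name vers = prefixed <> "_" <> T.intercalate "-" (map (T.pack . show) vers)
  where
    dashed   = T.replace "." "-" name
    prefixed = case T.uncons dashed of
      Just (c, _) | isDigit c -> "_" <> dashed
      _                       -> dashed

-- nixIdent "socket.io" [1,2,3] == "socket-io_1-2-3"
-- nixIdent "3d-model"  [0,1,0] == "_3d-model_0-1-0"

Prerelease tags are handled the same way in toDepExpr: they are shown and joined onto the same dash-separated suffix.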
1c7a8e5ee3f8ea6bf0f5180e8125861c5c6c4e0e481aa7e9be724e75d7599e2b
borodust/claw
function-index.lisp
(cl:in-package :iffi) (defstruct function-index (function-table (make-hash-table :test 'equal)) (argument-table (make-hash-table :test 'equal)) (alias-table (make-hash-table :test 'equal))) (defvar *function-index* (make-function-index)) (defun find-alias-node (name) (gethash name (function-index-alias-table *function-index*))) (defun register-alias-node (name parent children) (setf (gethash name (function-index-alias-table *function-index*)) (list* parent children))) (defun alias-node-parent (node) (first node)) (defun alias-node-children (node) (rest node)) (defun add-alias-node-child (node child) (pushnew child (rest node) :test #'equal) node) (defun update-alias-node-parent (node new-parent) (setf (first node) new-parent) node) (defun ensure-canonical-pointer (type) (cond ((and (atom type) (or (eq type :const) (string= "&rest" (string-downcase type)))) type) ((and (listp type) (eq :pointer (cffi::canonicalize-foreign-type type))) (list* :pointer (rest type))) ((eq :pointer (cffi::canonicalize-foreign-type type)) :pointer) (t type))) (defun find-base-type (type) (let ((type (ensure-canonical-pointer type))) (if (and (listp type) (eq :pointer (first type))) (list :pointer (find-base-type (second type))) (let* ((node (find-alias-node type)) (parent (and node (alias-node-parent node)))) (if parent (find-base-type parent) type))))) (defun find-intricate-aliases (name) (labels ((%collect-aliases (id) (let ((node (find-alias-node id))) (list* id (loop for child in (alias-node-children node) append (%collect-aliases child)))))) (%collect-aliases (find-base-type name)))) (defun maintain-base-functions (current-base new-base) (let* ((new-base (find-base-type new-base)) (fids (gethash current-base (function-index-argument-table *function-index*)))) (loop for fid in fids for function = (gethash fid (function-index-function-table *function-index*)) for new-fid = (list* (first fid) (nsubstitute new-base current-base (rest fid) :test #'equal)) do (remhash fid (function-index-function-table *function-index*)) (setf (gethash new-fid (function-index-function-table *function-index*)) function) collect new-fid into new-fids finally (remhash current-base (function-index-argument-table *function-index*)) (setf (gethash new-base (function-index-argument-table *function-index*)) new-fids)))) (defun register-intricate-alias (base alias) (labels ((%register-alias (name base) (register-alias-node name base nil)) (%register-base (name &rest children) (register-alias-node name nil children))) (let ((base-node (gethash base (function-index-alias-table *function-index*))) (alias-node (gethash alias (function-index-alias-table *function-index*)))) (cond ((not (or base-node alias-node)) (%register-base base alias) (%register-alias alias base) (maintain-base-functions alias base)) ((and base-node alias-node) (unless (and (equal (alias-node-parent alias-node) base) (member alias (alias-node-children base-node) :test #'equal)) (error "Alias ~A exists: current base ~A" alias (find-base-type alias)))) (base-node (%register-alias alias base) (add-alias-node-child base-node alias) (maintain-base-functions alias base)) (alias-node (if (alias-node-parent alias-node) (error "Alias ~A exists: current base ~A" alias (find-base-type alias)) (progn (%register-base base alias) (update-alias-node-parent alias-node base) (maintain-base-functions alias base)))))))) (defun intricate-function (name &rest arg-types) (let ((function-table (function-index-function-table *function-index*))) (gethash (list* name (mapcar #'find-base-type 
arg-types)) function-table))) (defun (setf intricate-function) (value name &rest arg-types) (let* ((function-table (function-index-function-table *function-index*)) (base-types (mapcar #'find-base-type arg-types)) (function-id (list* name base-types))) (loop for base in base-types do (push function-id (gethash base (function-index-argument-table *function-index*)))) (setf (gethash function-id function-table) value)))
null
https://raw.githubusercontent.com/borodust/claw/db10a752a491c8669c3ee90c0533f490d79c731f/src/iffi/function-index.lisp
lisp
(cl:in-package :iffi) (defstruct function-index (function-table (make-hash-table :test 'equal)) (argument-table (make-hash-table :test 'equal)) (alias-table (make-hash-table :test 'equal))) (defvar *function-index* (make-function-index)) (defun find-alias-node (name) (gethash name (function-index-alias-table *function-index*))) (defun register-alias-node (name parent children) (setf (gethash name (function-index-alias-table *function-index*)) (list* parent children))) (defun alias-node-parent (node) (first node)) (defun alias-node-children (node) (rest node)) (defun add-alias-node-child (node child) (pushnew child (rest node) :test #'equal) node) (defun update-alias-node-parent (node new-parent) (setf (first node) new-parent) node) (defun ensure-canonical-pointer (type) (cond ((and (atom type) (or (eq type :const) (string= "&rest" (string-downcase type)))) type) ((and (listp type) (eq :pointer (cffi::canonicalize-foreign-type type))) (list* :pointer (rest type))) ((eq :pointer (cffi::canonicalize-foreign-type type)) :pointer) (t type))) (defun find-base-type (type) (let ((type (ensure-canonical-pointer type))) (if (and (listp type) (eq :pointer (first type))) (list :pointer (find-base-type (second type))) (let* ((node (find-alias-node type)) (parent (and node (alias-node-parent node)))) (if parent (find-base-type parent) type))))) (defun find-intricate-aliases (name) (labels ((%collect-aliases (id) (let ((node (find-alias-node id))) (list* id (loop for child in (alias-node-children node) append (%collect-aliases child)))))) (%collect-aliases (find-base-type name)))) (defun maintain-base-functions (current-base new-base) (let* ((new-base (find-base-type new-base)) (fids (gethash current-base (function-index-argument-table *function-index*)))) (loop for fid in fids for function = (gethash fid (function-index-function-table *function-index*)) for new-fid = (list* (first fid) (nsubstitute new-base current-base (rest fid) :test #'equal)) do (remhash fid (function-index-function-table *function-index*)) (setf (gethash new-fid (function-index-function-table *function-index*)) function) collect new-fid into new-fids finally (remhash current-base (function-index-argument-table *function-index*)) (setf (gethash new-base (function-index-argument-table *function-index*)) new-fids)))) (defun register-intricate-alias (base alias) (labels ((%register-alias (name base) (register-alias-node name base nil)) (%register-base (name &rest children) (register-alias-node name nil children))) (let ((base-node (gethash base (function-index-alias-table *function-index*))) (alias-node (gethash alias (function-index-alias-table *function-index*)))) (cond ((not (or base-node alias-node)) (%register-base base alias) (%register-alias alias base) (maintain-base-functions alias base)) ((and base-node alias-node) (unless (and (equal (alias-node-parent alias-node) base) (member alias (alias-node-children base-node) :test #'equal)) (error "Alias ~A exists: current base ~A" alias (find-base-type alias)))) (base-node (%register-alias alias base) (add-alias-node-child base-node alias) (maintain-base-functions alias base)) (alias-node (if (alias-node-parent alias-node) (error "Alias ~A exists: current base ~A" alias (find-base-type alias)) (progn (%register-base base alias) (update-alias-node-parent alias-node base) (maintain-base-functions alias base)))))))) (defun intricate-function (name &rest arg-types) (let ((function-table (function-index-function-table *function-index*))) (gethash (list* name (mapcar #'find-base-type 
arg-types)) function-table))) (defun (setf intricate-function) (value name &rest arg-types) (let* ((function-table (function-index-function-table *function-index*)) (base-types (mapcar #'find-base-type arg-types)) (function-id (list* name base-types))) (loop for base in base-types do (push function-id (gethash base (function-index-argument-table *function-index*)))) (setf (gethash function-id function-table) value)))
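Since the file above carries no comments, a rough usage sketch may help: the point of the index is that lookups resolve argument types through find-base-type, so a registered alias and its base type address the same entry. All names below are hypothetical, and the alias is assumed to already be a CFFI type (e.g. created with cffi:defctype), because ensure-canonical-pointer calls cffi::canonicalize-foreign-type on it.

;; Hypothetical sketch, not taken from the claw/iffi documentation.
;; Assumes (cffi:defctype my-int :int) has been evaluated beforehand.
(setf (intricate-function 'example-lib::add :int :int) #'+)
(register-intricate-alias :int 'my-int)
;; Both spellings now resolve to the same indexed function:
(intricate-function 'example-lib::add :int :int)        ; => #'+
(intricate-function 'example-lib::add 'my-int 'my-int)  ; => #'+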
817ad97f7a16239c7d26395b866cfdc3ea1962fa07471130d05f3c13085b49cb
may-liu/qtalk
subscribe_msg_sup.erl
%%%---------------------------------------------------------------------- %%% File : ejabberd_odbc_sup.erl Author : < > %%% Purpose : ODBC connections supervisor Created : 22 Dec 2004 by < > %%% %%% ejabberd , Copyright ( C ) 2002 - 2014 ProcessOne %%% %%% This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; either version 2 of the %%% License, or (at your option) any later version. %%% %%% This program is distributed in the hope that it will be useful, %%% but WITHOUT ANY WARRANTY; without even the implied warranty of %%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU %%% General Public License for more details. %%% You should have received a copy of the GNU General Public License along with this program ; if not , write to the Free Software Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA . %%% %%%---------------------------------------------------------------------- -module(subscribe_msg_sup). %% API -export([start_link/1, init/1, add_pid/2, remove_pid/2, get_pids/1, get_random_pid/1]). -include("ejabberd.hrl"). -include("logger.hrl"). -define(DEFAULT_POOL_SIZE, 8). -define(DEFAULT_START_INTERVAL, 30). -define(CONNECT_TIMEOUT, 500). start_link(Opts) -> Servers = ejabberd_config:get_myhosts(), Host = lists:nth(1,Servers), ets:new(subscribe_msg_pid, [named_table, bag, public]), supervisor:start_link({local, gen_mod:get_module_proc(Host, ?MODULE)}, ?MODULE, [Host,Opts]). init([Host,Opts]) -> PoolSize = ?DEFAULT_POOL_SIZE, StartInterval = ?DEFAULT_START_INTERVAL, {ok, {{one_for_one, ?DEFAULT_POOL_SIZE * 10, 1}, lists:map(fun (I) -> {I, {subscribe_msg, start_link, [Host, StartInterval * 1000]}, transient, 2000, worker, [?MODULE]} end, lists:seq(1, PoolSize))}}. get_pids(Host) -> case ets:lookup(subscribe_msg_pid,Host) of [] -> []; Rs when is_list(Rs) -> lists:flatmap(fun(B) -> [element(2,B)] end,Rs); _ -> [] end. get_random_pid(Host) -> case get_pids(Host) of [] -> none; Pids -> lists:nth(erlang:phash(os:timestamp(), length(Pids)), Pids) end. add_pid(Host,Pid) -> ets:insert(subscribe_msg_pid,{Host,Pid}). remove_pid(Host,Pid) -> ets:delete_object(subscribe_msg_pid,{Host,Pid}).
null
https://raw.githubusercontent.com/may-liu/qtalk/f5431e5a7123975e9656e7ab239e674ce33713cd/qtalk_opensource/src/subscribe_msg_sup.erl
erlang
---------------------------------------------------------------------- File : ejabberd_odbc_sup.erl Purpose : ODBC connections supervisor This program is free software; you can redistribute it and/or License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. ---------------------------------------------------------------------- API
Author : < > Created : 22 Dec 2004 by < > ejabberd , Copyright ( C ) 2002 - 2014 ProcessOne modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; either version 2 of the You should have received a copy of the GNU General Public License along with this program ; if not , write to the Free Software Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA . -module(subscribe_msg_sup). -export([start_link/1, init/1, add_pid/2, remove_pid/2, get_pids/1, get_random_pid/1]). -include("ejabberd.hrl"). -include("logger.hrl"). -define(DEFAULT_POOL_SIZE, 8). -define(DEFAULT_START_INTERVAL, 30). -define(CONNECT_TIMEOUT, 500). start_link(Opts) -> Servers = ejabberd_config:get_myhosts(), Host = lists:nth(1,Servers), ets:new(subscribe_msg_pid, [named_table, bag, public]), supervisor:start_link({local, gen_mod:get_module_proc(Host, ?MODULE)}, ?MODULE, [Host,Opts]). init([Host,Opts]) -> PoolSize = ?DEFAULT_POOL_SIZE, StartInterval = ?DEFAULT_START_INTERVAL, {ok, {{one_for_one, ?DEFAULT_POOL_SIZE * 10, 1}, lists:map(fun (I) -> {I, {subscribe_msg, start_link, [Host, StartInterval * 1000]}, transient, 2000, worker, [?MODULE]} end, lists:seq(1, PoolSize))}}. get_pids(Host) -> case ets:lookup(subscribe_msg_pid,Host) of [] -> []; Rs when is_list(Rs) -> lists:flatmap(fun(B) -> [element(2,B)] end,Rs); _ -> [] end. get_random_pid(Host) -> case get_pids(Host) of [] -> none; Pids -> lists:nth(erlang:phash(os:timestamp(), length(Pids)), Pids) end. add_pid(Host,Pid) -> ets:insert(subscribe_msg_pid,{Host,Pid}). remove_pid(Host,Pid) -> ets:delete_object(subscribe_msg_pid,{Host,Pid}).
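A brief, hypothetical sketch of how the registry kept by this supervisor is used: workers call add_pid/2 with their host, and callers spread load by asking for a random pid. The message sent below is made up; only the three registry calls come from the module above, and they require the supervisor to have created the subscribe_msg_pid ETS table first.

%% Hypothetical shell session; assumes subscribe_msg_sup has been started.
Host = <<"example.com">>,
true = subscribe_msg_sup:add_pid(Host, self()),
Pid  = subscribe_msg_sup:get_random_pid(Host),  %% one of the registered workers
Pid ! {deliver, <<"hello">>},                   %% made-up message shape
true = subscribe_msg_sup:remove_pid(Host, self()).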
d117a979d2403cbe1afc9349080c28a6f49cf800aa37bc7117150f16b81b1348
roswell/roswell
dist-disable.lisp
(defpackage :roswell.dist.disable (:use :cl)) (in-package :roswell.dist.disable) (defun disable (&rest r) (dolist (elm (rest r)) (let ((dist (ql-dist:find-dist elm))) (if dist (ql-dist:disable dist) (format t "~A not found.~%" elm)))))
null
https://raw.githubusercontent.com/roswell/roswell/0107dfb54393aff1a776deb79d58f67f642135cb/lisp/dist-disable.lisp
lisp
(defpackage :roswell.dist.disable (:use :cl)) (in-package :roswell.dist.disable) (defun disable (&rest r) (dolist (elm (rest r)) (let ((dist (ql-dist:find-dist elm))) (if dist (ql-dist:disable dist) (format t "~A not found.~%" elm)))))
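Note that disable iterates over (rest r), i.e. it skips its first argument, which is presumably the subcommand name supplied by the roswell CLI. A hypothetical direct call therefore passes a placeholder first:

;; Hypothetical REPL call; "disable" stands in for the skipped first
;; argument and "ultralisp" is just an example dist name.
(roswell.dist.disable::disable "disable" "ultralisp")
;; disables the dist if ql-dist:find-dist knows it,
;; otherwise prints "ultralisp not found."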
debf9ab90b88916783aa3a04a8a1a18a50c4a0cb7fc9d3ce8692441633e41105
Carnap/Carnap
Main.hs
module Main where import Carnap.GHCJS.Action.ProofCheck main :: IO () main = proofCheckAction
null
https://raw.githubusercontent.com/Carnap/Carnap/99546e377008247c5a1e8de1e294aac8e22584d7/Carnap-GHCJS/Proof-Check/Main.hs
haskell
module Main where import Carnap.GHCJS.Action.ProofCheck main :: IO () main = proofCheckAction
526ddeca155bb62c5ace20b3b74b7fddef1f4643a966fd34073cd841ef513546
UU-ComputerScience/uhc
IntegralInteger1.hs
---------------------------------------------------------------------------------------- what : Integral class , for Integer expected : ok ---------------------------------------------------------------------------------------- what : Integral class, for Integer expected: ok ---------------------------------------------------------------------------------------- -} module IntegralInteger1 where main :: IO () main = do putStrLn (show ((5::Integer) `quot` 3)) putStrLn (show ((5::Integer) `rem` 3)) let (q,r) = quotRem (5::Integer) (3) putStrLn (show q) putStrLn (show r) putStrLn "" putStrLn (show ((-5::Integer) `quot` 3)) putStrLn (show ((-5::Integer) `rem` 3)) let (q,r) = quotRem (-5::Integer) (3) putStrLn (show q) putStrLn (show r) putStrLn "" putStrLn (show ((5::Integer) `quot` (-3))) putStrLn (show ((5::Integer) `rem` (-3))) let (q,r) = quotRem (5::Integer) (-3) putStrLn (show q) putStrLn (show r) putStrLn "" putStrLn (show ((-5::Integer) `quot` (-3))) putStrLn (show ((-5::Integer) `rem` (-3))) let (q,r) = quotRem (-5::Integer) (-3) putStrLn (show q) putStrLn (show r) putStrLn "" putStrLn (show ((5::Integer) `div` 3)) putStrLn (show ((5::Integer) `mod` 3)) let (d,m) = divMod (5::Integer) (3) putStrLn (show d) putStrLn (show m) putStrLn "" putStrLn (show ((-5::Integer) `div` 3)) putStrLn (show ((-5::Integer) `mod` 3)) let (d,m) = divMod (-5::Integer) (3) putStrLn (show d) putStrLn (show m) putStrLn "" putStrLn (show ((5::Integer) `div` (-3))) putStrLn (show ((5::Integer) `mod` (-3))) let (d,m) = divMod (5::Integer) (-3) putStrLn (show d) putStrLn (show m) putStrLn "" putStrLn (show ((-5::Integer) `div` (-3))) putStrLn (show ((-5::Integer) `mod` (-3))) let (d,m) = divMod (-5::Integer) (-3) putStrLn (show d) putStrLn (show m)
null
https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/test/regress/99/IntegralInteger1.hs
haskell
-------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------- -}
what : Integral class , for Integer expected : ok what : Integral class, for Integer expected: ok module IntegralInteger1 where main :: IO () main = do putStrLn (show ((5::Integer) `quot` 3)) putStrLn (show ((5::Integer) `rem` 3)) let (q,r) = quotRem (5::Integer) (3) putStrLn (show q) putStrLn (show r) putStrLn "" putStrLn (show ((-5::Integer) `quot` 3)) putStrLn (show ((-5::Integer) `rem` 3)) let (q,r) = quotRem (-5::Integer) (3) putStrLn (show q) putStrLn (show r) putStrLn "" putStrLn (show ((5::Integer) `quot` (-3))) putStrLn (show ((5::Integer) `rem` (-3))) let (q,r) = quotRem (5::Integer) (-3) putStrLn (show q) putStrLn (show r) putStrLn "" putStrLn (show ((-5::Integer) `quot` (-3))) putStrLn (show ((-5::Integer) `rem` (-3))) let (q,r) = quotRem (-5::Integer) (-3) putStrLn (show q) putStrLn (show r) putStrLn "" putStrLn (show ((5::Integer) `div` 3)) putStrLn (show ((5::Integer) `mod` 3)) let (d,m) = divMod (5::Integer) (3) putStrLn (show d) putStrLn (show m) putStrLn "" putStrLn (show ((-5::Integer) `div` 3)) putStrLn (show ((-5::Integer) `mod` 3)) let (d,m) = divMod (-5::Integer) (3) putStrLn (show d) putStrLn (show m) putStrLn "" putStrLn (show ((5::Integer) `div` (-3))) putStrLn (show ((5::Integer) `mod` (-3))) let (d,m) = divMod (5::Integer) (-3) putStrLn (show d) putStrLn (show m) putStrLn "" putStrLn (show ((-5::Integer) `div` (-3))) putStrLn (show ((-5::Integer) `mod` (-3))) let (d,m) = divMod (-5::Integer) (-3) putStrLn (show d) putStrLn (show m)
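The test above exercises the difference between truncating (quot/rem) and flooring (div/mod) division on negative operands, together with the reconstruction identity both pairs satisfy. A compact Haskell restatement of those facts:

-- quot/rem truncate toward zero, div/mod floor toward negative infinity;
-- in both cases d * q + r == n (for d /= 0).
divisionLaws :: Integer -> Integer -> Bool
divisionLaws n d =
  d * (n `quot` d) + (n `rem` d) == n &&
  d * (n `div`  d) + (n `mod`  d) == n

-- For example: (-5) `quot` 3 == -1 and (-5) `rem` 3 == -2,
-- whereas      (-5) `div`  3 == -2 and (-5) `mod` 3 ==  1.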
9ca0c0b10031960021ca7db5cdf8f78e1f5e29569d8e9b64bbcbe460606467c2
jackfirth/lens
list-ref-take-drop.rkt
#lang racket/base (require racket/contract/base) (provide (contract-out [list-ref-lens (->i ([i exact-nonnegative-integer?]) [lens (i) (lens/c (list*-length-at-least/c (add1 i)) any/c)])] [take-lens (->i ([i exact-nonnegative-integer?]) [lens (i) (lens/c (list*-length-at-least/c (add1 i)) any/c)])] [drop-lens (->i ([i exact-nonnegative-integer?]) [lens (i) (lens/c (list*-length-at-least/c (add1 i)) any/c)])] [first-lens (lens/c (list*-length-at-least/c 1) any/c)] [second-lens (lens/c (list*-length-at-least/c 2) any/c)] [third-lens (lens/c (list*-length-at-least/c 3) any/c)] [fourth-lens (lens/c (list*-length-at-least/c 4) any/c)] [fifth-lens (lens/c (list*-length-at-least/c 5) any/c)] [sixth-lens (lens/c (list*-length-at-least/c 6) any/c)] [seventh-lens (lens/c (list*-length-at-least/c 7) any/c)] [eighth-lens (lens/c (list*-length-at-least/c 8) any/c)] [ninth-lens (lens/c (list*-length-at-least/c 9) any/c)] [tenth-lens (lens/c (list*-length-at-least/c 10) any/c)])) (require racket/list fancy-app "../util/improper-list-length.rkt" lens/private/base/main lens/private/compound/compose "car-cdr.rkt") (module+ test (require rackunit lens/private/test-util/test-lens)) (define (set-take n lst new-head) (append new-head (drop lst n))) (module+ test (check-equal? (set-take 2 '(1 2 3 4 5) '(a b)) '(a b 3 4 5))) (define (set-drop n lst new-tail) (append (take lst n) new-tail)) (module+ test (check-equal? (set-drop 2 '(1 2 3 4 5) '(a b c)) '(1 2 a b c))) (define (take-lens n) (make-lens (take _ n) (set-take n _ _))) (module+ test (define take2-lens (take-lens 2)) (check-lens-view take2-lens '(1 2 3 4 5) '(1 2)) (check-lens-set take2-lens '(1 2 3 4 5) '(a b) '(a b 3 4 5))) (define (drop-lens n) (make-lens (drop _ n) (set-drop n _ _))) (module+ test (define drop2-lens (drop-lens 2)) (check-lens-view drop2-lens '(1 2 3 4 5) '(3 4 5)) (check-lens-set drop2-lens '(1 2 3 4 5) '(a b c) '(1 2 a b c))) (define (list-ref-lens i) (lens-compose car-lens (drop-lens i))) (define first-lens (list-ref-lens 0)) (define second-lens (list-ref-lens 1)) (define third-lens (list-ref-lens 2)) (define fourth-lens (list-ref-lens 3)) (define fifth-lens (list-ref-lens 4)) (define sixth-lens (list-ref-lens 5)) (define seventh-lens (list-ref-lens 6)) (define eighth-lens (list-ref-lens 7)) (define ninth-lens (list-ref-lens 8)) (define tenth-lens (list-ref-lens 9)) (module+ test (check-lens-view first-lens '(1 2 3 4 5) 1) (check-lens-view second-lens '(1 2 3 4 5) 2) (check-lens-view third-lens '(1 2 3 4 5) 3) (check-lens-view fourth-lens '(1 2 3 4 5) 4) (check-lens-view fifth-lens '(1 2 3 4 5) 5) (check-lens-set first-lens '(1 2 3 4 5) 'a '(a 2 3 4 5)) (check-lens-set second-lens '(1 2 3 4 5) 'a '(1 a 3 4 5)) (check-lens-set third-lens '(1 2 3 4 5) 'a '(1 2 a 4 5)) (check-lens-set fourth-lens '(1 2 3 4 5) 'a '(1 2 3 a 5)) (check-lens-set fifth-lens '(1 2 3 4 5) 'a '(1 2 3 4 a)))
null
https://raw.githubusercontent.com/jackfirth/lens/733db7744921409b69ddc78ae5b23ffaa6b91e37/lens-data/lens/private/list/list-ref-take-drop.rkt
racket
#lang racket/base (require racket/contract/base) (provide (contract-out [list-ref-lens (->i ([i exact-nonnegative-integer?]) [lens (i) (lens/c (list*-length-at-least/c (add1 i)) any/c)])] [take-lens (->i ([i exact-nonnegative-integer?]) [lens (i) (lens/c (list*-length-at-least/c (add1 i)) any/c)])] [drop-lens (->i ([i exact-nonnegative-integer?]) [lens (i) (lens/c (list*-length-at-least/c (add1 i)) any/c)])] [first-lens (lens/c (list*-length-at-least/c 1) any/c)] [second-lens (lens/c (list*-length-at-least/c 2) any/c)] [third-lens (lens/c (list*-length-at-least/c 3) any/c)] [fourth-lens (lens/c (list*-length-at-least/c 4) any/c)] [fifth-lens (lens/c (list*-length-at-least/c 5) any/c)] [sixth-lens (lens/c (list*-length-at-least/c 6) any/c)] [seventh-lens (lens/c (list*-length-at-least/c 7) any/c)] [eighth-lens (lens/c (list*-length-at-least/c 8) any/c)] [ninth-lens (lens/c (list*-length-at-least/c 9) any/c)] [tenth-lens (lens/c (list*-length-at-least/c 10) any/c)])) (require racket/list fancy-app "../util/improper-list-length.rkt" lens/private/base/main lens/private/compound/compose "car-cdr.rkt") (module+ test (require rackunit lens/private/test-util/test-lens)) (define (set-take n lst new-head) (append new-head (drop lst n))) (module+ test (check-equal? (set-take 2 '(1 2 3 4 5) '(a b)) '(a b 3 4 5))) (define (set-drop n lst new-tail) (append (take lst n) new-tail)) (module+ test (check-equal? (set-drop 2 '(1 2 3 4 5) '(a b c)) '(1 2 a b c))) (define (take-lens n) (make-lens (take _ n) (set-take n _ _))) (module+ test (define take2-lens (take-lens 2)) (check-lens-view take2-lens '(1 2 3 4 5) '(1 2)) (check-lens-set take2-lens '(1 2 3 4 5) '(a b) '(a b 3 4 5))) (define (drop-lens n) (make-lens (drop _ n) (set-drop n _ _))) (module+ test (define drop2-lens (drop-lens 2)) (check-lens-view drop2-lens '(1 2 3 4 5) '(3 4 5)) (check-lens-set drop2-lens '(1 2 3 4 5) '(a b c) '(1 2 a b c))) (define (list-ref-lens i) (lens-compose car-lens (drop-lens i))) (define first-lens (list-ref-lens 0)) (define second-lens (list-ref-lens 1)) (define third-lens (list-ref-lens 2)) (define fourth-lens (list-ref-lens 3)) (define fifth-lens (list-ref-lens 4)) (define sixth-lens (list-ref-lens 5)) (define seventh-lens (list-ref-lens 6)) (define eighth-lens (list-ref-lens 7)) (define ninth-lens (list-ref-lens 8)) (define tenth-lens (list-ref-lens 9)) (module+ test (check-lens-view first-lens '(1 2 3 4 5) 1) (check-lens-view second-lens '(1 2 3 4 5) 2) (check-lens-view third-lens '(1 2 3 4 5) 3) (check-lens-view fourth-lens '(1 2 3 4 5) 4) (check-lens-view fifth-lens '(1 2 3 4 5) 5) (check-lens-set first-lens '(1 2 3 4 5) 'a '(a 2 3 4 5)) (check-lens-set second-lens '(1 2 3 4 5) 'a '(1 a 3 4 5)) (check-lens-set third-lens '(1 2 3 4 5) 'a '(1 2 a 4 5)) (check-lens-set fourth-lens '(1 2 3 4 5) 'a '(1 2 3 a 5)) (check-lens-set fifth-lens '(1 2 3 4 5) 'a '(1 2 3 4 a)))
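The test submodule above goes through check-lens-view/check-lens-set helpers; with the ordinary lens API the same behaviour reads roughly as follows (a sketch assuming the usual lens-view and lens-set operations exported by the lens package):

;; Rough usage sketch.
(lens-view third-lens '(1 2 3 4 5))          ; => 3
(lens-set  third-lens '(1 2 3 4 5) 'x)       ; => '(1 2 x 4 5)
(lens-view (take-lens 2) '(a b c d))         ; => '(a b)
(lens-set  (drop-lens 2) '(a b c d) '(y z))  ; => '(a b y z)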
43f3b4a031b5d0c3ed596467d5ade3da4c19f1dde1c08e8b307db0ef9eec0594
pbaille/binaural-beats
dl.cljs
(ns utils.dl) (defn download [data filename & [format]] (let [format (or format "application/octet-stream") a (.createElement js/document "a")] (aset a "href" (str "data:text/plain;charset=utf-8," (pr-str data))) (aset a "download" filename) (.appendChild (.-body js/document) a) (.click a) (.removeChild (.-body js/document) a)))
null
https://raw.githubusercontent.com/pbaille/binaural-beats/628a2ed6999bd04647411be26b255fa4d5f13c95/src/utils/dl.cljs
clojure
(ns utils.dl) (defn download [data filename & [format]] (let [format (or format "application/octet-stream") a (.createElement js/document "a")] (aset a "href" (str "data:text/plain;charset=utf-8," (pr-str data))) (aset a "download" filename) (.appendChild (.-body js/document) a) (.click a) (.removeChild (.-body js/document) a)))
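A small hypothetical call site for download (the map and filename are made up; note that the optional format argument is bound but currently unused, since the data URI is hard-coded to text/plain):

;; Hypothetical usage: offer the current settings map as an EDN file download.
(download {:carrier-hz 220 :beat-hz 7.83} "binaural-session.edn")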
615acff6cbaae248bcad8fe67b022f504b249817853042ae8b51434687cb68c6
crisptrutski/matchbox
keyword.cljc
(ns matchbox.serialization.keyword (:require [matchbox.serialization.plain :as plain] [matchbox.utils :as utils] [clojure.walk :as walk])) (defn hydrate-kw [x] (if (and (string? x) (= \: (first x))) (keyword (subs x 1)) x)) (defn hydrate-map [x] (if (map? x) (into (empty x) (map (fn [[k v]] [(keyword k) v]) x)) x)) (defn hydrate [v] #?(:clj (walk/prewalk (comp hydrate-kw hydrate-map plain/hydrate-raw) v) :cljs (walk/postwalk (comp hydrate-kw hydrate-map) (js->clj v :keywordize-keys true)))) (defn kw->str [x] (if (keyword? x) (str x) x)) (defn serialize [v] (->> (walk/stringify-keys v) (walk/postwalk kw->str) #?(:cljs clj->js))) (defn set-default! [] (utils/set-date-config! hydrate serialize))
null
https://raw.githubusercontent.com/crisptrutski/matchbox/5bb9ba96f5df01bce302a8232f6cddd9d64a1d71/src/matchbox/serialization/keyword.cljc
clojure
(ns matchbox.serialization.keyword (:require [matchbox.serialization.plain :as plain] [matchbox.utils :as utils] [clojure.walk :as walk])) (defn hydrate-kw [x] (if (and (string? x) (= \: (first x))) (keyword (subs x 1)) x)) (defn hydrate-map [x] (if (map? x) (into (empty x) (map (fn [[k v]] [(keyword k) v]) x)) x)) (defn hydrate [v] #?(:clj (walk/prewalk (comp hydrate-kw hydrate-map plain/hydrate-raw) v) :cljs (walk/postwalk (comp hydrate-kw hydrate-map) (js->clj v :keywordize-keys true)))) (defn kw->str [x] (if (keyword? x) (str x) x)) (defn serialize [v] (->> (walk/stringify-keys v) (walk/postwalk kw->str) #?(:cljs clj->js))) (defn set-default! [] (utils/set-date-config! hydrate serialize))
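A rough round-trip sketch on the CLJ side (values are illustrative; it assumes plain/hydrate-raw passes plain Clojure values through unchanged, and note that clojure.walk/stringify-keys drops keyword namespaces, so only unqualified keys survive the round trip):

;; Hypothetical REPL session (CLJ branch).
(serialize {:name "ada" :tags [:a :b]})
;; => {"name" "ada", "tags" [":a" ":b"]}
(hydrate (serialize {:name "ada" :tags [:a :b]}))
;; => {:name "ada", :tags [:a :b]}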
5a3d54bb0fd39fa23127118de5655c6c0b5ab3191fdfac2dde3fd55f1ee281e7
mbutterick/typesetting
check-pdf.rkt
#lang debug racket (require rackunit (prefix-in zlib: fontland/zlib) fontland/table/cff/cff-top) (provide (all-defined-out)) (define (xref-offset bs) (match (regexp-match #px"(?<=startxref\n)\\d+" bs) [(list val) (read (open-input-bytes val))] [_ (error 'no-xref)])) (define-syntax (pat-lex stx) (syntax-case stx (else) [(_ PORT [PAT . REST] ... [else ELSE-CLAUSE]) (with-syntax ([(REST ...) (map (λ (stx) (syntax-case stx () [() #'(=> car)] [_ stx])) (syntax->list #'(REST ...)))]) #'(cond [(regexp-try-match (pregexp (string-append "^" PAT)) PORT) . REST] ... [else ELSE-CLAUSE]))])) (define (between-delimiters bs left right) (parameterize ([current-input-port (if (input-port? bs) bs (open-input-bytes bs))]) (let loop ([acc null][depth 0]) (cond [(regexp-try-match (pregexp (string-append "^" (regexp-quote (~a left)))) (current-input-port)) => (λ (m) (loop (if (empty? acc) impliedly throw away first left delimiter (cons (car m) acc)) (add1 depth)))] [(regexp-try-match (pregexp (string-append "^" (regexp-quote (~a right)))) (current-input-port)) => (λ (m) (case depth [(1) (bytes-append* (reverse acc))] [else (loop (cons (car m) acc) (sub1 depth))]))] [else (define bstr (read-byte)) (and (not (eof-object? bstr)) (loop (if (zero? depth) acc ; impliedly throw away leading non-delimiter bytes (cons (bytes bstr) acc)) depth))])))) (module+ test (require rackunit) (define bs #"a<<b<<c>>x<<z>>d>>e<<f>>g") (check-equal? (between-delimiters bs #"<<" #">>") #"b<<c>>x<<z>>d") (check-equal? (between-delimiters (between-delimiters bs #"<<" #">>") #"<<" #">>") #"c") (check-false (between-delimiters #"abc" #"<<" #">>")) (check-equal? (between-delimiters #"[a[b]c]" #"[" #"]") #"a[b]c") (check-equal? (let ([ip (open-input-bytes #"<</foo 42>>z")]) (parse-1 ip) (port->bytes ip)) #"z")) (define excluded-keys (list #"/Producer" #"/Creator" #"/CreationDate" #"/ModDate" #"/Keywords" #"/Title" #"/Author" #"/Subject")) (define (parse-1 ip) (cond ;; the complication is that arrays & dicts can contain other arrays & dicts ;; so we have to scan ahead in an intelligent way. [(equal? (peek-bytes 1 0 ip) #"[") ;; array (parse-pdf-bytes (between-delimiters ip #"[" #"]"))] [(equal? (peek-bytes 2 0 ip) #"<<") ;; dict, maybe with stream (define items (parse-pdf-bytes (between-delimiters ip #"<<" #">>"))) (unless (even? (length items)) (raise items)) (define dic (sort ; put hash into order so it's comparable (for/list ([kv (in-slice 2 items)] suppress these keys so we can compare pdfkit & pitfall output #:unless (member (car kv) excluded-keys)) (apply cons kv)) bytes<? #:key car)) (cond ;; might have a stream [(regexp-try-match #px"^\\s*stream\n" ip) (define stream-length (read (open-input-bytes (cdr (assoc #"/Length" dic))))) (define compressed? (equal? (dict-ref dic #"/Filter" #f) #"/FlateDecode")) (define stream ((if compressed? zlib:inflate values) (read-bytes stream-length ip))) ;; font subsets have their own interior structure, so ignore (maybe too lenient) (define font? (equal? (subbytes stream 0 4) #"true")) (dict-update (append dic (list (cons 'stream (if font? #"0" stream)))) ;; compressed length may vary, so just set to #"0" #"/Length" (λ (val) (cond [(or font? compressed?) (bytes-length stream)] [else val])))] [else dic])] [else (pat-lex ip ["\\s+" (parse-1 ip)] ; whitespace ["\\d+ 0 obj" (parse-1 ip)] ;; obj name ["\\d+ 0 R"] ; xref ["[-]?\\d*\\.\\d+"] ; real ["[-]?\\d+\\.?"] ; integer ["\\(.*?\\)"] ; parenstring ["/[A-Z]{6}(\\+\\S+)" => cadr] ; font keystring. 
prefix is random, so ignore ["/\\S+"] ; keystring [else eof])])) (define (parse-pdf-bytes bs) (for/list ([tok (in-port parse-1 (open-input-bytes bs))]) tok)) (define (pdf->dict pdf) (define pdf-bs (if (bytes? pdf) pdf (file->bytes pdf))) (define xoff (xref-offset pdf-bs)) (define xref-ip (open-input-bytes (subbytes pdf-bs (+ xoff (bytes-length #"xref\n0"))))) (define ref-count (read xref-ip)) (define obj-locations (append (sort ; sort by byte offset drop record : there is no zeroth object (for/list ([i (in-range ref-count)]) (cons i (read (open-input-bytes (car (regexp-match #px"\\d{10}" xref-ip))))))) < #:key cdr) (list (cons #f xoff)))) (sort ; sort by index (parameterize ([current-input-port (open-input-bytes pdf-bs)]) (for/list ([(idx start) (in-dict obj-locations)] [(_ end) (in-dict (cdr obj-locations))]) (cons idx (car (parse-pdf-bytes (peek-bytes (- end start) start)))))) < #:key car)) (define (dict-compare arg1 arg2 [obj-idx #f]) (define d1 (if (dict? arg1) arg1 (pdf->dict arg1))) (define d2 (if (dict? arg2) arg2 (pdf->dict arg2))) (unless (dict? d1) (error "d1 is not a dict")) (unless (dict? d2) (error "d2 is not a dict")) (unless (= (length d1) (length d2)) (error (format "dict lengths different in d1 (~a) and d2 (~a)" (length d1) (length d2)))) (for/and ([(k1 v1) (in-dict d1)] [(k2 v2) (in-dict d2)]) (define current-object-idx (or obj-idx k1)) (cond [(dict? v1) (dict-compare v1 v2 current-object-idx)] [(not (equal? k1 k2)) (error (format "keys unequal in object ~a: ~a ≠ ~a" current-object-idx k1 k2))] [(not (equal? v1 v2)) (error (format "values unequal in object ~a for key ~e: ~e ≠ ~e" current-object-idx k1 v1 v2))] [else #true]))) (define-simple-check (check-headers-equal? ps1 ps2) (equal? (peek-bytes 14 0 (open-input-file ps1)) (peek-bytes 14 0 (open-input-file ps2)))) (define-simple-check (check-pdfs-equal? ps1 ps2) (dict-compare ps1 ps2)) (define-simple-check (check-font-subsets-equal? f1 f2) (define misses null) (define (dump val) (cond [(promise? val) 'promise-omitted] [(vector? val) (dump (vector->list val))] [(dict? val) (for/list ([(k v) (in-dict (sort (dict->list val) #:key car symbol<?))]) (list k (dump v)))] [(list? val) (map dump val)] [else val])) (define (cmp v1 v2) (cond [(and (list? v1) (list? v2)) (and (= (length v1) (length v2)) (for/and ([x1 (in-list v1)] [x2 (in-list v2)]) (unless (cmp x1 x2) (set! misses (cons (list v1 x1 v2 x2) misses)))))] [else (equal? v1 v2)])) (define ibs1 (dict-ref (dict-ref (pdf->dict f1) 8) 'stream)) (define cfftop1 (dump (send CFFTop x:decode (open-input-bytes ibs1)))) (define ibs2 (dict-ref (dict-ref (pdf->dict f2) 8) 'stream)) (define cfftop2 (dump (send CFFTop x:decode (open-input-bytes ibs2)))) (cmp cfftop1 cfftop2) (check-true (null? misses))) #;(module+ main (for ([p (in-directory)] #:when (path-has-extension? p #"pdf")) (with-handlers ([exn:fail? (λ (exn) (println (format "~a failed" p)))]) (pdf->dict p))))
null
https://raw.githubusercontent.com/mbutterick/typesetting/6f7a8bbc422a64d3a54cbbbd78801a01410f5c92/pitfall/pitfall/check-pdf.rkt
racket
impliedly throw away leading non-delimiter bytes the complication is that arrays & dicts can contain other arrays & dicts so we have to scan ahead in an intelligent way. array dict, maybe with stream put hash into order so it's comparable might have a stream font subsets have their own interior structure, so ignore (maybe too lenient) compressed length may vary, so just set to #"0" whitespace obj name xref real integer parenstring font keystring. prefix is random, so ignore keystring sort by byte offset sort by index (module+ main
#lang debug racket (require rackunit (prefix-in zlib: fontland/zlib) fontland/table/cff/cff-top) (provide (all-defined-out)) (define (xref-offset bs) (match (regexp-match #px"(?<=startxref\n)\\d+" bs) [(list val) (read (open-input-bytes val))] [_ (error 'no-xref)])) (define-syntax (pat-lex stx) (syntax-case stx (else) [(_ PORT [PAT . REST] ... [else ELSE-CLAUSE]) (with-syntax ([(REST ...) (map (λ (stx) (syntax-case stx () [() #'(=> car)] [_ stx])) (syntax->list #'(REST ...)))]) #'(cond [(regexp-try-match (pregexp (string-append "^" PAT)) PORT) . REST] ... [else ELSE-CLAUSE]))])) (define (between-delimiters bs left right) (parameterize ([current-input-port (if (input-port? bs) bs (open-input-bytes bs))]) (let loop ([acc null][depth 0]) (cond [(regexp-try-match (pregexp (string-append "^" (regexp-quote (~a left)))) (current-input-port)) => (λ (m) (loop (if (empty? acc) impliedly throw away first left delimiter (cons (car m) acc)) (add1 depth)))] [(regexp-try-match (pregexp (string-append "^" (regexp-quote (~a right)))) (current-input-port)) => (λ (m) (case depth [(1) (bytes-append* (reverse acc))] [else (loop (cons (car m) acc) (sub1 depth))]))] [else (define bstr (read-byte)) (and (not (eof-object? bstr)) (loop (if (zero? depth) (cons (bytes bstr) acc)) depth))])))) (module+ test (require rackunit) (define bs #"a<<b<<c>>x<<z>>d>>e<<f>>g") (check-equal? (between-delimiters bs #"<<" #">>") #"b<<c>>x<<z>>d") (check-equal? (between-delimiters (between-delimiters bs #"<<" #">>") #"<<" #">>") #"c") (check-false (between-delimiters #"abc" #"<<" #">>")) (check-equal? (between-delimiters #"[a[b]c]" #"[" #"]") #"a[b]c") (check-equal? (let ([ip (open-input-bytes #"<</foo 42>>z")]) (parse-1 ip) (port->bytes ip)) #"z")) (define excluded-keys (list #"/Producer" #"/Creator" #"/CreationDate" #"/ModDate" #"/Keywords" #"/Title" #"/Author" #"/Subject")) (define (parse-1 ip) (cond (parse-pdf-bytes (between-delimiters ip #"[" #"]"))] (define items (parse-pdf-bytes (between-delimiters ip #"<<" #">>"))) (unless (even? (length items)) (raise items)) (define dic (for/list ([kv (in-slice 2 items)] suppress these keys so we can compare pdfkit & pitfall output #:unless (member (car kv) excluded-keys)) (apply cons kv)) bytes<? #:key car)) [(regexp-try-match #px"^\\s*stream\n" ip) (define stream-length (read (open-input-bytes (cdr (assoc #"/Length" dic))))) (define compressed? (equal? (dict-ref dic #"/Filter" #f) #"/FlateDecode")) (define stream ((if compressed? zlib:inflate values) (read-bytes stream-length ip))) (define font? (equal? (subbytes stream 0 4) #"true")) (dict-update (append dic (list (cons 'stream (if font? #"0" stream)))) #"/Length" (λ (val) (cond [(or font? compressed?) (bytes-length stream)] [else val])))] [else dic])] [else (pat-lex ip [else eof])])) (define (parse-pdf-bytes bs) (for/list ([tok (in-port parse-1 (open-input-bytes bs))]) tok)) (define (pdf->dict pdf) (define pdf-bs (if (bytes? 
pdf) pdf (file->bytes pdf))) (define xoff (xref-offset pdf-bs)) (define xref-ip (open-input-bytes (subbytes pdf-bs (+ xoff (bytes-length #"xref\n0"))))) (define ref-count (read xref-ip)) (define obj-locations (append drop record : there is no zeroth object (for/list ([i (in-range ref-count)]) (cons i (read (open-input-bytes (car (regexp-match #px"\\d{10}" xref-ip))))))) < #:key cdr) (list (cons #f xoff)))) (parameterize ([current-input-port (open-input-bytes pdf-bs)]) (for/list ([(idx start) (in-dict obj-locations)] [(_ end) (in-dict (cdr obj-locations))]) (cons idx (car (parse-pdf-bytes (peek-bytes (- end start) start)))))) < #:key car)) (define (dict-compare arg1 arg2 [obj-idx #f]) (define d1 (if (dict? arg1) arg1 (pdf->dict arg1))) (define d2 (if (dict? arg2) arg2 (pdf->dict arg2))) (unless (dict? d1) (error "d1 is not a dict")) (unless (dict? d2) (error "d2 is not a dict")) (unless (= (length d1) (length d2)) (error (format "dict lengths different in d1 (~a) and d2 (~a)" (length d1) (length d2)))) (for/and ([(k1 v1) (in-dict d1)] [(k2 v2) (in-dict d2)]) (define current-object-idx (or obj-idx k1)) (cond [(dict? v1) (dict-compare v1 v2 current-object-idx)] [(not (equal? k1 k2)) (error (format "keys unequal in object ~a: ~a ≠ ~a" current-object-idx k1 k2))] [(not (equal? v1 v2)) (error (format "values unequal in object ~a for key ~e: ~e ≠ ~e" current-object-idx k1 v1 v2))] [else #true]))) (define-simple-check (check-headers-equal? ps1 ps2) (equal? (peek-bytes 14 0 (open-input-file ps1)) (peek-bytes 14 0 (open-input-file ps2)))) (define-simple-check (check-pdfs-equal? ps1 ps2) (dict-compare ps1 ps2)) (define-simple-check (check-font-subsets-equal? f1 f2) (define misses null) (define (dump val) (cond [(promise? val) 'promise-omitted] [(vector? val) (dump (vector->list val))] [(dict? val) (for/list ([(k v) (in-dict (sort (dict->list val) #:key car symbol<?))]) (list k (dump v)))] [(list? val) (map dump val)] [else val])) (define (cmp v1 v2) (cond [(and (list? v1) (list? v2)) (and (= (length v1) (length v2)) (for/and ([x1 (in-list v1)] [x2 (in-list v2)]) (unless (cmp x1 x2) (set! misses (cons (list v1 x1 v2 x2) misses)))))] [else (equal? v1 v2)])) (define ibs1 (dict-ref (dict-ref (pdf->dict f1) 8) 'stream)) (define cfftop1 (dump (send CFFTop x:decode (open-input-bytes ibs1)))) (define ibs2 (dict-ref (dict-ref (pdf->dict f2) 8) 'stream)) (define cfftop2 (dump (send CFFTop x:decode (open-input-bytes ibs2)))) (cmp cfftop1 cfftop2) (check-true (null? misses))) (for ([p (in-directory)] #:when (path-has-extension? p #"pdf")) (with-handlers ([exn:fail? (λ (exn) (println (format "~a failed" p)))]) (pdf->dict p))))
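dict-compare above walks two parsed PDF object dictionaries in lockstep and raises on the first mismatched key or value. A minimal OCaml sketch of that comparison idea, using association lists in place of the Racket dicts (the function name and representation are illustrative assumptions, not part of check-pdf.rkt):

(* Compare two sorted (key, value) association lists entry by entry,
   failing on the first mismatch, in the spirit of dict-compare above. *)
let compare_dicts (d1 : (string * string) list) (d2 : (string * string) list) =
  if List.length d1 <> List.length d2 then
    failwith (Printf.sprintf "dict lengths differ: %d vs %d"
                (List.length d1) (List.length d2));
  List.iter2
    (fun (k1, v1) (k2, v2) ->
      if k1 <> k2 then
        failwith (Printf.sprintf "keys unequal: %s <> %s" k1 k2)
      else if v1 <> v2 then
        failwith (Printf.sprintf "values unequal for key %s: %s <> %s" k1 v1 v2))
    d1 d2

(* Example run: prints the mismatch for /Length instead of letting it escape. *)
let () =
  try compare_dicts [ ("/Length", "10") ] [ ("/Length", "12") ]
  with Failure msg -> print_endline msg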
95215f1ec5127c164707949ac7af55796a63964e512f5092c370bd007d69f4c9
intermine/bluegenes
routes.clj
(ns bluegenes.routes (:require [compojure.core :as compojure :refer [GET POST defroutes context]] [compojure.route :refer [resources not-found]] [ring.util.response :as response :refer [response]] [ring.util.http-response :refer [found see-other]] [ring.middleware.params :refer [wrap-params]] [ring.middleware.keyword-params :refer [wrap-keyword-params]] [bluegenes.ws.auth :as auth] [bluegenes.ws.ids :as ids] [bluegenes.ws.rss :as rss] [bluegenes.ws.lookup :as lookup] [bluegenes.index :refer [index]] [config.core :refer [env]] [bluegenes.utils :refer [env->mines get-service-root]] [clj-http.client :as client] [bluegenes-tool-store.core :as tool] [hiccup.page :refer [html5]] [clojure.string :as str])) (defn with-init "One of BlueGenes' web service could have added some data we want passed on to the frontend to session.init, in which case we make sure to pass it on and remove it (as it gets 'consumed') from the session." [options {{:keys [init] :as session} :session}] (-> (response (index init options)) (response/content-type "text/html") This is very important - without it Firefox will request the HTML ;; twice, messing up session.init! (response/charset "utf-8") (assoc :session (dissoc session :init)))) (defn get-favicon "Get a favicon for when one isn't configured." [] (let [mine-favicon (str (get-service-root env) "/model/images/favicon.ico")] (if (-> (client/get mine-favicon) (get-in [:headers "Content-Type"]) (= "image/x-icon")) (found mine-favicon) (found (str (:bluegenes-deploy-path env) "/favicon-fallback.ico"))))) (defn not-found-page [{:keys [request-method uri] :as _req}] (let [bg-path (or (:bluegenes-deploy-path env) "/")] (html5 [:head [:title "Page Not Found"] [:style "h1{ font-size:80px; font-weight:800; text-align:center; font-family: 'Roboto', sans-serif; } h2 { font-size:25px; text-align:center; font-family: 'Roboto', sans-serif; margin-top:-40px; } p{ text-align:center; font-family: 'Roboto', sans-serif; font-size:12px; } .container { width:300px; margin: 0 auto; margin-top:15%; }"]] [:body [:div.container [:h1 "404"] [:h2 "Page Not Found"] [:p "This " [:strong (-> request-method name str/upper-case)] " request to " [:strong uri] " is not handled by the BlueGenes server, which is deployed to " [:strong bg-path] ". " [:a {:href bg-path} "Click here"] " to open BlueGenes."]]]))) ; Define the top level URL routes for the server (def routes (compojure/let-routes [mines (env->mines env) favicon* (delay (get-favicon))] (context (:bluegenes-deploy-path env) [] ;;serve compiled files, i.e. js, css, from the resources folder (resources "/") ;; The favicon is chosen from the following order of priority: 1 . ` public / favicon.ico ` being present as a resource ( admin will have to add this ) . 2 . ` /<mine>/model / images / favicon.ico ` being present on the default mine . 3 . ` public / favicon - fallback.ico ` which is always present . Hence it follows that the following route wo n't be matched if [ 1 ] is true . (GET "/favicon.ico" [] @favicon*) (GET "/version" [] (response {:version "0.1.0"})) tool/routes ;; Anything within this context is the API web service. (context "/api" [] (context "/auth" [] auth/routes) (context "/ids" [] ids/routes) (context "/rss" [] rss/routes)) ;; Linking in. ;; Handles both configured mines and the /query path. 
(wrap-params (wrap-keyword-params (apply compojure/routes (for [path (concat (map :namespace mines) ["query"]) :let [redirect-path (str (:bluegenes-deploy-path env) "/" (when-not (= path "query") path))]] (context (str "/" path) [] (GET "/portal.do" {params :params} (-> (found redirect-path) (assoc :session {:init {:linkIn {:target :upload :data params}}}))) (POST "/portal.do" {params :params} (-> (see-other redirect-path) (assoc :session {:init {:linkIn {:target :upload :data params}}})))))))) ;; Dynamic routes for handling permanent URL resolution on configured mines. (apply compojure/routes (for [{mine-ns :namespace :as mine} mines] (context (str "/" mine-ns) [] (GET ["/:lookup" :lookup #"[^:/.]+:[^:/.]+(?:\.rdf)?"] [lookup] (lookup/ws lookup mine))))) ;; Passes options to index for including semantic markup with HTML. (GET "/" [] (partial with-init {:semantic-markup :home :mine (first mines)})) (apply compojure/routes (for [{mine-ns :namespace :as mine} mines] (compojure/routes (GET (str "/" mine-ns) [] (partial with-init {:semantic-markup :home :mine mine})) (GET (str "/" mine-ns "/report/:class/:id") [id] (partial with-init {:semantic-markup :report :mine mine :object-id id}))))) (GET "*" [] (partial with-init {}))) (not-found not-found-page)))
null
https://raw.githubusercontent.com/intermine/bluegenes/563e886234c16ada159715cb182f33e94d606ed4/src/clj/bluegenes/routes.clj
clojure
twice, messing up session.init! Define the top level URL routes for the server serve compiled files, i.e. js, css, from the resources folder The favicon is chosen from the following order of priority: Anything within this context is the API web service. Linking in. Handles both configured mines and the /query path. Dynamic routes for handling permanent URL resolution on configured mines. Passes options to index for including semantic markup with HTML.
(ns bluegenes.routes (:require [compojure.core :as compojure :refer [GET POST defroutes context]] [compojure.route :refer [resources not-found]] [ring.util.response :as response :refer [response]] [ring.util.http-response :refer [found see-other]] [ring.middleware.params :refer [wrap-params]] [ring.middleware.keyword-params :refer [wrap-keyword-params]] [bluegenes.ws.auth :as auth] [bluegenes.ws.ids :as ids] [bluegenes.ws.rss :as rss] [bluegenes.ws.lookup :as lookup] [bluegenes.index :refer [index]] [config.core :refer [env]] [bluegenes.utils :refer [env->mines get-service-root]] [clj-http.client :as client] [bluegenes-tool-store.core :as tool] [hiccup.page :refer [html5]] [clojure.string :as str])) (defn with-init "One of BlueGenes' web service could have added some data we want passed on to the frontend to session.init, in which case we make sure to pass it on and remove it (as it gets 'consumed') from the session." [options {{:keys [init] :as session} :session}] (-> (response (index init options)) (response/content-type "text/html") This is very important - without it Firefox will request the HTML (response/charset "utf-8") (assoc :session (dissoc session :init)))) (defn get-favicon "Get a favicon for when one isn't configured." [] (let [mine-favicon (str (get-service-root env) "/model/images/favicon.ico")] (if (-> (client/get mine-favicon) (get-in [:headers "Content-Type"]) (= "image/x-icon")) (found mine-favicon) (found (str (:bluegenes-deploy-path env) "/favicon-fallback.ico"))))) (defn not-found-page [{:keys [request-method uri] :as _req}] (let [bg-path (or (:bluegenes-deploy-path env) "/")] (html5 [:head [:title "Page Not Found"] [:style "h1{ font-size:80px; font-weight:800; text-align:center; font-family: 'Roboto', sans-serif; } h2 { font-size:25px; text-align:center; font-family: 'Roboto', sans-serif; margin-top:-40px; } p{ text-align:center; font-family: 'Roboto', sans-serif; font-size:12px; } .container { width:300px; margin: 0 auto; margin-top:15%; }"]] [:body [:div.container [:h1 "404"] [:h2 "Page Not Found"] [:p "This " [:strong (-> request-method name str/upper-case)] " request to " [:strong uri] " is not handled by the BlueGenes server, which is deployed to " [:strong bg-path] ". " [:a {:href bg-path} "Click here"] " to open BlueGenes."]]]))) (def routes (compojure/let-routes [mines (env->mines env) favicon* (delay (get-favicon))] (context (:bluegenes-deploy-path env) [] (resources "/") 1 . ` public / favicon.ico ` being present as a resource ( admin will have to add this ) . 2 . ` /<mine>/model / images / favicon.ico ` being present on the default mine . 3 . ` public / favicon - fallback.ico ` which is always present . Hence it follows that the following route wo n't be matched if [ 1 ] is true . 
(GET "/favicon.ico" [] @favicon*) (GET "/version" [] (response {:version "0.1.0"})) tool/routes (context "/api" [] (context "/auth" [] auth/routes) (context "/ids" [] ids/routes) (context "/rss" [] rss/routes)) (wrap-params (wrap-keyword-params (apply compojure/routes (for [path (concat (map :namespace mines) ["query"]) :let [redirect-path (str (:bluegenes-deploy-path env) "/" (when-not (= path "query") path))]] (context (str "/" path) [] (GET "/portal.do" {params :params} (-> (found redirect-path) (assoc :session {:init {:linkIn {:target :upload :data params}}}))) (POST "/portal.do" {params :params} (-> (see-other redirect-path) (assoc :session {:init {:linkIn {:target :upload :data params}}})))))))) (apply compojure/routes (for [{mine-ns :namespace :as mine} mines] (context (str "/" mine-ns) [] (GET ["/:lookup" :lookup #"[^:/.]+:[^:/.]+(?:\.rdf)?"] [lookup] (lookup/ws lookup mine))))) (GET "/" [] (partial with-init {:semantic-markup :home :mine (first mines)})) (apply compojure/routes (for [{mine-ns :namespace :as mine} mines] (compojure/routes (GET (str "/" mine-ns) [] (partial with-init {:semantic-markup :home :mine mine})) (GET (str "/" mine-ns "/report/:class/:id") [id] (partial with-init {:semantic-markup :report :mine mine :object-id id}))))) (GET "*" [] (partial with-init {}))) (not-found not-found-page)))
f70824cbb06298dd0d74d357b5475f39780acef5b825e320dfcfcf2b4f7ffee2
gfngfn/SATySFi
imageHashTable.ml
open MyUtil type tag = string type bbox = float * float * float * float type key = int type value_main = | PDFImage of Pdf.t * Pdfpage.t | OtherImage of Images.format * Pdf.pdfobject * int * int * abs_path type value = tag * bbox * value_main exception CannotLoadPdf of string * abs_path * int exception CannotLoadImage of string * abs_path exception ImageOfWrongFileType of abs_path exception UnsupportedColorModel of Images.colormodel * abs_path let main_hash_table : (key, value) Hashtbl.t = Hashtbl.create 32 let current_id_ref : int ref = ref 0 let initialize () = begin Images.add_methods Images.Jpeg Images.({ check_header = Jpeg.check_header; load = Some(Jpeg.load); save = Some(Jpeg.save); load_sequence = None; save_sequence = None; }); current_id_ref := 0; Hashtbl.clear main_hash_table; end let generate_tag () : key * tag = let n = !current_id_ref in begin incr current_id_ref; (n, "/I" ^ (string_of_int n)) end let add_pdf (abspath : abs_path) (pageno : int) = let pdfext = try Pdfread.pdf_of_file None None (get_abs_path_string abspath) with | Pdf.PDFError(msg) -> raise (CannotLoadPdf(msg, abspath, pageno)) in if pageno < 1 then raise (CannotLoadPdf("Page number should be greater than 0", abspath, pageno)) else match LoadPdf.get_page pdfext (pageno - 1) with | None -> raise (CannotLoadPdf("Invalid page number", abspath, pageno)) | Some((bbox, page)) -> let (key, tag) = generate_tag () in begin Hashtbl.add main_hash_table key (tag, bbox, PDFImage(pdfext, page)); key end let add_image (abspath : abs_path) = let (imgfmt, imgheader) = try Images.file_format (get_abs_path_string abspath) with | Images.Wrong_file_type -> raise (ImageOfWrongFileType(abspath)) | Sys_error(msg) -> raise (CannotLoadImage(msg, abspath)) in let infolst = imgheader.Images.header_infos in let widdots = imgheader.Images.header_width in let hgtdots = imgheader.Images.header_height in Format.printf " ImageHashTable > length of info = % d width = % d , height = % d\n " ( infolst ) widdots hgtdots ; Format.printf "ImageHashTable> length of info = %d width = %d, height = %d\n" (List.length infolst) widdots hgtdots; *) let dpi = match Images.dpi infolst with | Some(dpi) -> dpi | None -> 72. (* -- default dots per inch -- *) in let colormodel = match infolst |> List.fold_left (fun opt info -> match opt with | Some(_) -> opt | None -> match info with | Images.Info_ColorModel(colormodel) -> Some(colormodel) | _ -> opt ) None with | None -> Images.RGB (* when no color model is specified; doubtful implementation *) | Some(colormodel) -> colormodel in let colorspace = match colormodel with | Images.Gray -> Pdf.Name("/DeviceGray") | Images.RGB -> Pdf.Name("/DeviceRGB") | Images.YCbCr -> Pdf.Name("/DeviceRGB") | Images.CMYK -> Logging.warn_cmyk_image abspath; Pdf.Name("/DeviceCMYK") | _ -> raise (UnsupportedColorModel(colormodel, abspath)) in let pdf_points_of_inches inch = 72. *. inch in let wid = pdf_points_of_inches ((float_of_int widdots) /. dpi) in let hgt = pdf_points_of_inches ((float_of_int hgtdots) /. dpi) in let bbox = (0., 0., wid, hgt) in let (key, tag) = generate_tag () in begin Hashtbl.add main_hash_table key (tag, bbox, OtherImage(imgfmt, colorspace, widdots, hgtdots, abspath)); key end let find (key : key) : value = match Hashtbl.find_opt main_hash_table key with | None -> assert false | Some(value) -> value let fold (type a) (f : key -> value -> a -> a) (init : a) : a = Hashtbl.fold f main_hash_table init
null
https://raw.githubusercontent.com/gfngfn/SATySFi/9dbd61df0ab05943b3394830c371e927df45251a/src/backend/imageHashTable.ml
ocaml
-- default dots per inch -- when no color model is specified; doubtful implementation
open MyUtil type tag = string type bbox = float * float * float * float type key = int type value_main = | PDFImage of Pdf.t * Pdfpage.t | OtherImage of Images.format * Pdf.pdfobject * int * int * abs_path type value = tag * bbox * value_main exception CannotLoadPdf of string * abs_path * int exception CannotLoadImage of string * abs_path exception ImageOfWrongFileType of abs_path exception UnsupportedColorModel of Images.colormodel * abs_path let main_hash_table : (key, value) Hashtbl.t = Hashtbl.create 32 let current_id_ref : int ref = ref 0 let initialize () = begin Images.add_methods Images.Jpeg Images.({ check_header = Jpeg.check_header; load = Some(Jpeg.load); save = Some(Jpeg.save); load_sequence = None; save_sequence = None; }); current_id_ref := 0; Hashtbl.clear main_hash_table; end let generate_tag () : key * tag = let n = !current_id_ref in begin incr current_id_ref; (n, "/I" ^ (string_of_int n)) end let add_pdf (abspath : abs_path) (pageno : int) = let pdfext = try Pdfread.pdf_of_file None None (get_abs_path_string abspath) with | Pdf.PDFError(msg) -> raise (CannotLoadPdf(msg, abspath, pageno)) in if pageno < 1 then raise (CannotLoadPdf("Page number should be greater than 0", abspath, pageno)) else match LoadPdf.get_page pdfext (pageno - 1) with | None -> raise (CannotLoadPdf("Invalid page number", abspath, pageno)) | Some((bbox, page)) -> let (key, tag) = generate_tag () in begin Hashtbl.add main_hash_table key (tag, bbox, PDFImage(pdfext, page)); key end let add_image (abspath : abs_path) = let (imgfmt, imgheader) = try Images.file_format (get_abs_path_string abspath) with | Images.Wrong_file_type -> raise (ImageOfWrongFileType(abspath)) | Sys_error(msg) -> raise (CannotLoadImage(msg, abspath)) in let infolst = imgheader.Images.header_infos in let widdots = imgheader.Images.header_width in let hgtdots = imgheader.Images.header_height in Format.printf " ImageHashTable > length of info = % d width = % d , height = % d\n " ( infolst ) widdots hgtdots ; Format.printf "ImageHashTable> length of info = %d width = %d, height = %d\n" (List.length infolst) widdots hgtdots; *) let dpi = match Images.dpi infolst with | Some(dpi) -> dpi in let colormodel = match infolst |> List.fold_left (fun opt info -> match opt with | Some(_) -> opt | None -> match info with | Images.Info_ColorModel(colormodel) -> Some(colormodel) | _ -> opt ) None with | Some(colormodel) -> colormodel in let colorspace = match colormodel with | Images.Gray -> Pdf.Name("/DeviceGray") | Images.RGB -> Pdf.Name("/DeviceRGB") | Images.YCbCr -> Pdf.Name("/DeviceRGB") | Images.CMYK -> Logging.warn_cmyk_image abspath; Pdf.Name("/DeviceCMYK") | _ -> raise (UnsupportedColorModel(colormodel, abspath)) in let pdf_points_of_inches inch = 72. *. inch in let wid = pdf_points_of_inches ((float_of_int widdots) /. dpi) in let hgt = pdf_points_of_inches ((float_of_int hgtdots) /. dpi) in let bbox = (0., 0., wid, hgt) in let (key, tag) = generate_tag () in begin Hashtbl.add main_hash_table key (tag, bbox, OtherImage(imgfmt, colorspace, widdots, hgtdots, abspath)); key end let find (key : key) : value = match Hashtbl.find_opt main_hash_table key with | None -> assert false | Some(value) -> value let fold (type a) (f : key -> value -> a -> a) (init : a) : a = Hashtbl.fold f main_hash_table init
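add_image above turns pixel dimensions into a PDF bounding box: it reads the dpi from the image header (defaulting to 72) and converts inches to points at 72 points per inch. A small self-contained OCaml sketch of just that conversion (the function name is an illustrative assumption, not part of the SATySFi module):

(* Pixel dimensions -> PDF bounding box in points, mirroring add_image:
   72 points per inch, defaulting to 72 dpi when the header reports none. *)
let bbox_of_pixels ?(dpi = 72.) ~width_px ~height_px () =
  let points_of_inches inch = 72. *. inch in
  let wid = points_of_inches (float_of_int width_px /. dpi) in
  let hgt = points_of_inches (float_of_int height_px /. dpi) in
  (0., 0., wid, hgt)

let () =
  (* A 1200x600 pixel image at 300 dpi is 4x2 inches, i.e. 288x144 points. *)
  let (_, _, w, h) = bbox_of_pixels ~dpi:300. ~width_px:1200 ~height_px:600 () in
  Printf.printf "bbox = 0 0 %.1f %.1f\n" w h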
d07738e9312345fe6abeaa28ec7a01cdd7273759b4822e9a36ebfea2109655c4
ThomasHintz/keep-the-records
ping.scm
(use zmq) (include "lookup-port.scm") (define get-ping-address-socket (make-socket 'req)) (connect-socket get-ping-address-socket (string-append "tcp:" lookup-port)) (send-message get-ping-address-socket "ping") (define socket (make-socket 'rep)) (connect-socket socket (string-append "tcp:" (receive-message* get-ping-address-socket))) (define (process) (receive-message* socket) (send-message socket "ping") (process)) (process)
null
https://raw.githubusercontent.com/ThomasHintz/keep-the-records/c20e648e831bed2ced3f2f74bfc590dc06b5f076/services/ping.scm
scheme
(use zmq) (include "lookup-port.scm") (define get-ping-address-socket (make-socket 'req)) (connect-socket get-ping-address-socket (string-append "tcp:" lookup-port)) (send-message get-ping-address-socket "ping") (define socket (make-socket 'rep)) (connect-socket socket (string-append "tcp:" (receive-message* get-ping-address-socket))) (define (process) (receive-message* socket) (send-message socket "ping") (process)) (process)
72aaf1e5e97c97cd2690f33efccf5f67e63d346a1d88d8c9b709f37bcabb2ca9
bef/erlswf
ngram.erl
%% %% @doc functions for n-gram analysis %% -module(ngram). -export([ ngram/2, ngram/3, ngramfold/2, ngramfold/3, cut_profile/2, intersect_profiles/2, expand_profiles/2, distance/2, simplified_distance/2, common_distance/2, save_profile/2, load_profile/1, merge_profiles/2, merge_profiles/1, incr_pl/3 ]). -include("ngram.hrl"). %% @doc generate n-gram profile From a list L of elements count each N successive elements . PL is the accumulator / starting point . L with ) & lt ; N are being silently ignored . %% @spec ngram(integer(), profile(), list()) -> profile() ngram(N, PL, L) when length(L) < N -> PL; ngram(N, PL, [_|R]=L) -> {Key,_} = lists:split(N, L), PL2 = incr_pl(Key, PL, 1), io : format("~p ~p ~p ~ n " , [ length(PL2 ) , Key , ) ] ) , ngram(N, PL2, R). %% @doc same as ngram(N, [], L) ngram(N, L) -> ngram(N, [], L). %% @doc increase integer value of property list entry by I incr_pl(Key, PL, I) -> Val = proplists:get_value(Key, PL, 0), PL2 = proplists:delete(Key, PL), P = proplists:property(Key, Val+I), [P|PL2]. %% @doc generate n-gram from a list of lists ngramfold(N, LL) -> ngramfold(N, [], LL). ngramfold(N, PL0, LL) -> io : ~ n " , [ length(LL ) ] ) , lists:foldl(fun(L, PL) -> ngram(N, PL, L) end, PL0, LL). %% @doc limit n-gram profile to the Length cut_profile(Length, P) when length(P) < Length -> P; cut_profile(Length, P) -> {SP, _} = lists:split(Length, lists:reverse(lists:keysort(2, P))), SP. @doc calculate P1 cap P2 / all common keys of two profiles %% the resulting profiles are of equal length %% @spec intersect_profiles(profile(), profile()) -> {profile(), profile()} intersect_profiles(P1, P2) -> P1out = lists:filter(fun({K,_}) -> lists:keymember(K, 1, P2) end, P1), P2out = lists:filter(fun({K,_}) -> lists:keymember(K, 1, P1out) end, P2), {P1out, P2out}. @doc calculate P1 cup P2 / add missing keys with value 0 %% the resulting profiles are of equal length @spec expand_profiles(profile ( ) , profile ( ) ) - > { profile ( ) , profile ( ) } expand_profiles(P1, P2) -> P1a = [{K,0} || {K,_} <- lists:filter(fun({K,_}) -> not lists:keymember(K, 1, P2) end, P1)], P2a = [{K,0} || {K,_} <- lists:filter(fun({K,_}) -> not lists:keymember(K, 1, P1) end, P2)], {lists:append(P1, P2a), lists:append(P2, P1a)}. %% @doc calculate distance between profiles. %% profiles must be of equal length and contain the same keys @spec distance(profile ( ) , profile ( ) ) - > ( ) distance(P1, P2) when length(P1) =:= length(P2) -> Ndist = lists:map(fun({K, V1}) -> {value, {K, V2}} = lists:keysearch(K, 1, P2), X = (2 * (V1-V2)) / (V1 + V2), X*X end, P1), lists:sum(Ndist). @doc simplified profile intersection ( SPI ) distance . %% intersect, then calculate distance simplified_distance(profile ( ) , profile ( ) ) - > { IntersectionLength::integer ( ) , ( ) } simplified_distance(P1, P2) -> {P1a, P2a} = intersect_profiles(P1, P2), Ndist = distance(P1a, P2a), {length(P1a), Ndist}. %% @doc common distance %% expand profiles, then calculate distance @spec common_distance(profile ( ) , profile ( ) ) - > { IntersectionLength::integer ( ) , ( ) } common_distance(P1, P2) -> {P1a, P2a} = expand_profiles(P1, P2), Ndist = distance(P1a, P2a), {length(P1a), Ndist}. @doc merge two profiles %% @spec merge_profiles(profile(), profile()) -> profile() merge_profiles(P1, P2) -> lists:foldl(fun({K, V}, Acc) -> incr_pl(K, Acc, V) end, P1, P2). %% @doc merge many profiles ( ) ] ) - > profile ( ) merge_profiles([]) -> []; merge_profiles([P1|L]) -> lists:foldl(fun(P, Acc) -> merge_profiles(Acc, P) end, P1, L). 
save_profile(Filename, #ngramprofile{}=P) -> file:write_file(Filename, term_to_binary(P)). load_profile(Filename) -> {ok, B} = file:read_file(Filename), binary_to_term(B).
null
https://raw.githubusercontent.com/bef/erlswf/1397591d012aaa020f9ffc0ecd5436e65814e668/src/ngram.erl
erlang
@doc functions for n-gram analysis @doc generate n-gram profile @spec ngram(integer(), profile(), list()) -> profile() @doc same as ngram(N, [], L) @doc increase integer value of property list entry by I @doc generate n-gram from a list of lists @doc limit n-gram profile to the Length the resulting profiles are of equal length @spec intersect_profiles(profile(), profile()) -> {profile(), profile()} the resulting profiles are of equal length @doc calculate distance between profiles. profiles must be of equal length and contain the same keys intersect, then calculate distance @doc common distance expand profiles, then calculate distance @spec merge_profiles(profile(), profile()) -> profile() @doc merge many profiles
-module(ngram). -export([ ngram/2, ngram/3, ngramfold/2, ngramfold/3, cut_profile/2, intersect_profiles/2, expand_profiles/2, distance/2, simplified_distance/2, common_distance/2, save_profile/2, load_profile/1, merge_profiles/2, merge_profiles/1, incr_pl/3 ]). -include("ngram.hrl"). From a list L of elements count each N successive elements . PL is the accumulator / starting point . L with ) & lt ; N are being silently ignored . ngram(N, PL, L) when length(L) < N -> PL; ngram(N, PL, [_|R]=L) -> {Key,_} = lists:split(N, L), PL2 = incr_pl(Key, PL, 1), io : format("~p ~p ~p ~ n " , [ length(PL2 ) , Key , ) ] ) , ngram(N, PL2, R). ngram(N, L) -> ngram(N, [], L). incr_pl(Key, PL, I) -> Val = proplists:get_value(Key, PL, 0), PL2 = proplists:delete(Key, PL), P = proplists:property(Key, Val+I), [P|PL2]. ngramfold(N, LL) -> ngramfold(N, [], LL). ngramfold(N, PL0, LL) -> io : ~ n " , [ length(LL ) ] ) , lists:foldl(fun(L, PL) -> ngram(N, PL, L) end, PL0, LL). cut_profile(Length, P) when length(P) < Length -> P; cut_profile(Length, P) -> {SP, _} = lists:split(Length, lists:reverse(lists:keysort(2, P))), SP. @doc calculate P1 cap P2 / all common keys of two profiles intersect_profiles(P1, P2) -> P1out = lists:filter(fun({K,_}) -> lists:keymember(K, 1, P2) end, P1), P2out = lists:filter(fun({K,_}) -> lists:keymember(K, 1, P1out) end, P2), {P1out, P2out}. @doc calculate P1 cup P2 / add missing keys with value 0 @spec expand_profiles(profile ( ) , profile ( ) ) - > { profile ( ) , profile ( ) } expand_profiles(P1, P2) -> P1a = [{K,0} || {K,_} <- lists:filter(fun({K,_}) -> not lists:keymember(K, 1, P2) end, P1)], P2a = [{K,0} || {K,_} <- lists:filter(fun({K,_}) -> not lists:keymember(K, 1, P1) end, P2)], {lists:append(P1, P2a), lists:append(P2, P1a)}. @spec distance(profile ( ) , profile ( ) ) - > ( ) distance(P1, P2) when length(P1) =:= length(P2) -> Ndist = lists:map(fun({K, V1}) -> {value, {K, V2}} = lists:keysearch(K, 1, P2), X = (2 * (V1-V2)) / (V1 + V2), X*X end, P1), lists:sum(Ndist). @doc simplified profile intersection ( SPI ) distance . simplified_distance(profile ( ) , profile ( ) ) - > { IntersectionLength::integer ( ) , ( ) } simplified_distance(P1, P2) -> {P1a, P2a} = intersect_profiles(P1, P2), Ndist = distance(P1a, P2a), {length(P1a), Ndist}. @spec common_distance(profile ( ) , profile ( ) ) - > { IntersectionLength::integer ( ) , ( ) } common_distance(P1, P2) -> {P1a, P2a} = expand_profiles(P1, P2), Ndist = distance(P1a, P2a), {length(P1a), Ndist}. @doc merge two profiles merge_profiles(P1, P2) -> lists:foldl(fun({K, V}, Acc) -> incr_pl(K, Acc, V) end, P1, P2). ( ) ] ) - > profile ( ) merge_profiles([]) -> []; merge_profiles([P1|L]) -> lists:foldl(fun(P, Acc) -> merge_profiles(Acc, P) end, P1, L). save_profile(Filename, #ngramprofile{}=P) -> file:write_file(Filename, term_to_binary(P)). load_profile(Filename) -> {ok, B} = file:read_file(Filename), binary_to_term(B).
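The module documentation above describes two steps: build a profile by counting every run of N successive elements, and compare two profiles by summing (2*(V1-V2)/(V1+V2))^2 over their keys. A rough OCaml re-implementation sketch of those two steps, with association lists standing in for Erlang property lists (an illustration of the algorithm, not the ngram API):

(* Count every run of n successive elements into an association-list profile,
   then compare profiles with the squared relative-difference term of distance/2. *)
let ngram n xs =
  let rec take k = function
    | _ when k = 0 -> Some []
    | [] -> None
    | y :: ys -> Option.map (fun t -> y :: t) (take (k - 1) ys)
  in
  let bump key profile =
    let v = try List.assoc key profile with Not_found -> 0 in
    (key, v + 1) :: List.remove_assoc key profile
  in
  let rec go acc = function
    | [] -> acc
    | _ :: rest as l ->
      (match take n l with
       | None -> acc                       (* suffix shorter than n: ignored *)
       | Some key -> go (bump key acc) rest)
  in
  go [] xs

(* Assumes both profiles contain the same keys, as distance/2 does. *)
let distance p1 p2 =
  List.fold_left
    (fun sum (k, v1) ->
      let v2 = List.assoc k p2 in
      let x = 2. *. float_of_int (v1 - v2) /. float_of_int (v1 + v2) in
      sum +. (x *. x))
    0. p1

let () =
  let p = ngram 2 [ 1; 2; 1; 2; 1 ] in   (* two distinct bigrams, each counted twice *)
  Printf.printf "distinct bigrams: %d, self distance: %.1f\n"
    (List.length p) (distance p p)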
095758ed4428b78a3eca9277b14a021d49cf14234fc8c4f9c63a30ea2859256c
mzp/coq-for-ipad
mkcamlp4.ml
(****************************************************************************) (* *) (* Objective Caml *) (* *) (* INRIA Rocquencourt *) (* *) Copyright 2006 Institut National de Recherche en Informatique et en Automatique . All rights reserved . This file is distributed under the terms of the GNU Library General Public License , with the special (* exception on linking described in LICENSE at the top of the Objective *) (* Caml source tree. *) (* *) (****************************************************************************) Authors : * - Daniel de Rauglaudre : initial shell version * - : rewriting in OCaml * - Daniel de Rauglaudre: initial shell version * - Nicolas Pouillard: rewriting in OCaml *) open Camlp4; open Camlp4_config; open Filename; open Format; value (interfaces, options, includes) = let rec self (interf, opts, incl) = fun [ [] -> (List.rev interf, List.rev opts, List.rev incl) | ["-I"; dir :: args] -> self (interf, opts, [dir; "-I" :: incl]) args | ["-version" :: _] -> do { printf "mkcamlp4, version %s@." version; exit 0 } | ["-vnum" :: _] -> do { printf "%s@." version; exit 0 } | [ arg :: args ] when check_suffix arg ".cmi" -> let basename = String.capitalize (Filename.chop_suffix (Filename.basename arg) ".cmi") in self ([ basename :: interf ], opts, incl) args | [ arg :: args ] -> self (interf, [ arg :: opts ], incl) args ] in self ([], [], ["."; "-I"]) (List.tl (Array.to_list Sys.argv)); value run l = let cmd = String.concat " " l in let () = Format.printf "%s@." cmd in let st = Sys.command cmd 0 in if st <> 0 then failwith ("Exit: " ^ string_of_int st) else (); value crc_ml = Filename.temp_file "crc_" ".ml"; value crc = Filename.chop_suffix crc_ml ".ml"; value clean () = run ["rm"; "-f"; crc_ml; crc^".cmi"; crc^".cmo"]; try do { run ([ocaml_standard_library^"/extract_crc"; "-I"; camlp4_standard_library] @ includes @ interfaces @ [">"; crc_ml]); let cout = open_out_gen [Open_wronly; Open_append; Open_text] 0o666 crc_ml in do { output_string cout "let _ = Dynlink.add_available_units crc_unit_list\n"; close_out cout }; run (["ocamlc"; "-I"; camlp4_standard_library; "camlp4lib.cma"; crc_ml] @ includes @ options @ ["Camlp4Bin.cmo"; "-linkall"]); clean(); } with exc -> do { clean (); raise exc };
null
https://raw.githubusercontent.com/mzp/coq-for-ipad/4fb3711723e2581a170ffd734e936f210086396e/src/ocaml-3.12.0/camlp4/mkcamlp4.ml
ocaml
Objective Caml INRIA Rocquencourt exception on linking described in LICENSE at the top of the Objective Caml source tree.
Copyright 2006 Institut National de Recherche en Informatique et en Automatique . All rights reserved . This file is distributed under the terms of the GNU Library General Public License , with the special Authors : * - Daniel de Rauglaudre : initial shell version * - : rewriting in OCaml * - Daniel de Rauglaudre: initial shell version * - Nicolas Pouillard: rewriting in OCaml *) open Camlp4; open Camlp4_config; open Filename; open Format; value (interfaces, options, includes) = let rec self (interf, opts, incl) = fun [ [] -> (List.rev interf, List.rev opts, List.rev incl) | ["-I"; dir :: args] -> self (interf, opts, [dir; "-I" :: incl]) args | ["-version" :: _] -> do { printf "mkcamlp4, version %s@." version; exit 0 } | ["-vnum" :: _] -> do { printf "%s@." version; exit 0 } | [ arg :: args ] when check_suffix arg ".cmi" -> let basename = String.capitalize (Filename.chop_suffix (Filename.basename arg) ".cmi") in self ([ basename :: interf ], opts, incl) args | [ arg :: args ] -> self (interf, [ arg :: opts ], incl) args ] in self ([], [], ["."; "-I"]) (List.tl (Array.to_list Sys.argv)); value run l = let cmd = String.concat " " l in let () = Format.printf "%s@." cmd in let st = Sys.command cmd 0 in if st <> 0 then failwith ("Exit: " ^ string_of_int st) else (); value crc_ml = Filename.temp_file "crc_" ".ml"; value crc = Filename.chop_suffix crc_ml ".ml"; value clean () = run ["rm"; "-f"; crc_ml; crc^".cmi"; crc^".cmo"]; try do { run ([ocaml_standard_library^"/extract_crc"; "-I"; camlp4_standard_library] @ includes @ interfaces @ [">"; crc_ml]); let cout = open_out_gen [Open_wronly; Open_append; Open_text] 0o666 crc_ml in do { output_string cout "let _ = Dynlink.add_available_units crc_unit_list\n"; close_out cout }; run (["ocamlc"; "-I"; camlp4_standard_library; "camlp4lib.cma"; crc_ml] @ includes @ options @ ["Camlp4Bin.cmo"; "-linkall"]); clean(); } with exc -> do { clean (); raise exc };
acaa69c31090238f3b528ffc3b17bcb4f24c514b5c21a42f47ace2a1bb0396f9
haskell/cabal
Signal.hs
{-# LANGUAGE CPP #-} module Distribution.Client.Signal ( installTerminationHandler , Terminated(..) ) where import qualified Control.Exception as Exception #ifndef mingw32_HOST_OS import Control.Concurrent (myThreadId) import Control.Monad (void) import qualified System.Posix.Signals as Signals #endif -- | Terminated is an asynchronous exception, thrown when SIGTERM is received. It's to 'kill' what ' ' -- is to Ctrl-C. data Terminated = Terminated instance Exception.Exception Terminated where toException = Exception.asyncExceptionToException fromException = Exception.asyncExceptionFromException instance Show Terminated where show Terminated = "terminated" -- | Install a signal handler that initiates a controlled shutdown on receiving SIGTERM by throwing an asynchronous exception at the main thread. Must be -- called from the main thread. -- It is a noop on Windows. -- installTerminationHandler :: IO () #ifdef mingw32_HOST_OS installTerminationHandler = return () #else installTerminationHandler = do mainThreadId <- myThreadId void $ Signals.installHandler Signals.sigTERM (Signals.CatchOnce $ Exception.throwTo mainThreadId Terminated) Nothing #endif
null
https://raw.githubusercontent.com/haskell/cabal/55e036a2c40586bc0f69aaa7d85aab931a0a5c80/cabal-install/src/Distribution/Client/Signal.hs
haskell
| Terminated is an asynchronous exception, thrown when SIGTERM is received. It's to 'kill' what ' ' is to Ctrl-C. | Install a signal handler that initiates a controlled shutdown on receiving SIGTERM by throwing an asynchronous exception at the main thread. Must be called from the main thread. It is a noop on Windows.
{-# LANGUAGE CPP #-} module Distribution.Client.Signal ( installTerminationHandler , Terminated(..) ) where import qualified Control.Exception as Exception #ifndef mingw32_HOST_OS import Control.Concurrent (myThreadId) import Control.Monad (void) import qualified System.Posix.Signals as Signals #endif data Terminated = Terminated instance Exception.Exception Terminated where toException = Exception.asyncExceptionToException fromException = Exception.asyncExceptionFromException instance Show Terminated where show Terminated = "terminated" installTerminationHandler :: IO () #ifdef mingw32_HOST_OS installTerminationHandler = return () #else installTerminationHandler = do mainThreadId <- myThreadId void $ Signals.installHandler Signals.sigTERM (Signals.CatchOnce $ Exception.throwTo mainThreadId Terminated) Nothing #endif
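installTerminationHandler above turns SIGTERM into an asynchronous Terminated exception aimed at the main thread so ordinary cleanup code runs. The same idea sketched in OCaml with the standard Sys signal API (the exception name and handler are illustrative, not cabal-install code; raising from a signal handler carries the usual caveats around blocking calls):

(* Convert SIGTERM into an exception so the normal shutdown path runs,
   analogous to the Terminated exception installed above. *)
exception Terminated

let install_termination_handler () =
  Sys.set_signal Sys.sigterm (Sys.Signal_handle (fun _signum -> raise Terminated))

let () =
  install_termination_handler ();
  try
    (* stand-in for the real work: block until SIGTERM arrives (or stdin closes) *)
    ignore (input_line stdin)
  with
  | Terminated -> prerr_endline "terminated"
  | End_of_file -> ()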
568cdf44357322654da4158fc12b0731af1aeb0354f8b3ed76c3169b0f80aa14
vmchale/kempe
CGen.hs
module Kempe.CGen ( cGen ) where import Data.Maybe (mapMaybe) import Kempe.AST import Kempe.Name import Language.C.AST cGen :: Declarations a c (StackType ()) -> [CFunc] cGen = mapMaybe cDecl cDecl :: KempeDecl a c (StackType ()) -> Maybe CFunc cDecl ExtFnDecl{} = Nothing cDecl TyDecl{} = Nothing cDecl FunDecl{} = Nothing cDecl (Export _ Cabi (Name n _ (StackType [] []))) = Just (CFunc n [CVoid] CVoid) cDecl (Export _ Cabi (Name n _ (StackType [] [o]))) = Just (CFunc n [CVoid] (kempeTyToCType o)) cDecl (Export _ Cabi (Name n _ (StackType ins []))) = Just (CFunc n (kempeTyToCType <$> ins) CVoid) cDecl (Export _ Cabi (Name n _ (StackType ins [o]))) = Just (CFunc n (kempeTyToCType <$> ins) (kempeTyToCType o)) cDecl (Export _ Cabi _) = error "Multiple return not suppported :(" cDecl (Export _ ArmAbi (Name n _ (StackType [] []))) = Just (CFunc n [CVoidPtr] CVoid) cDecl (Export _ ArmAbi (Name n _ (StackType [] [o]))) = Just (CFunc n [CVoidPtr] (kempeTyToCType o)) cDecl (Export _ ArmAbi (Name n _ (StackType ins []))) = Just (CFunc n (CVoidPtr : fmap kempeTyToCType ins) CVoid) cDecl (Export _ ArmAbi (Name n _ (StackType ins [o]))) = Just (CFunc n (CVoidPtr : fmap kempeTyToCType ins) (kempeTyToCType o)) cDecl (Export _ ArmAbi _) = error "Multiple return not suppported :(" cDecl (Export _ Hooked (Name n _ _)) = Just (CFunc n [CVoidPtr] CVoid) cDecl (Export _ Kabi _) = error "You probably don't want to do this." kempeTyToCType :: KempeTy a -> CType kempeTyToCType (TyBuiltin _ TyInt) = CInt kempeTyToCType (TyBuiltin _ TyBool) = CBool kempeTyToCType (TyBuiltin _ TyWord) = CUInt64 kempeTyToCType (TyBuiltin _ TyInt8) = CInt8 kempeTyToCType TyVar{} = error "Don't do that" kempeTyToCType TyApp{} = error "User-defined types cannot be exported :(" kempeTyToCType TyNamed{} = error "User-defined types cannot be exported :("
null
https://raw.githubusercontent.com/vmchale/kempe/23d59cb9343902aae33140e2b68ac0e4ab0a60a0/src/Kempe/CGen.hs
haskell
module Kempe.CGen ( cGen ) where import Data.Maybe (mapMaybe) import Kempe.AST import Kempe.Name import Language.C.AST cGen :: Declarations a c (StackType ()) -> [CFunc] cGen = mapMaybe cDecl cDecl :: KempeDecl a c (StackType ()) -> Maybe CFunc cDecl ExtFnDecl{} = Nothing cDecl TyDecl{} = Nothing cDecl FunDecl{} = Nothing cDecl (Export _ Cabi (Name n _ (StackType [] []))) = Just (CFunc n [CVoid] CVoid) cDecl (Export _ Cabi (Name n _ (StackType [] [o]))) = Just (CFunc n [CVoid] (kempeTyToCType o)) cDecl (Export _ Cabi (Name n _ (StackType ins []))) = Just (CFunc n (kempeTyToCType <$> ins) CVoid) cDecl (Export _ Cabi (Name n _ (StackType ins [o]))) = Just (CFunc n (kempeTyToCType <$> ins) (kempeTyToCType o)) cDecl (Export _ Cabi _) = error "Multiple return not suppported :(" cDecl (Export _ ArmAbi (Name n _ (StackType [] []))) = Just (CFunc n [CVoidPtr] CVoid) cDecl (Export _ ArmAbi (Name n _ (StackType [] [o]))) = Just (CFunc n [CVoidPtr] (kempeTyToCType o)) cDecl (Export _ ArmAbi (Name n _ (StackType ins []))) = Just (CFunc n (CVoidPtr : fmap kempeTyToCType ins) CVoid) cDecl (Export _ ArmAbi (Name n _ (StackType ins [o]))) = Just (CFunc n (CVoidPtr : fmap kempeTyToCType ins) (kempeTyToCType o)) cDecl (Export _ ArmAbi _) = error "Multiple return not suppported :(" cDecl (Export _ Hooked (Name n _ _)) = Just (CFunc n [CVoidPtr] CVoid) cDecl (Export _ Kabi _) = error "You probably don't want to do this." kempeTyToCType :: KempeTy a -> CType kempeTyToCType (TyBuiltin _ TyInt) = CInt kempeTyToCType (TyBuiltin _ TyBool) = CBool kempeTyToCType (TyBuiltin _ TyWord) = CUInt64 kempeTyToCType (TyBuiltin _ TyInt8) = CInt8 kempeTyToCType TyVar{} = error "Don't do that" kempeTyToCType TyApp{} = error "User-defined types cannot be exported :(" kempeTyToCType TyNamed{} = error "User-defined types cannot be exported :("
8acc05150f5a3df9f3ca92c44a16cc4db0f31d622885564f3a3a6fcfb09f04f7
bugczw/Introduction-to-Functional-Programming-in-OCaml
W6_S2.ml
(* REMOVE ELEMENTS FROM DICTIONARIES (20/20 points) The following code is the program explained during the video sequence except that we have modified the interface DictSig a little bit. Now, it is possible to remove a key from a dictionary. Update the code to have it accepted by the type-checker. THE GIVEN PRELUDE module type DictSig = sig type ('key, 'value) t val empty : ('key, 'value) t val add : ('key, 'value) t -> 'key -> 'value -> ('key, 'value) t exception NotFound val lookup : ('key, 'value) t -> 'key -> 'value val remove : ('key, 'value) t -> 'key -> ('key, 'value) t end ;; *) module Dict : DictSig = struct type ('key, 'value) t = | Empty | Node of ('key, 'value) t * 'key * 'value * ('key, 'value) t let empty = Empty let rec add d k v = match d with | Empty -> Node (Empty, k, v, Empty) | Node (l, k', v', r) -> if k = k' then Node (l, k, v, r) else if k < k' then Node (add l k v, k', v', r) else Node (l, k', v', add r k v) exception NotFound let rec lookup d k = match d with | Empty -> raise NotFound | Node (l, k', v', r) -> if k = k' then v' else if k < k' then lookup l k else lookup r k let rec append d1 d2 = match d2 with | Empty -> d1 | Node (l, k, v, r) -> append (append (add d1 k v) l) r let rec remove d k = match d with | Empty -> Empty | Node (l, k', v', r) -> if k = k' then append l r else Node ((remove l k), k', v', (remove r k)) end ;;
null
https://raw.githubusercontent.com/bugczw/Introduction-to-Functional-Programming-in-OCaml/13c4d1f92e7479f8eb10ea5d4c43a598b6676d0f/OCaml_MOOC_W6_ALL/Exercise/W6_S2.ml
ocaml
REMOVE ELEMENTS FROM DICTIONARIES ( 20/20 points ) The following code is the program explained during the video sequence except that we have modified the interface a little bit . Now , it is possible to remove a key from a dictionary . Update the code to have it accepted by the type - checker . THE GIVEN PRELUDE module type = sig type ( ' key , ' value ) t val empty : ( ' key , ' value ) t val add : ( ' key , ' value ) t - > ' key - > ' value - > ( ' key , ' value ) t exception NotFound val lookup : ( ' key , ' value ) t - > ' key - > ' value val remove : ( ' key , ' value ) t - > ' key - > ( ' key , ' value ) t end ; ; REMOVE ELEMENTS FROM DICTIONARIES (20/20 points) The following code is the program explained during the video sequence except that we have modified the interface DictSig a little bit. Now, it is possible to remove a key from a dictionary. Update the code to have it accepted by the type-checker. THE GIVEN PRELUDE module type DictSig = sig type ('key, 'value) t val empty : ('key, 'value) t val add : ('key, 'value) t -> 'key -> 'value -> ('key, 'value) t exception NotFound val lookup : ('key, 'value) t -> 'key -> 'value val remove : ('key, 'value) t -> 'key -> ('key, 'value) t end ;; *) module Dict : DictSig = struct type ('key, 'value) t = | Empty | Node of ('key, 'value) t * 'key * 'value * ('key, 'value) t let empty = Empty let rec add d k v = match d with | Empty -> Node (Empty, k, v, Empty) | Node (l, k', v', r) -> if k = k' then Node (l, k, v, r) else if k < k' then Node (add l k v, k', v', r) else Node (l, k', v', add r k v) exception NotFound let rec lookup d k = match d with | Empty -> raise NotFound | Node (l, k', v', r) -> if k = k' then v' else if k < k' then lookup l k else lookup r k let rec append d1 d2 = match d2 with | Empty -> d1 | Node (l, k, v, r) -> append (append (add d1 k v) l) r let rec remove d k = match d with | Empty -> Empty | Node (l, k', v', r) -> if k = k' then append l r else Node ((remove l k), k', v', (remove r k)) end ;;
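The exercise above is satisfied once Dict also implements remove. A short usage sketch, assuming the Dict : DictSig module defined above is in scope (keys and values here are made up for illustration):

(* Build a small dictionary, look up a key, then remove it.
   Assumes the Dict module from the exercise above is loaded. *)
let () =
  let d = Dict.(add (add (add empty "one" 1) "two" 2) "three" 3) in
  Printf.printf "two -> %d\n" (Dict.lookup d "two");
  let d' = Dict.remove d "two" in
  match Dict.lookup d' "two" with
  | _ -> print_endline "still present"
  | exception Dict.NotFound -> print_endline "removed after Dict.remove"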
05a72fb03dc68e09c2a978a5950f772f6860f081698a310b36136e4c03c252d5
denisshevchenko/circlehs
Types.hs
| Module : Network . CircleCI.Common . Types Copyright : ( c ) , 2016 License : MIT Maintainer : Stability : alpha Common types for work with CircleCI API . Module : Network.CircleCI.Common.Types Copyright : (c) Denis Shevchenko, 2016 License : MIT Maintainer : Stability : alpha Common types for work with CircleCI API. -} module Network.CircleCI.Common.Types ( AccountAPIToken (..) , Token , UserName , ProjectName , BranchName , BuildNumber (..) , Email , CircleCIResponse , ProjectPoint (..) , ErrorMessage ) where import Servant.Client import Data.Text ( Text ) import Control.Monad.Reader -- import CircleCI.Common.Run -- | CircleCI account API token. List of account API tokens can be found at . newtype AccountAPIToken = AccountAPIToken Token -- | API token as text, for Servant. type Token = Text -- | GitHub user name. type UserName = Text -- | GitHub project name. type ProjectName = Text -- | GitHub branch name. type BranchName = Text -- | Number of project's build on CircleCI. newtype BuildNumber = BuildNumber Int deriving (Eq, Show) -- | User email address. type Email = Text -- | Monad for response from CircleCI. type CircleCIResponse a = ReaderT AccountAPIToken IO (Either ServantError a) -- | GitHub project identifier, composed from user name and project name. data ProjectPoint = ProjectPoint ^ GitHub user name . , projectName :: ProjectName -- ^ GitHub project name. } -- | Message about some problem. type ErrorMessage = Text
null
https://raw.githubusercontent.com/denisshevchenko/circlehs/0c01693723a234bb46ff8d1e6e114cce91dfa032/src/Network/CircleCI/Common/Types.hs
haskell
import CircleCI.Common.Run | CircleCI account API token. List of account API tokens can be found at . | API token as text, for Servant. | GitHub user name. | GitHub project name. | GitHub branch name. | Number of project's build on CircleCI. | User email address. | Monad for response from CircleCI. | GitHub project identifier, composed from user name and project name. ^ GitHub project name. | Message about some problem.
| Module : Network . CircleCI.Common . Types Copyright : ( c ) , 2016 License : MIT Maintainer : Stability : alpha Common types for work with CircleCI API . Module : Network.CircleCI.Common.Types Copyright : (c) Denis Shevchenko, 2016 License : MIT Maintainer : Stability : alpha Common types for work with CircleCI API. -} module Network.CircleCI.Common.Types ( AccountAPIToken (..) , Token , UserName , ProjectName , BranchName , BuildNumber (..) , Email , CircleCIResponse , ProjectPoint (..) , ErrorMessage ) where import Servant.Client import Data.Text ( Text ) import Control.Monad.Reader newtype AccountAPIToken = AccountAPIToken Token type Token = Text type UserName = Text type ProjectName = Text type BranchName = Text newtype BuildNumber = BuildNumber Int deriving (Eq, Show) type Email = Text type CircleCIResponse a = ReaderT AccountAPIToken IO (Either ServantError a) data ProjectPoint = ProjectPoint ^ GitHub user name . } type ErrorMessage = Text
e635c7994a35322e840c9b5929580241c30ac3486fa37f0e83e4bcf1b0737276
travelping/eradius
eradius_client.erl
%% @doc This module contains a RADIUS client that can be used to send authentication and accounting requests. A counter is kept for every NAS in order to determine the next request i d and sender port %% for each outgoing request. The implementation naively assumes that you won't send requests to a distinct number of over the lifetime of the VM , which is why the counters are not garbage - collected . %% %% The client uses OS-assigned ports. The maximum number of open ports can be specified through the %% ``client_ports'' application environment variable, it defaults to ``20''. The number of ports should not be set too low . If ` ` N '' ports are opened , the maximum number of concurrent requests is ` ` N * 256 '' . %% The IP address used to send requests is read < emph > once</emph > ( at startup ) from the ` ` client_ip '' %% parameter. Changing it currently requires a restart. It can be given as a string or ip address tuple, %% or the atom ``undefined'' (the default), which uses whatever address the OS selects. -module(eradius_client). -export([start_link/0, send_request/2, send_request/3, send_remote_request/3, send_remote_request/4]). %% internal -export([reconfigure/0, send_remote_request_loop/8, find_suitable_peer/1, restore_upstream_server/1, store_radius_server_from_pool/3, init_server_status_metrics/0]). -behaviour(gen_server). -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). -import(eradius_lib, [printable_peer/2]). -include_lib("stdlib/include/ms_transform.hrl"). -include_lib("kernel/include/logger.hrl"). -include("eradius_dict.hrl"). -include("eradius_lib.hrl"). -define(SERVER, ?MODULE). -define(DEFAULT_RETRIES, 3). -define(DEFAULT_TIMEOUT, 5000). -define(RECONFIGURE_TIMEOUT, 15000). -define(GOOD_CMD(Req), (Req#radius_request.cmd == 'request' orelse Req#radius_request.cmd == 'accreq' orelse Req#radius_request.cmd == 'coareq' orelse Req#radius_request.cmd == 'discreq')). -type nas_address() :: {string() | binary() | inet:ip_address(), eradius_server:port_number(), eradius_lib:secret()}. -type options() :: [{retries, pos_integer()} | {timeout, timeout()} | {server_name, atom()} | {metrics_info, {atom(), atom(), atom()}}]. -export_type([nas_address/0, options/0]). -include_lib("kernel/include/inet.hrl"). %% ------------------------------------------------------------------------------------------ %% -- API @private start_link() -> gen_server:start_link({local, ?SERVER}, ?MODULE, [], []). % @equiv send_request(NAS, Request, []) -spec send_request(nas_address(), #radius_request{}) -> {ok, binary()} | {error, 'timeout' | 'socket_down'}. send_request(NAS, Request) -> send_request(NAS, Request, []). @doc Send a radius request to the given NAS . % If no answer is received within the specified timeout, the request will be sent again. -spec send_request(nas_address(), #radius_request{}, options()) -> {ok, binary(), eradius_lib:authenticator()} | {error, 'timeout' | 'socket_down'}. 
send_request({Host, Port, Secret}, Request, Options) when ?GOOD_CMD(Request) andalso is_binary(Host) -> send_request({erlang:binary_to_list(Host), Port, Secret}, Request, Options); send_request({Host, Port, Secret}, Request, Options) when ?GOOD_CMD(Request) andalso is_list(Host) -> IP = get_ip(Host), send_request({IP, Port, Secret}, Request, Options); send_request({IP, Port, Secret}, Request, Options) when ?GOOD_CMD(Request) andalso is_tuple(IP) -> TS1 = erlang:monotonic_time(), ServerName = proplists:get_value(server_name, Options, undefined), MetricsInfo = make_metrics_info(Options, {IP, Port}), Retries = proplists:get_value(retries, Options, ?DEFAULT_RETRIES), Timeout = proplists:get_value(timeout, Options, ?DEFAULT_TIMEOUT), SendReqFn = fun () -> Peer = {ServerName, {IP, Port}}, update_client_requests(MetricsInfo), {Socket, ReqId} = gen_server:call(?SERVER, {wanna_send, Peer, MetricsInfo}), Response = send_request_loop(Socket, ReqId, Peer, Request#radius_request{reqid = ReqId, secret = Secret}, Retries, Timeout, MetricsInfo), proceed_response(Request, Response, Peer, TS1, MetricsInfo, Options) end, % If we have other RADIUS upstream servers check current one, % maybe it is already marked as inactive and try to find another one case proplists:get_value(failover, Options, []) of [] -> SendReqFn(); UpstreamServers -> case find_suitable_peer([{IP, Port, Secret} | UpstreamServers]) of [] -> no_active_servers; {{IP, Port, Secret}, _NewPool} -> SendReqFn(); {NewPeer, []} -> % Special case, we don't have servers in the pool anymore, but we need % to preserve `failover` option to mark current server as inactive if % it will fail NewOptions = lists:keyreplace(failover, 1, Options, {failover, undefined}), send_request(NewPeer, Request, NewOptions); {NewPeer, NewPool} -> % current server is not in list of active servers, so use another one NewOptions = lists:keyreplace(failover, 1, Options, {failover, NewPool}), send_request(NewPeer, Request, NewOptions) end end; send_request({_IP, _Port, _Secret}, _Request, _Options) -> error(badarg). @equiv send_remote_request(Node , NAS , Request , [ ] ) -spec send_remote_request(node(), nas_address(), #radius_request{}) -> {ok, binary()} | {error, 'timeout' | 'node_down' | 'socket_down'}. send_remote_request(Node, NAS, Request) -> send_remote_request(Node, NAS, Request, []). @doc Send a radius request to the given NAS through a socket on the specified node . % If no answer is received within the specified timeout, the request will be sent again. % The request will not be sent again if the remote node is unreachable. -spec send_remote_request(node(), nas_address(), #radius_request{}, options()) -> {ok, binary()} | {error, 'timeout' | 'node_down' | 'socket_down'}. 
send_remote_request(Node, {IP, Port, Secret}, Request, Options) when ?GOOD_CMD(Request) -> TS1 = erlang:monotonic_time(), ServerName = proplists:get_value(server_name, Options, undefined), MetricsInfo = make_metrics_info(Options, {IP, Port}), update_client_requests(MetricsInfo), Peer = {ServerName, {IP, Port}}, try gen_server:call({?SERVER, Node}, {wanna_send, Peer, MetricsInfo}) of {Socket, ReqId} -> Request1 = case eradius_node_mon:get_remote_version(Node) of {0, Minor} when Minor < 6 -> {_, EncRequest} = eradius_lib:encode_request(Request#radius_request{reqid = ReqId, secret = Secret}), EncRequest; _ -> Request#radius_request{reqid = ReqId, secret = Secret} end, Retries = proplists:get_value(retries, Options, ?DEFAULT_RETRIES), Timeout = proplists:get_value(timeout, Options, ?DEFAULT_TIMEOUT), SenderPid = spawn(Node, ?MODULE, send_remote_request_loop, [self(), Socket, ReqId, Peer, Request1, Retries, Timeout, MetricsInfo]), SenderMonitor = monitor(process, SenderPid), Response = receive {SenderPid, Result} -> erlang:demonitor(SenderMonitor, [flush]), Result; {'DOWN', SenderMonitor, process, SenderPid, _Reason} -> {error, socket_down} end, proceed_response(Request, Response, Peer, TS1, MetricsInfo, Options) catch exit:{{nodedown, Node}, _} -> {error, node_down} end; send_remote_request(_Node, {_IP, _Port, _Secret}, _Request, _Options) -> error(badarg). restore_upstream_server({ServerIP, Port, Retries, InitialRetries}) -> ets:insert(?MODULE, {{ServerIP, Port}, Retries, InitialRetries}). proceed_response(Request, {ok, Response, Secret, Authenticator}, _Peer = {_ServerName, {ServerIP, Port}}, TS1, MetricsInfo, Options) -> update_client_request(Request#radius_request.cmd, MetricsInfo, erlang:monotonic_time() - TS1, Request), update_client_responses(MetricsInfo), case eradius_lib:decode_request(Response, Secret, Authenticator) of {bad_pdu, "Message-Authenticator Attribute is invalid" = Reason} -> update_client_response(bad_authenticator, MetricsInfo, Request), ?LOG(error, "~s INF: Noreply for request ~p. Could not decode the request, reason: ~s", [printable_peer(ServerIP, Port), Request, Reason]), noreply; {bad_pdu, "Authenticator Attribute is invalid" = Reason} -> update_client_response(bad_authenticator, MetricsInfo, Request), ?LOG(error, "~s INF: Noreply for request ~p. Could not decode the request, reason: ~s", [printable_peer(ServerIP, Port), Request, Reason]), noreply; {bad_pdu, "unknown request type" = Reason} -> update_client_response(unknown_req_type, MetricsInfo, Request), ?LOG(error, "~s INF: Noreply for request ~p. Could not decode the request, reason: ~s", [printable_peer(ServerIP, Port), Request, Reason]), noreply; {bad_pdu, Reason} -> update_client_response(dropped, MetricsInfo, Request), ?LOG(error, "~s INF: Noreply for request ~p. Could not decode the request, reason: ~s", [printable_peer(ServerIP, Port), Request, Reason]), maybe_failover(Request, noreply, {ServerIP, Port}, Options); Decoded -> update_server_status_metric(ServerIP, Port, true, Options), update_client_response(Decoded#radius_request.cmd, MetricsInfo, Request), {ok, Response, Authenticator} end; proceed_response(Request, Response, {_ServerName, {ServerIP, Port}}, TS1, MetricsInfo, Options) -> update_client_responses(MetricsInfo), update_client_request(Request#radius_request.cmd, MetricsInfo, erlang:monotonic_time() - TS1, Request), maybe_failover(Request, Response, {ServerIP, Port}, Options). 
maybe_failover(Request, Response, {ServerIP, Port}, Options) -> update_server_status_metric(ServerIP, Port, false, Options), case proplists:get_value(failover, Options, []) of [] -> Response; UpstreamServers -> handle_failed_request(Request, {ServerIP, Port}, UpstreamServers, Response, Options) end. handle_failed_request(Request, {ServerIP, Port} = _FailedServer, UpstreamServers, Response, Options) -> case ets:lookup(?MODULE, {ServerIP, Port}) of [{{ServerIP, Port}, Retries, InitialRetries}] -> FailedTries = proplists:get_value(retries, Options, ?DEFAULT_RETRIES), % Mark the given RADIUS server as 'non-active' if it failed more times % than allowed if FailedTries >= Retries -> ets:delete(?MODULE, {ServerIP, Port}), Timeout = application:get_env(eradius, unreachable_timeout, 60), timer:apply_after(Timeout * 1000, ?MODULE, restore_upstream_server, [{ServerIP, Port, InitialRetries, InitialRetries}]); true -> % The RADIUS client tried to send a request to the {ServerIP, Port} RADIUS server; % FailedTries attempts were made and all of them failed. % So decrease the number of tries left for the given RADIUS server % that will be used for the next RADIUS requests towards this RADIUS server. ets:update_counter(?MODULE, {ServerIP, Port}, -FailedTries) end; [] -> ok end, case find_suitable_peer(UpstreamServers) of [] -> Response; {NewPeer, NewPool} -> % leave only active upstream servers NewOptions = lists:keyreplace(failover, 1, Options, {failover, NewPool}), send_request(NewPeer, Request, NewOptions) end. %% @private send_remote_request_loop(ReplyPid, Socket, ReqId, Peer, EncRequest, Retries, Timeout, MetricsInfo) -> ReplyPid ! {self(), send_request_loop(Socket, ReqId, Peer, EncRequest, Retries, Timeout, MetricsInfo)}. send_request_loop(Socket, ReqId, Peer, Request = #radius_request{}, Retries, Timeout, undefined) -> send_request_loop(Socket, ReqId, Peer, Request, Retries, Timeout, eradius_lib:make_addr_info(Peer)); send_request_loop(Socket, ReqId, Peer, Request, Retries, Timeout, MetricsInfo) -> {Authenticator, EncRequest} = eradius_lib:encode_request(Request), SMon = erlang:monitor(process, Socket), send_request_loop(Socket, SMon, Peer, ReqId, Authenticator, EncRequest, Timeout, Retries, MetricsInfo, Request#radius_request.secret, Request). send_request_loop(_Socket, SMon, _Peer, _ReqId, _Authenticator, _EncRequest, Timeout, 0, MetricsInfo, _Secret, Request) -> TS = erlang:convert_time_unit(Timeout, millisecond, native), update_client_request(timeout, MetricsInfo, TS, Request), erlang:demonitor(SMon, [flush]), {error, timeout}; send_request_loop(Socket, SMon, Peer = {_ServerName, {IP, Port}}, ReqId, Authenticator, EncRequest, Timeout, RetryN, MetricsInfo, Secret, Request) -> Socket ! {self(), send_request, {IP, Port}, ReqId, EncRequest}, update_client_request(pending, MetricsInfo, 1, Request), receive {Socket, response, ReqId, Response} -> update_client_request(pending, MetricsInfo, -1, Request), {ok, Response, Secret, Authenticator}; {'DOWN', SMon, process, Socket, _} -> {error, socket_down}; {Socket, error, Error} -> {error, Error} after Timeout -> TS = erlang:convert_time_unit(Timeout, millisecond, native), update_client_request(retransmission, MetricsInfo, TS, Request), send_request_loop(Socket, SMon, Peer, ReqId, Authenticator, EncRequest, Timeout, RetryN - 1, MetricsInfo, Secret, Request) end. %% @private update_client_requests(MetricsInfo) -> eradius_counter:inc_counter(requests, MetricsInfo).
@private update_client_request(pending, MetricsInfo, Pending, _) -> if Pending =< 0 -> eradius_counter:dec_counter(pending, MetricsInfo); true -> eradius_counter:inc_counter(pending, MetricsInfo) end; update_client_request(Cmd, MetricsInfo, Ms, Request) -> eradius_counter:observe(eradius_client_request_duration_milliseconds, MetricsInfo, Ms, "Execution time of a RADIUS request"), update_client_request_by_type(Cmd, MetricsInfo, Ms, Request). @private update_client_request_by_type(request, MetricsInfo, Ms, _) -> eradius_counter:observe(eradius_client_access_request_duration_milliseconds, MetricsInfo, Ms, "Access-Request execution time"), eradius_counter:inc_counter(accessRequests, MetricsInfo); update_client_request_by_type(accreq, MetricsInfo, Ms, Request) -> eradius_counter:observe(eradius_client_accounting_request_duration_milliseconds, MetricsInfo, Ms, "Accounting-Request execution time"), inc_request_counter_accounting(MetricsInfo, Request); update_client_request_by_type(coareq, MetricsInfo, Ms, _) -> eradius_counter:observe(eradius_client_coa_request_duration_milliseconds, MetricsInfo, Ms, "Coa request execution time"), eradius_counter:inc_counter(coaRequests, MetricsInfo); update_client_request_by_type(discreq, MetricsInfo, Ms, _) -> eradius_counter:observe(eradius_client_disconnect_request_duration_milliseconds, MetricsInfo, Ms, "Disconnect execution time"), eradius_counter:inc_counter(discRequests, MetricsInfo); update_client_request_by_type(retransmission, MetricsInfo, _Ms, _) -> eradius_counter:inc_counter(retransmissions, MetricsInfo); update_client_request_by_type(timeout, MetricsInfo, _Ms, _) -> eradius_counter:inc_counter(timeouts, MetricsInfo); update_client_request_by_type(_, _, _, _) -> ok. @private update_client_responses(MetricsInfo) -> eradius_counter:inc_counter(replies, MetricsInfo). @private update_client_response(accept, MetricsInfo, _) -> eradius_counter:inc_counter(accessAccepts, MetricsInfo); update_client_response(reject, MetricsInfo, _) -> eradius_counter:inc_counter(accessRejects, MetricsInfo); update_client_response(challenge, MetricsInfo, _) -> eradius_counter:inc_counter(accessChallenges, MetricsInfo); update_client_response(accresp, MetricsInfo, Request) -> inc_responses_counter_accounting(MetricsInfo, Request); update_client_response(coanak, MetricsInfo, _) -> eradius_counter:inc_counter(coaNaks, MetricsInfo); update_client_response(coaack, MetricsInfo, _) -> eradius_counter:inc_counter(coaAcks, MetricsInfo); update_client_response(discnak, MetricsInfo, _) -> eradius_counter:inc_counter(discNaks, MetricsInfo); update_client_response(discack, MetricsInfo, _) -> eradius_counter:inc_counter(discAcks, MetricsInfo); update_client_response(dropped, MetricsInfo, _) -> eradius_counter:inc_counter(packetsDropped, MetricsInfo); update_client_response(bad_authenticator, MetricsInfo, _) -> eradius_counter:inc_counter(badAuthenticators, MetricsInfo); update_client_response(unknown_req_type, MetricsInfo, _) -> eradius_counter:inc_counter(unknownTypes, MetricsInfo); update_client_response(_, _, _) -> ok. @private reconfigure() -> catch gen_server:call(?SERVER, reconfigure, ?RECONFIGURE_TIMEOUT). %% ------------------------------------------------------------------------------------------ %% -- socket process manager -record(state, { socket_ip :: null | inet:ip_address(), no_ports = 1 :: pos_integer(), idcounters = maps:new() :: map(), sockets = array:new() :: array:array(), sup :: pid(), clients = [] :: [{{integer(),integer(),integer(),integer()}, integer()}] }). 
@private init([]) -> {ok, Sup} = eradius_client_sup:start(), case configure(#state{socket_ip = null, sup = Sup}) of {error, Error} -> {stop, Error}; Else -> Else end. @private handle_call({wanna_send, Peer = {_PeerName, PeerSocket}, _MetricsInfo}, _From, State) -> {PortIdx, ReqId, NewIdCounters} = next_port_and_req_id(PeerSocket, State#state.no_ports, State#state.idcounters), {SocketProcess, NewSockets} = find_socket_process(PortIdx, State#state.sockets, State#state.socket_ip, State#state.sup), IsCreated = lists:member(Peer, State#state.clients), NewState = case IsCreated of false -> State#state{idcounters = NewIdCounters, sockets = NewSockets, clients = [Peer | State#state.clients]}; true -> State#state{idcounters = NewIdCounters, sockets = NewSockets} end, {reply, {SocketProcess, ReqId}, NewState}; @private handle_call(reconfigure, _From, State) -> case configure(State) of {error, Error} -> {reply, Error, State}; {ok, NState} -> {reply, ok, NState} end; @private handle_call(debug, _From, State) -> {reply, {ok, State}, State}; @private handle_call(_OtherCall, _From, State) -> {noreply, State}. @private handle_cast(_Msg, State) -> {noreply, State}. @private handle_info({PortIdx, Pid}, State = #state{sockets = Sockets}) -> NSockets = update_socket_process(PortIdx, Sockets, Pid), {noreply, State#state{sockets = NSockets}}; handle_info(_Info, State) -> {noreply, State}. @private terminate(_Reason, _State) -> ok. @private code_change(_OldVsn, State, _Extra) -> {ok, State}. @private configure(State) -> case ets:info(?MODULE) of undefined -> prepare_pools(); _ -> % if ets table is already exists - which could be in a case of % reconfigure, just re-create the table and fill it with newly % configured pools of RADIUS upstream servers ets:delete(?MODULE), prepare_pools() end, {ok, ClientPortCount} = application:get_env(eradius, client_ports), {ok, ClientIP} = application:get_env(eradius, client_ip), case parse_ip(ClientIP) of {ok, Address} -> configure_address(State, ClientPortCount, Address); {error, _} -> ?LOG(error, "Invalid RADIUS client IP (parsing failed): ~p", [ClientIP]), {error, {bad_client_ip, ClientIP}} end. %% private prepare_pools() -> ets:new(?MODULE, [ordered_set, public, named_table, {keypos, 1}, {write_concurrency,true}]), lists:foreach(fun({_PoolName, Servers}) -> prepare_pool(Servers) end, application:get_env(eradius, servers_pool, [])), lists:foreach(fun(Server) -> store_upstream_servers(Server) end, application:get_env(eradius, servers, [])), init_server_status_metrics(). prepare_pool([]) -> ok; prepare_pool([{Addr, Port, _, Opts} | Servers]) -> Retries = proplists:get_value(retries, Opts, ?DEFAULT_RETRIES), store_radius_server_from_pool(Addr, Port, Retries), prepare_pool(Servers); prepare_pool([{Addr, Port, _} | Servers]) -> store_radius_server_from_pool(Addr, Port, ?DEFAULT_RETRIES), prepare_pool(Servers). 
store_upstream_servers({Server, _}) -> store_upstream_servers(Server); store_upstream_servers({Server, _, _}) -> store_upstream_servers(Server); store_upstream_servers(Server) -> HandlerDefinitions = application:get_env(eradius, Server, []), UpdatePoolFn = fun (HandlerOpts) -> {DefaultRoute, Routes, Retries} = eradius_proxy:get_routes_info(HandlerOpts), eradius_proxy:put_default_route_to_pool(DefaultRoute, Retries), eradius_proxy:put_routes_to_pool(Routes, Retries) end, lists:foreach(fun (HandlerDefinition) -> case HandlerDefinition of {{_, []}, _} -> ok; {{_, _, []}, _} -> ok; {{_, HandlerOpts}, _} -> UpdatePoolFn(HandlerOpts); {{_, _, HandlerOpts}, _} -> UpdatePoolFn(HandlerOpts); _HandlerDefinition -> ok end end, HandlerDefinitions). %% private store_radius_server_from_pool(Addr, Port, Retries) when is_tuple(Addr) and is_integer(Port) and is_integer(Retries) -> ets:insert(?MODULE, {{Addr, Port}, Retries, Retries}); store_radius_server_from_pool(Addr, Port, Retries) when is_list(Addr) and is_integer(Port) and is_integer(Retries) -> IP = get_ip(Addr), ets:insert(?MODULE, {{IP, Port}, Retries, Retries}); store_radius_server_from_pool(Addr, Port, Retries) -> ?LOG(error, "bad RADIUS upstream server specified in RADIUS servers pool configuration ~p", [{Addr, Port, Retries}]), error(badarg). configure_address(State = #state{socket_ip = OAdd, sockets = Sockts}, NPorts, NAdd) -> case OAdd of null -> {ok, State#state{socket_ip = NAdd, no_ports = NPorts}}; NAdd -> configure_ports(State, NPorts); _ -> ?LOG(info, "Reopening RADIUS client sockets (client_ip changed to ~s)", [inet:ntoa(NAdd)]), array:map( fun(_PortIdx, Pid) -> case Pid of undefined -> done; _ -> Pid ! close end end, Sockts), {ok, State#state{sockets = array:new(), socket_ip = NAdd, no_ports = NPorts}} end. configure_ports(State = #state{no_ports = OPorts, sockets = Sockets}, NPorts) -> if OPorts =< NPorts -> {ok, State#state{no_ports = NPorts}}; true -> Counters = fix_counters(NPorts, State#state.idcounters), NSockets = close_sockets(NPorts, Sockets), {ok, State#state{sockets = NSockets, no_ports = NPorts, idcounters = Counters}} end. fix_counters(NPorts, Counters) -> maps:map(fun(_Peer, Value = {NextPortIdx, _NextReqId}) when NextPortIdx < NPorts -> Value; (_Peer, {_NextPortIdx, NextReqId}) -> {0, NextReqId} end, Counters). close_sockets(NPorts, Sockets) -> case array:size(Sockets) =< NPorts of true -> Sockets; false -> List = array:to_list(Sockets), {_, Rest} = lists:split(NPorts, List), lists:map( fun(Pid) -> case Pid of undefined -> done; _ -> Pid ! close end end, Rest), array:resize(NPorts, Sockets) end. next_port_and_req_id(Peer, NumberOfPorts, Counters) -> case Counters of #{Peer := {NextPortIdx, ReqId}} when ReqId < 255 -> NextReqId = (ReqId + 1); #{Peer := {PortIdx, 255}} -> NextPortIdx = (PortIdx + 1) rem (NumberOfPorts - 1), NextReqId = 0; _ -> NextPortIdx = erlang:phash2(Peer, NumberOfPorts), NextReqId = 0 end, NewCounters = Counters#{Peer => {NextPortIdx, NextReqId}}, {NextPortIdx, NextReqId, NewCounters}. find_socket_process(PortIdx, Sockets, SocketIP, Sup) -> case array:get(PortIdx, Sockets) of undefined -> Res = supervisor:start_child(Sup, {PortIdx, {eradius_client_socket, start, [SocketIP, self(), PortIdx]}, transient, brutal_kill, worker, [eradius_client_socket]}), Pid = case Res of {ok, P} -> P; {error, already_present} -> {ok, P} = supervisor:restart_child(Sup, PortIdx), P end, {Pid, array:set(PortIdx, Pid, Sockets)}; Pid when is_pid(Pid) -> {Pid, Sockets} end. 
update_socket_process(PortIdx, Sockets, Pid) -> array:set(PortIdx, Pid, Sockets). parse_ip(undefined) -> {ok, undefined}; parse_ip(Address) when is_list(Address) -> inet_parse:address(Address); parse_ip(T = {_, _, _, _}) -> {ok, T}; parse_ip(T = {_, _, _, _, _, _}) -> {ok, T}. init_server_status_metrics() -> case application:get_env(eradius, server_status_metrics_enabled, false) of false -> ok; true -> % This will be called at eradius startup and we must be sure that the prometheus % application is already started if server status metrics are supposed to be used application:ensure_all_started(prometheus), ets:foldl(fun ({{Addr, Port}, _, _}, _Acc) -> eradius_counter:set_boolean_metric(server_status, [Addr, Port], false) end, [], ?MODULE) end. make_metrics_info(Options, {ServerIP, ServerPort}) -> ServerName = proplists:get_value(server_name, Options, undefined), ClientName = proplists:get_value(client_name, Options, undefined), ClientIP = application:get_env(eradius, client_ip, undefined), {ok, ParsedClientIP} = parse_ip(ClientIP), ClientAddrInfo = eradius_lib:make_addr_info({ClientName, {ParsedClientIP, undefined}}), ServerAddrInfo = eradius_lib:make_addr_info({ServerName, {ServerIP, ServerPort}}), {ClientAddrInfo, ServerAddrInfo}. inc_request_counter_accounting(MetricsInfo, #radius_request{attrs = Attrs}) -> Requests = ets:match_spec_run(Attrs, client_request_counter_account_match_spec_compile()), [eradius_counter:inc_counter(Type, MetricsInfo) || Type <- Requests], ok; inc_request_counter_accounting(_, _) -> ok. inc_responses_counter_accounting(MetricsInfo, #radius_request{attrs = Attrs}) -> Responses = ets:match_spec_run(Attrs, client_response_counter_account_match_spec_compile()), [eradius_counter:inc_counter(Type, MetricsInfo) || Type <- Responses], ok; inc_responses_counter_accounting(_, _) -> ok. update_server_status_metric(IP, Port, false, _Options) -> eradius_counter:set_boolean_metric(server_status, [IP, Port], false); update_server_status_metric(IP, Port, true, Options) -> UpstreamServers = proplists:get_value(failover, Options, []), % set all servers from the pool as inactive if is_list(UpstreamServers) -> lists:foreach(fun (Server) -> case Server of {ServerIP, ServerPort, _} -> eradius_counter:set_boolean_metric(server_status, [ServerIP, ServerPort], false); {ServerIP, ServerPort, _, _} -> eradius_counter:set_boolean_metric(server_status, [ServerIP, ServerPort], false); _ -> ok end end, UpstreamServers); true -> ok end, % set the current server as active eradius_counter:set_boolean_metric(server_status, [IP, Port], true). client_request_counter_account_match_spec_compile() -> case persistent_term:get({?MODULE, ?FUNCTION_NAME}, undefined) of undefined -> MatchSpecCompile = ets:match_spec_compile(ets:fun2ms(fun ({?RStatus_Type, ?RStatus_Type_Start}) -> accountRequestsStart; ({?RStatus_Type, ?RStatus_Type_Stop}) -> accountRequestsStop; ({?RStatus_Type, ?RStatus_Type_Update}) -> accountRequestsUpdate; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Start}) -> accountRequestsStart; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Stop}) -> accountRequestsStop; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Update}) -> accountRequestsUpdate end)), persistent_term:put({?MODULE, ?FUNCTION_NAME}, MatchSpecCompile), MatchSpecCompile; MatchSpecCompile -> MatchSpecCompile end.
client_response_counter_account_match_spec_compile() -> case persistent_term:get({?MODULE, ?FUNCTION_NAME}, undefined) of undefined -> MatchSpecCompile = ets:match_spec_compile(ets:fun2ms(fun ({?RStatus_Type, ?RStatus_Type_Start}) -> accountResponsesStart; ({?RStatus_Type, ?RStatus_Type_Stop}) -> accountResponsesStop; ({?RStatus_Type, ?RStatus_Type_Update}) -> accountResponsesUpdate; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Start}) -> accountResponsesStart; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Stop}) -> accountResponsesStop; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Update}) -> accountResponsesUpdate end)), persistent_term:put({?MODULE, ?FUNCTION_NAME}, MatchSpecCompile), MatchSpecCompile; MatchSpecCompile -> MatchSpecCompile end. find_suitable_peer(undefined) -> []; find_suitable_peer([]) -> []; find_suitable_peer([{Host, Port, Secret} | Pool]) when is_list(Host) -> try IP = get_ip(Host), find_suitable_peer([{IP, Port, Secret} | Pool]) catch _:_ -> % can't resolve ip by some reasons, just ignore it find_suitable_peer(Pool) end; find_suitable_peer([{IP, Port, Secret} | Pool]) -> case ets:lookup(?MODULE, {IP, Port}) of [] -> find_suitable_peer(Pool); [{{IP, Port}, _Retries, _InitialRetries}] -> {{IP, Port, Secret}, Pool} end; find_suitable_peer([{IP, Port, Secret, _Opts} | Pool]) -> find_suitable_peer([{IP, Port, Secret} | Pool]). get_ip(Host) -> case inet:gethostbyname(Host) of {ok, #hostent{h_addrtype = inet, h_addr_list = [IP]}} -> IP; {ok, #hostent{h_addrtype = inet, h_addr_list = [_ | _] = IPs}} -> Index = rand:uniform(length(IPs)), lists:nth(Index, IPs); _ -> error(badarg) end.
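The send_request/3 flow above reads its knobs from the options proplist (retries, timeout, server_name, failover) and hands the failover pool to find_suitable_peer/1. A minimal calling sketch, not part of the module itself: the addresses, ports, secrets, the wrapper name auth_example/0 and the include path are invented or assumed for illustration, and real requests would also carry attributes.
%% Sketch only: {10,0,0,1}/{10,0,0,2}, ports and secrets are made-up placeholder values.
-module(eradius_client_usage_example).
-export([auth_example/0]).
-include_lib("eradius/include/eradius_lib.hrl").  % assumed public header location

auth_example() ->
    Primary  = {{10,0,0,1}, 1812, <<"secret1">>},
    Fallback = [{{10,0,0,2}, 1812, <<"secret2">>}],
    Req  = #radius_request{cmd = request},     % an Access-Request; attributes omitted for brevity
    Opts = [{retries, 3},                      % per-server retransmissions (?DEFAULT_RETRIES)
            {timeout, 5000},                   % per-try timeout in milliseconds (?DEFAULT_TIMEOUT)
            {server_name, primary_radius},     % only used to label metrics
            {failover, Fallback}],             % consulted when Primary is marked inactive
    case eradius_client:send_request(Primary, Req, Opts) of
        {ok, ReplyBin, Authenticator} ->
            %% the reply comes back encoded; decode it with the same shared secret
            eradius_lib:decode_request(ReplyBin, <<"secret1">>, Authenticator);
        Other ->
            %% e.g. {error, timeout}, {error, socket_down}, noreply or no_active_servers
            Other
    end.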
null
https://raw.githubusercontent.com/travelping/eradius/4bf56b94f76ee83349ce0356b82c45ea25bb9f1e/src/eradius_client.erl
erlang
@doc This module contains a RADIUS client that can be used to send authentication and accounting requests. A counter is kept for every NAS in order to determine the next request id and sender port for each outgoing request. The implementation naively assumes that you won't send requests to a distinct number of NASs over the lifetime of the VM, which is why the counters are not garbage-collected. The client uses OS-assigned ports. The maximum number of open ports can be specified through the ``client_ports'' application environment variable, it defaults to ``20''. The number of ports should not be set too low. If ``N'' ports are opened, the maximum number of concurrent requests is ``N * 256''. The IP address used to send requests is read <emph>once</emph> (at startup) from the ``client_ip'' parameter. Changing it currently requires a restart. It can be given as a string or ip address tuple, or the atom ``undefined'' (the default), which uses whatever address the OS selects. internal ------------------------------------------------------------------------------------------ -- API @equiv send_request(NAS, Request, []) If no answer is received within the specified timeout, the request will be sent again. If we have other RADIUS upstream servers check the current one, maybe it is already marked as inactive and try to find another one Special case, we don't have servers in the pool anymore, but we need to preserve the `failover` option to mark the current server as inactive if it fails current server is not in the list of active servers, so use another one If no answer is received within the specified timeout, the request will be sent again. The request will not be sent again if the remote node is unreachable. Mark the given RADIUS server as 'non-active' if it failed more times than allowed So decrease the number of tries left for the given RADIUS server that will be used for the next RADIUS requests towards this RADIUS server. leave only active upstream servers ------------------------------------------------------------------------------------------ -- socket process manager if the ets table already exists - which could be the case on reconfigure, just re-create the table and fill it with the newly configured pools of RADIUS upstream servers private private That will be called at eradius startup and we must be sure that the prometheus application is already started if server status metrics are supposed to be used set the current server as active can't resolve the ip for some reason, just ignore it
-module(eradius_client). -export([start_link/0, send_request/2, send_request/3, send_remote_request/3, send_remote_request/4]). -export([reconfigure/0, send_remote_request_loop/8, find_suitable_peer/1, restore_upstream_server/1, store_radius_server_from_pool/3, init_server_status_metrics/0]). -behaviour(gen_server). -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). -import(eradius_lib, [printable_peer/2]). -include_lib("stdlib/include/ms_transform.hrl"). -include_lib("kernel/include/logger.hrl"). -include("eradius_dict.hrl"). -include("eradius_lib.hrl"). -define(SERVER, ?MODULE). -define(DEFAULT_RETRIES, 3). -define(DEFAULT_TIMEOUT, 5000). -define(RECONFIGURE_TIMEOUT, 15000). -define(GOOD_CMD(Req), (Req#radius_request.cmd == 'request' orelse Req#radius_request.cmd == 'accreq' orelse Req#radius_request.cmd == 'coareq' orelse Req#radius_request.cmd == 'discreq')). -type nas_address() :: {string() | binary() | inet:ip_address(), eradius_server:port_number(), eradius_lib:secret()}. -type options() :: [{retries, pos_integer()} | {timeout, timeout()} | {server_name, atom()} | {metrics_info, {atom(), atom(), atom()}}]. -export_type([nas_address/0, options/0]). -include_lib("kernel/include/inet.hrl"). %% @private start_link() -> gen_server:start_link({local, ?SERVER}, ?MODULE, [], []). -spec send_request(nas_address(), #radius_request{}) -> {ok, binary()} | {error, 'timeout' | 'socket_down'}. send_request(NAS, Request) -> send_request(NAS, Request, []). %% @doc Send a radius request to the given NAS. -spec send_request(nas_address(), #radius_request{}, options()) -> {ok, binary(), eradius_lib:authenticator()} | {error, 'timeout' | 'socket_down'}.
send_request({Host, Port, Secret}, Request, Options) when ?GOOD_CMD(Request) andalso is_binary(Host) -> send_request({erlang:binary_to_list(Host), Port, Secret}, Request, Options); send_request({Host, Port, Secret}, Request, Options) when ?GOOD_CMD(Request) andalso is_list(Host) -> IP = get_ip(Host), send_request({IP, Port, Secret}, Request, Options); send_request({IP, Port, Secret}, Request, Options) when ?GOOD_CMD(Request) andalso is_tuple(IP) -> TS1 = erlang:monotonic_time(), ServerName = proplists:get_value(server_name, Options, undefined), MetricsInfo = make_metrics_info(Options, {IP, Port}), Retries = proplists:get_value(retries, Options, ?DEFAULT_RETRIES), Timeout = proplists:get_value(timeout, Options, ?DEFAULT_TIMEOUT), SendReqFn = fun () -> Peer = {ServerName, {IP, Port}}, update_client_requests(MetricsInfo), {Socket, ReqId} = gen_server:call(?SERVER, {wanna_send, Peer, MetricsInfo}), Response = send_request_loop(Socket, ReqId, Peer, Request#radius_request{reqid = ReqId, secret = Secret}, Retries, Timeout, MetricsInfo), proceed_response(Request, Response, Peer, TS1, MetricsInfo, Options) end, one case proplists:get_value(failover, Options, []) of [] -> SendReqFn(); UpstreamServers -> case find_suitable_peer([{IP, Port, Secret} | UpstreamServers]) of [] -> no_active_servers; {{IP, Port, Secret}, _NewPool} -> SendReqFn(); {NewPeer, []} -> NewOptions = lists:keyreplace(failover, 1, Options, {failover, undefined}), send_request(NewPeer, Request, NewOptions); {NewPeer, NewPool} -> NewOptions = lists:keyreplace(failover, 1, Options, {failover, NewPool}), send_request(NewPeer, Request, NewOptions) end end; send_request({_IP, _Port, _Secret}, _Request, _Options) -> error(badarg). @equiv send_remote_request(Node , NAS , Request , [ ] ) -spec send_remote_request(node(), nas_address(), #radius_request{}) -> {ok, binary()} | {error, 'timeout' | 'node_down' | 'socket_down'}. send_remote_request(Node, NAS, Request) -> send_remote_request(Node, NAS, Request, []). @doc Send a radius request to the given NAS through a socket on the specified node . -spec send_remote_request(node(), nas_address(), #radius_request{}, options()) -> {ok, binary()} | {error, 'timeout' | 'node_down' | 'socket_down'}. 
send_remote_request(Node, {IP, Port, Secret}, Request, Options) when ?GOOD_CMD(Request) -> TS1 = erlang:monotonic_time(), ServerName = proplists:get_value(server_name, Options, undefined), MetricsInfo = make_metrics_info(Options, {IP, Port}), update_client_requests(MetricsInfo), Peer = {ServerName, {IP, Port}}, try gen_server:call({?SERVER, Node}, {wanna_send, Peer, MetricsInfo}) of {Socket, ReqId} -> Request1 = case eradius_node_mon:get_remote_version(Node) of {0, Minor} when Minor < 6 -> {_, EncRequest} = eradius_lib:encode_request(Request#radius_request{reqid = ReqId, secret = Secret}), EncRequest; _ -> Request#radius_request{reqid = ReqId, secret = Secret} end, Retries = proplists:get_value(retries, Options, ?DEFAULT_RETRIES), Timeout = proplists:get_value(timeout, Options, ?DEFAULT_TIMEOUT), SenderPid = spawn(Node, ?MODULE, send_remote_request_loop, [self(), Socket, ReqId, Peer, Request1, Retries, Timeout, MetricsInfo]), SenderMonitor = monitor(process, SenderPid), Response = receive {SenderPid, Result} -> erlang:demonitor(SenderMonitor, [flush]), Result; {'DOWN', SenderMonitor, process, SenderPid, _Reason} -> {error, socket_down} end, proceed_response(Request, Response, Peer, TS1, MetricsInfo, Options) catch exit:{{nodedown, Node}, _} -> {error, node_down} end; send_remote_request(_Node, {_IP, _Port, _Secret}, _Request, _Options) -> error(badarg). restore_upstream_server({ServerIP, Port, Retries, InitialRetries}) -> ets:insert(?MODULE, {{ServerIP, Port}, Retries, InitialRetries}). proceed_response(Request, {ok, Response, Secret, Authenticator}, _Peer = {_ServerName, {ServerIP, Port}}, TS1, MetricsInfo, Options) -> update_client_request(Request#radius_request.cmd, MetricsInfo, erlang:monotonic_time() - TS1, Request), update_client_responses(MetricsInfo), case eradius_lib:decode_request(Response, Secret, Authenticator) of {bad_pdu, "Message-Authenticator Attribute is invalid" = Reason} -> update_client_response(bad_authenticator, MetricsInfo, Request), ?LOG(error, "~s INF: Noreply for request ~p. Could not decode the request, reason: ~s", [printable_peer(ServerIP, Port), Request, Reason]), noreply; {bad_pdu, "Authenticator Attribute is invalid" = Reason} -> update_client_response(bad_authenticator, MetricsInfo, Request), ?LOG(error, "~s INF: Noreply for request ~p. Could not decode the request, reason: ~s", [printable_peer(ServerIP, Port), Request, Reason]), noreply; {bad_pdu, "unknown request type" = Reason} -> update_client_response(unknown_req_type, MetricsInfo, Request), ?LOG(error, "~s INF: Noreply for request ~p. Could not decode the request, reason: ~s", [printable_peer(ServerIP, Port), Request, Reason]), noreply; {bad_pdu, Reason} -> update_client_response(dropped, MetricsInfo, Request), ?LOG(error, "~s INF: Noreply for request ~p. Could not decode the request, reason: ~s", [printable_peer(ServerIP, Port), Request, Reason]), maybe_failover(Request, noreply, {ServerIP, Port}, Options); Decoded -> update_server_status_metric(ServerIP, Port, true, Options), update_client_response(Decoded#radius_request.cmd, MetricsInfo, Request), {ok, Response, Authenticator} end; proceed_response(Request, Response, {_ServerName, {ServerIP, Port}}, TS1, MetricsInfo, Options) -> update_client_responses(MetricsInfo), update_client_request(Request#radius_request.cmd, MetricsInfo, erlang:monotonic_time() - TS1, Request), maybe_failover(Request, Response, {ServerIP, Port}, Options). 
maybe_failover(Request, Response, {ServerIP, Port}, Options) -> update_server_status_metric(ServerIP, Port, false, Options), case proplists:get_value(failover, Options, []) of [] -> Response; UpstreamServers -> handle_failed_request(Request, {ServerIP, Port}, UpstreamServers, Response, Options) end. handle_failed_request(Request, {ServerIP, Port} = _FailedServer, UpstreamServers, Response, Options) -> case ets:lookup(?MODULE, {ServerIP, Port}) of [{{ServerIP, Port}, Retries, InitialRetries}] -> FailedTries = proplists:get_value(retries, Options, ?DEFAULT_RETRIES), if FailedTries >= Retries -> ets:delete(?MODULE, {ServerIP, Port}), Timeout = application:get_env(eradius, unreachable_timeout, 60), timer:apply_after(Timeout * 1000, ?MODULE, restore_upstream_server, [{ServerIP, Port, InitialRetries, InitialRetries}]); true -> RADIUS client tried to send a request to the { ServierIP , Port } RADIUS server . There were done FailedTries tries and all of them failed . ets:update_counter(?MODULE, {ServerIP, Port}, -FailedTries) end; [] -> ok end, case find_suitable_peer(UpstreamServers) of [] -> Response; {NewPeer, NewPool} -> NewOptions = lists:keyreplace(failover, 1, Options, {failover, NewPool}), send_request(NewPeer, Request, NewOptions) end. @private send_remote_request_loop(ReplyPid, Socket, ReqId, Peer, EncRequest, Retries, Timeout, MetricsInfo) -> ReplyPid ! {self(), send_request_loop(Socket, ReqId, Peer, EncRequest, Retries, Timeout, MetricsInfo)}. send_request_loop(Socket, ReqId, Peer, Request = #radius_request{}, Retries, Timeout, undefined) -> send_request_loop(Socket, ReqId, Peer, Request, Retries, Timeout, eradius_lib:make_addr_info(Peer)); send_request_loop(Socket, ReqId, Peer, Request, Retries, Timeout, MetricsInfo) -> {Authenticator, EncRequest} = eradius_lib:encode_request(Request), SMon = erlang:monitor(process, Socket), send_request_loop(Socket, SMon, Peer, ReqId, Authenticator, EncRequest, Timeout, Retries, MetricsInfo, Request#radius_request.secret, Request). send_request_loop(_Socket, SMon, _Peer, _ReqId, _Authenticator, _EncRequest, Timeout, 0, MetricsInfo, _Secret, Request) -> TS = erlang:convert_time_unit(Timeout, millisecond, native), update_client_request(timeout, MetricsInfo, TS, Request), erlang:demonitor(SMon, [flush]), {error, timeout}; send_request_loop(Socket, SMon, Peer = {_ServerName, {IP, Port}}, ReqId, Authenticator, EncRequest, Timeout, RetryN, MetricsInfo, Secret, Request) -> Socket ! {self(), send_request, {IP, Port}, ReqId, EncRequest}, update_client_request(pending, MetricsInfo, 1, Request), receive {Socket, response, ReqId, Response} -> update_client_request(pending, MetricsInfo, -1, Request), {ok, Response, Secret, Authenticator}; {'DOWN', SMon, process, Socket, _} -> {error, socket_down}; {Socket, error, Error} -> {error, Error} after Timeout -> TS = erlang:convert_time_unit(Timeout, millisecond, native), update_client_request(retransmission, MetricsInfo, TS, Request), send_request_loop(Socket, SMon, Peer, ReqId, Authenticator, EncRequest, Timeout, RetryN - 1, MetricsInfo, Secret, Request) end. @private update_client_requests(MetricsInfo) -> eradius_counter:inc_counter(requests, MetricsInfo). 
@private update_client_request(pending, MetricsInfo, Pending, _) -> if Pending =< 0 -> eradius_counter:dec_counter(pending, MetricsInfo); true -> eradius_counter:inc_counter(pending, MetricsInfo) end; update_client_request(Cmd, MetricsInfo, Ms, Request) -> eradius_counter:observe(eradius_client_request_duration_milliseconds, MetricsInfo, Ms, "Execution time of a RADIUS request"), update_client_request_by_type(Cmd, MetricsInfo, Ms, Request). @private update_client_request_by_type(request, MetricsInfo, Ms, _) -> eradius_counter:observe(eradius_client_access_request_duration_milliseconds, MetricsInfo, Ms, "Access-Request execution time"), eradius_counter:inc_counter(accessRequests, MetricsInfo); update_client_request_by_type(accreq, MetricsInfo, Ms, Request) -> eradius_counter:observe(eradius_client_accounting_request_duration_milliseconds, MetricsInfo, Ms, "Accounting-Request execution time"), inc_request_counter_accounting(MetricsInfo, Request); update_client_request_by_type(coareq, MetricsInfo, Ms, _) -> eradius_counter:observe(eradius_client_coa_request_duration_milliseconds, MetricsInfo, Ms, "Coa request execution time"), eradius_counter:inc_counter(coaRequests, MetricsInfo); update_client_request_by_type(discreq, MetricsInfo, Ms, _) -> eradius_counter:observe(eradius_client_disconnect_request_duration_milliseconds, MetricsInfo, Ms, "Disconnect execution time"), eradius_counter:inc_counter(discRequests, MetricsInfo); update_client_request_by_type(retransmission, MetricsInfo, _Ms, _) -> eradius_counter:inc_counter(retransmissions, MetricsInfo); update_client_request_by_type(timeout, MetricsInfo, _Ms, _) -> eradius_counter:inc_counter(timeouts, MetricsInfo); update_client_request_by_type(_, _, _, _) -> ok. @private update_client_responses(MetricsInfo) -> eradius_counter:inc_counter(replies, MetricsInfo). @private update_client_response(accept, MetricsInfo, _) -> eradius_counter:inc_counter(accessAccepts, MetricsInfo); update_client_response(reject, MetricsInfo, _) -> eradius_counter:inc_counter(accessRejects, MetricsInfo); update_client_response(challenge, MetricsInfo, _) -> eradius_counter:inc_counter(accessChallenges, MetricsInfo); update_client_response(accresp, MetricsInfo, Request) -> inc_responses_counter_accounting(MetricsInfo, Request); update_client_response(coanak, MetricsInfo, _) -> eradius_counter:inc_counter(coaNaks, MetricsInfo); update_client_response(coaack, MetricsInfo, _) -> eradius_counter:inc_counter(coaAcks, MetricsInfo); update_client_response(discnak, MetricsInfo, _) -> eradius_counter:inc_counter(discNaks, MetricsInfo); update_client_response(discack, MetricsInfo, _) -> eradius_counter:inc_counter(discAcks, MetricsInfo); update_client_response(dropped, MetricsInfo, _) -> eradius_counter:inc_counter(packetsDropped, MetricsInfo); update_client_response(bad_authenticator, MetricsInfo, _) -> eradius_counter:inc_counter(badAuthenticators, MetricsInfo); update_client_response(unknown_req_type, MetricsInfo, _) -> eradius_counter:inc_counter(unknownTypes, MetricsInfo); update_client_response(_, _, _) -> ok. @private reconfigure() -> catch gen_server:call(?SERVER, reconfigure, ?RECONFIGURE_TIMEOUT). -record(state, { socket_ip :: null | inet:ip_address(), no_ports = 1 :: pos_integer(), idcounters = maps:new() :: map(), sockets = array:new() :: array:array(), sup :: pid(), clients = [] :: [{{integer(),integer(),integer(),integer()}, integer()}] }). 
@private init([]) -> {ok, Sup} = eradius_client_sup:start(), case configure(#state{socket_ip = null, sup = Sup}) of {error, Error} -> {stop, Error}; Else -> Else end. @private handle_call({wanna_send, Peer = {_PeerName, PeerSocket}, _MetricsInfo}, _From, State) -> {PortIdx, ReqId, NewIdCounters} = next_port_and_req_id(PeerSocket, State#state.no_ports, State#state.idcounters), {SocketProcess, NewSockets} = find_socket_process(PortIdx, State#state.sockets, State#state.socket_ip, State#state.sup), IsCreated = lists:member(Peer, State#state.clients), NewState = case IsCreated of false -> State#state{idcounters = NewIdCounters, sockets = NewSockets, clients = [Peer | State#state.clients]}; true -> State#state{idcounters = NewIdCounters, sockets = NewSockets} end, {reply, {SocketProcess, ReqId}, NewState}; @private handle_call(reconfigure, _From, State) -> case configure(State) of {error, Error} -> {reply, Error, State}; {ok, NState} -> {reply, ok, NState} end; @private handle_call(debug, _From, State) -> {reply, {ok, State}, State}; @private handle_call(_OtherCall, _From, State) -> {noreply, State}. @private handle_cast(_Msg, State) -> {noreply, State}. @private handle_info({PortIdx, Pid}, State = #state{sockets = Sockets}) -> NSockets = update_socket_process(PortIdx, Sockets, Pid), {noreply, State#state{sockets = NSockets}}; handle_info(_Info, State) -> {noreply, State}. @private terminate(_Reason, _State) -> ok. @private code_change(_OldVsn, State, _Extra) -> {ok, State}. @private configure(State) -> case ets:info(?MODULE) of undefined -> prepare_pools(); _ -> ets:delete(?MODULE), prepare_pools() end, {ok, ClientPortCount} = application:get_env(eradius, client_ports), {ok, ClientIP} = application:get_env(eradius, client_ip), case parse_ip(ClientIP) of {ok, Address} -> configure_address(State, ClientPortCount, Address); {error, _} -> ?LOG(error, "Invalid RADIUS client IP (parsing failed): ~p", [ClientIP]), {error, {bad_client_ip, ClientIP}} end. prepare_pools() -> ets:new(?MODULE, [ordered_set, public, named_table, {keypos, 1}, {write_concurrency,true}]), lists:foreach(fun({_PoolName, Servers}) -> prepare_pool(Servers) end, application:get_env(eradius, servers_pool, [])), lists:foreach(fun(Server) -> store_upstream_servers(Server) end, application:get_env(eradius, servers, [])), init_server_status_metrics(). prepare_pool([]) -> ok; prepare_pool([{Addr, Port, _, Opts} | Servers]) -> Retries = proplists:get_value(retries, Opts, ?DEFAULT_RETRIES), store_radius_server_from_pool(Addr, Port, Retries), prepare_pool(Servers); prepare_pool([{Addr, Port, _} | Servers]) -> store_radius_server_from_pool(Addr, Port, ?DEFAULT_RETRIES), prepare_pool(Servers). store_upstream_servers({Server, _}) -> store_upstream_servers(Server); store_upstream_servers({Server, _, _}) -> store_upstream_servers(Server); store_upstream_servers(Server) -> HandlerDefinitions = application:get_env(eradius, Server, []), UpdatePoolFn = fun (HandlerOpts) -> {DefaultRoute, Routes, Retries} = eradius_proxy:get_routes_info(HandlerOpts), eradius_proxy:put_default_route_to_pool(DefaultRoute, Retries), eradius_proxy:put_routes_to_pool(Routes, Retries) end, lists:foreach(fun (HandlerDefinition) -> case HandlerDefinition of {{_, []}, _} -> ok; {{_, _, []}, _} -> ok; {{_, HandlerOpts}, _} -> UpdatePoolFn(HandlerOpts); {{_, _, HandlerOpts}, _} -> UpdatePoolFn(HandlerOpts); _HandlerDefinition -> ok end end, HandlerDefinitions). 
store_radius_server_from_pool(Addr, Port, Retries) when is_tuple(Addr) and is_integer(Port) and is_integer(Retries) -> ets:insert(?MODULE, {{Addr, Port}, Retries, Retries}); store_radius_server_from_pool(Addr, Port, Retries) when is_list(Addr) and is_integer(Port) and is_integer(Retries) -> IP = get_ip(Addr), ets:insert(?MODULE, {{IP, Port}, Retries, Retries}); store_radius_server_from_pool(Addr, Port, Retries) -> ?LOG(error, "bad RADIUS upstream server specified in RADIUS servers pool configuration ~p", [{Addr, Port, Retries}]), error(badarg). configure_address(State = #state{socket_ip = OAdd, sockets = Sockts}, NPorts, NAdd) -> case OAdd of null -> {ok, State#state{socket_ip = NAdd, no_ports = NPorts}}; NAdd -> configure_ports(State, NPorts); _ -> ?LOG(info, "Reopening RADIUS client sockets (client_ip changed to ~s)", [inet:ntoa(NAdd)]), array:map( fun(_PortIdx, Pid) -> case Pid of undefined -> done; _ -> Pid ! close end end, Sockts), {ok, State#state{sockets = array:new(), socket_ip = NAdd, no_ports = NPorts}} end. configure_ports(State = #state{no_ports = OPorts, sockets = Sockets}, NPorts) -> if OPorts =< NPorts -> {ok, State#state{no_ports = NPorts}}; true -> Counters = fix_counters(NPorts, State#state.idcounters), NSockets = close_sockets(NPorts, Sockets), {ok, State#state{sockets = NSockets, no_ports = NPorts, idcounters = Counters}} end. fix_counters(NPorts, Counters) -> maps:map(fun(_Peer, Value = {NextPortIdx, _NextReqId}) when NextPortIdx < NPorts -> Value; (_Peer, {_NextPortIdx, NextReqId}) -> {0, NextReqId} end, Counters). close_sockets(NPorts, Sockets) -> case array:size(Sockets) =< NPorts of true -> Sockets; false -> List = array:to_list(Sockets), {_, Rest} = lists:split(NPorts, List), lists:map( fun(Pid) -> case Pid of undefined -> done; _ -> Pid ! close end end, Rest), array:resize(NPorts, Sockets) end. next_port_and_req_id(Peer, NumberOfPorts, Counters) -> case Counters of #{Peer := {NextPortIdx, ReqId}} when ReqId < 255 -> NextReqId = (ReqId + 1); #{Peer := {PortIdx, 255}} -> NextPortIdx = (PortIdx + 1) rem (NumberOfPorts - 1), NextReqId = 0; _ -> NextPortIdx = erlang:phash2(Peer, NumberOfPorts), NextReqId = 0 end, NewCounters = Counters#{Peer => {NextPortIdx, NextReqId}}, {NextPortIdx, NextReqId, NewCounters}. find_socket_process(PortIdx, Sockets, SocketIP, Sup) -> case array:get(PortIdx, Sockets) of undefined -> Res = supervisor:start_child(Sup, {PortIdx, {eradius_client_socket, start, [SocketIP, self(), PortIdx]}, transient, brutal_kill, worker, [eradius_client_socket]}), Pid = case Res of {ok, P} -> P; {error, already_present} -> {ok, P} = supervisor:restart_child(Sup, PortIdx), P end, {Pid, array:set(PortIdx, Pid, Sockets)}; Pid when is_pid(Pid) -> {Pid, Sockets} end. update_socket_process(PortIdx, Sockets, Pid) -> array:set(PortIdx, Pid, Sockets). parse_ip(undefined) -> {ok, undefined}; parse_ip(Address) when is_list(Address) -> inet_parse:address(Address); parse_ip(T = {_, _, _, _}) -> {ok, T}; parse_ip(T = {_, _, _, _, _, _}) -> {ok, T}. init_server_status_metrics() -> case application:get_env(eradius, server_status_metrics_enabled, false) of false -> ok; true -> That will be called at eradius startup and we must be sure that prometheus application:ensure_all_started(prometheus), ets:foldl(fun ({{Addr, Port}, _, _}, _Acc) -> eradius_counter:set_boolean_metric(server_status, [Addr, Port], false) end, [], ?MODULE) end. 
make_metrics_info(Options, {ServerIP, ServerPort}) -> ServerName = proplists:get_value(server_name, Options, undefined), ClientName = proplists:get_value(client_name, Options, undefined), ClientIP = application:get_env(eradius, client_ip, undefined), {ok, ParsedClientIP} = parse_ip(ClientIP), ClientAddrInfo = eradius_lib:make_addr_info({ClientName, {ParsedClientIP, undefined}}), ServerAddrInfo = eradius_lib:make_addr_info({ServerName, {ServerIP, ServerPort}}), {ClientAddrInfo, ServerAddrInfo}. inc_request_counter_accounting(MetricsInfo, #radius_request{attrs = Attrs}) -> Requests = ets:match_spec_run(Attrs, client_request_counter_account_match_spec_compile()), [eradius_counter:inc_counter(Type, MetricsInfo) || Type <- Requests], ok; inc_request_counter_accounting(_, _) -> ok. inc_responses_counter_accounting(MetricsInfo, #radius_request{attrs = Attrs}) -> Responses = ets:match_spec_run(Attrs, client_response_counter_account_match_spec_compile()), [eradius_counter:inc_counter(Type, MetricsInfo) || Type <- Responses], ok; inc_responses_counter_accounting(_, _) -> ok. update_server_status_metric(IP, Port, false, _Options) -> eradius_counter:set_boolean_metric(server_status, [IP, Port], false); update_server_status_metric(IP, Port, true, Options) -> UpstreamServers = proplists:get_value(failover, Options, []), set all from pool as inactive if is_list(UpstreamServers) -> lists:foreach(fun (Server) -> case Server of {ServerIP, ServerPort, _} -> eradius_counter:set_boolean_metric(server_status, [ServerIP, ServerPort], false); {ServerIP, ServerPort, _, _} -> eradius_counter:set_boolean_metric(server_status, [ServerIP, ServerPort], false); _ -> ok end end, UpstreamServers); true -> ok end, eradius_counter:set_boolean_metric(server_status, [IP, Port], true). client_request_counter_account_match_spec_compile() -> case persistent_term:get({?MODULE, ?FUNCTION_NAME}, undefined) of undefined -> MatchSpecCompile = ets:match_spec_compile(ets:fun2ms(fun ({?RStatus_Type, ?RStatus_Type_Start}) -> accountRequestsStart; ({?RStatus_Type, ?RStatus_Type_Stop}) -> accountRequestsStop; ({?RStatus_Type, ?RStatus_Type_Update}) -> accountRequestsUpdate; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Start}) -> accountRequestsStart; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Stop}) -> accountRequestsStop; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Update}) -> accountRequestsUpdate end)), persistent_term:put({?MODULE, ?FUNCTION_NAME}, MatchSpecCompile), MatchSpecCompile; MatchSpecCompile -> MatchSpecCompile end. client_response_counter_account_match_spec_compile() -> case persistent_term:get({?MODULE, ?FUNCTION_NAME}, undefined) of undefined -> MatchSpecCompile = ets:match_spec_compile(ets:fun2ms(fun ({?RStatus_Type, ?RStatus_Type_Start}) -> accountResponsesStart; ({?RStatus_Type, ?RStatus_Type_Stop}) -> accountResponsesStop; ({?RStatus_Type, ?RStatus_Type_Update}) -> accountResponsesUpdate; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Start}) -> accountResponsesStart; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Stop}) -> accountResponsesStop; ({#attribute{id = ?RStatus_Type}, ?RStatus_Type_Update}) -> accountResponsesUpdate end)), persistent_term:put({?MODULE, ?FUNCTION_NAME}, MatchSpecCompile), MatchSpecCompile; MatchSpecCompile -> MatchSpecCompile end. 
find_suitable_peer(undefined) -> []; find_suitable_peer([]) -> []; find_suitable_peer([{Host, Port, Secret} | Pool]) when is_list(Host) -> try IP = get_ip(Host), find_suitable_peer([{IP, Port, Secret} | Pool]) catch _:_ -> find_suitable_peer(Pool) end; find_suitable_peer([{IP, Port, Secret} | Pool]) -> case ets:lookup(?MODULE, {IP, Port}) of [] -> find_suitable_peer(Pool); [{{IP, Port}, _Retries, _InitialRetries}] -> {{IP, Port, Secret}, Pool} end; find_suitable_peer([{IP, Port, Secret, _Opts} | Pool]) -> find_suitable_peer([{IP, Port, Secret} | Pool]). get_ip(Host) -> case inet:gethostbyname(Host) of {ok, #hostent{h_addrtype = inet, h_addr_list = [IP]}} -> IP; {ok, #hostent{h_addrtype = inet, h_addr_list = [_ | _] = IPs}} -> Index = rand:uniform(length(IPs)), lists:nth(Index, IPs); _ -> error(badarg) end.
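configure/1 and prepare_pools/0 above read all of their settings from the eradius application environment. A sys.config-style sketch, with invented values, of a subset of the keys the code reads; the pool entries follow the {Addr, Port, Secret} and {Addr, Port, Secret, Opts} shapes matched by prepare_pool/1, where the third element's role as the shared secret is an assumption based on find_suitable_peer/1.
%% Sketch only -- addresses, secrets and the pool name are placeholders.
[{eradius, [
    {client_ip,     "10.0.0.10"},   % parsed once at startup by parse_ip/1; 'undefined' lets the OS pick
    {client_ports,  20},            % number of client sockets; N ports allow N * 256 in-flight request ids
    {unreachable_timeout, 60},      % seconds before restore_upstream_server/1 re-activates a failed server
    {server_status_metrics_enabled, false},
    {servers_pool,
     [{pool_a, [{{10,0,0,1}, 1812, <<"secret1">>},
                {{10,0,0,2}, 1812, <<"secret2">>, [{retries, 5}]}]}]}
]}].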
eead653fa3cf48b937691beef6939c039b422f7edee94ec1b716f2768242085e
Minoru/hakyll-convert
OutputFormat.hs
{-# LANGUAGE OverloadedStrings #-} module Spec.OutputFormat (tests) where import Data.Default import Data.Maybe (isJust) import qualified Data.Text as T import Hakyll.Convert.Common (DistilledPost (..)) import Hakyll.Convert.OutputFormat import Spec.SpecHelpers import Test.Tasty (TestTree, testGroup) import Test.Tasty.HUnit import Test.Tasty.QuickCheck tests :: TestTree tests = testGroup "OutputFormat" [ validOutputFormatTests, formatPathTests ] validOutputFormatTests :: TestTree validOutputFormatTests = testGroup "`validOutputFormat`" [ falseOnEmptyFormat, synchronyWithFormatPath ] where falseOnEmptyFormat = testCase "returns False if format string is empty" (validOutputFormat "" @?= False) synchronyWithFormatPath = testProperty "returns False if `formatPath` returns `Nothing`, otherwise True" ( \format -> let format' = T.pack format result = validOutputFormat format' formatPathResult = isJust (formatPath format' def) in not (null format) ==> (result && formatPathResult) || (not result && not formatPathResult) ) formatPathTests :: TestTree formatPathTests = testGroup "`formatPath`" [ noChange, lowercaseO, lowercaseS, lowercaseY, uppercaseY, lowercaseM, lowercaseD, uppercaseH, uppercaseM, uppercaseS, complexExamples, abortsProcessingOnUnknownFormat ] where noChange = let input = "Hello, world!/2020-09-03-test.markdown" in testCase "does not change text that has no percent signs in it" (formatPath input def @?= Just input) lowercaseO = testGroup "%o is replaced by the original filepath" [ testCase "works with HTTP schema" (formatPath "%o" (def {dpUri = ""}) @?= Just "post"), testCase "works with HTTPS schema" (formatPath "%o" (def {dpUri = ""}) @?= Just "post"), testCase "trailing slashes are removed" (formatPath "%o" (def {dpUri = "-09-03-hello////"}) @?= Just "2020-09-03-hello"), testCase "file extension is removed" (formatPath "%o" (def {dpUri = "-post.html"}) @?= Just "first-post"), testCase "all file extensions are removed" (formatPath "%o" (def {dpUri = "-post.aspx.gz"}) @?= Just "first-post") ] lowercaseS = testCase "%s is replaced by the original slug" (formatPath "%s" (def {dpUri = "-is-my-birthday.php"}) @?= Just "today-is-my-birthday") lowercaseY = testCase "%y is replaced by the two-digit publication year" (formatPath "%y" (def {dpDate = fromGregorian 1917 01 01 0 0 0}) @?= Just "17") uppercaseY = testCase "%Y is replaced by the four-digit publication year" (formatPath "%Y" (def {dpDate = fromGregorian 1917 1 1 0 0 0}) @?= Just "1917") lowercaseM = testCase "%m is replaced by the two-digit publication month" (formatPath "%m" (def {dpDate = fromGregorian 2011 3 1 2 3 4}) @?= Just "03") lowercaseD = testCase "%d is replaced by the two-digit publication day" (formatPath "%d" (def {dpDate = fromGregorian 2013 1 31 0 0 0}) @?= Just "31") uppercaseH = testCase "%H is replaced by the two-digit publication hour, 00 to 23" (formatPath "%H" (def {dpDate = fromGregorian 2014 1 1 23 0 0}) @?= Just "23") uppercaseM = testCase "%M is replaced by the two-digit publication minute" (formatPath "%M" (def {dpDate = fromGregorian 2015 1 2 3 59 0}) @?= Just "59") uppercaseS = testCase "%S is replaced by the two-digit publication second" (formatPath "%S" (def {dpDate = fromGregorian 2016 1 2 3 4 0}) @?= Just "00") complexExamples = testGroup "format string can contain multiple formats" [ helper "%H:%M:%S" "18:30:02", helper "old/%Y/%m/%d-%H%M%S-%s.html" "old/2003/12/13-183002-hello-world.html", helper "/posts/%y-%m-%d-%H%M%S-%s/" "/posts/03-12-13-183002-hello-world/", helper "/migrated/%o" 
"/migrated/~joe/hello-world", helper "99.9%% true/%o" "99.9% true/~joe/hello-world" ] where post = DistilledPost { dpUri = "/~joe/hello-world.php", dpBody = "", dpTitle = Nothing, dpTags = [], dpCategories = [], dpDate = fromGregorian 2003 12 13 18 30 2 } helper format expected = testCase format (formatPath (T.pack format) post @?= Just expected) abortsProcessingOnUnknownFormat = testGroup "returns Nothing upon encountering an unsupported format" [ testCase "unknown format %x" (formatPath "%H%M%S-%x.html" def @?= Nothing), testCase "no format specifier after percent sign" (formatPath "%H%M%" def @?= Nothing) ]
null
https://raw.githubusercontent.com/Minoru/hakyll-convert/9e5006b996586d864c5f3c6f716f3c10aeb51bbb/test/spec/Spec/OutputFormat.hs
haskell
# LANGUAGE OverloadedStrings #
module Spec.OutputFormat (tests) where import Data.Default import Data.Maybe (isJust) import qualified Data.Text as T import Hakyll.Convert.Common (DistilledPost (..)) import Hakyll.Convert.OutputFormat import Spec.SpecHelpers import Test.Tasty (TestTree, testGroup) import Test.Tasty.HUnit import Test.Tasty.QuickCheck tests :: TestTree tests = testGroup "OutputFormat" [ validOutputFormatTests, formatPathTests ] validOutputFormatTests :: TestTree validOutputFormatTests = testGroup "`validOutputFormat`" [ falseOnEmptyFormat, synchronyWithFormatPath ] where falseOnEmptyFormat = testCase "returns False if format string is empty" (validOutputFormat "" @?= False) synchronyWithFormatPath = testProperty "returns False if `formatPath` returns `Nothing`, otherwise True" ( \format -> let format' = T.pack format result = validOutputFormat format' formatPathResult = isJust (formatPath format' def) in not (null format) ==> (result && formatPathResult) || (not result && not formatPathResult) ) formatPathTests :: TestTree formatPathTests = testGroup "`formatPath`" [ noChange, lowercaseO, lowercaseS, lowercaseY, uppercaseY, lowercaseM, lowercaseD, uppercaseH, uppercaseM, uppercaseS, complexExamples, abortsProcessingOnUnknownFormat ] where noChange = let input = "Hello, world!/2020-09-03-test.markdown" in testCase "does not change text that has no percent signs in it" (formatPath input def @?= Just input) lowercaseO = testGroup "%o is replaced by the original filepath" [ testCase "works with HTTP schema" (formatPath "%o" (def {dpUri = ""}) @?= Just "post"), testCase "works with HTTPS schema" (formatPath "%o" (def {dpUri = ""}) @?= Just "post"), testCase "trailing slashes are removed" (formatPath "%o" (def {dpUri = "-09-03-hello////"}) @?= Just "2020-09-03-hello"), testCase "file extension is removed" (formatPath "%o" (def {dpUri = "-post.html"}) @?= Just "first-post"), testCase "all file extensions are removed" (formatPath "%o" (def {dpUri = "-post.aspx.gz"}) @?= Just "first-post") ] lowercaseS = testCase "%s is replaced by the original slug" (formatPath "%s" (def {dpUri = "-is-my-birthday.php"}) @?= Just "today-is-my-birthday") lowercaseY = testCase "%y is replaced by the two-digit publication year" (formatPath "%y" (def {dpDate = fromGregorian 1917 01 01 0 0 0}) @?= Just "17") uppercaseY = testCase "%Y is replaced by the four-digit publication year" (formatPath "%Y" (def {dpDate = fromGregorian 1917 1 1 0 0 0}) @?= Just "1917") lowercaseM = testCase "%m is replaced by the two-digit publication month" (formatPath "%m" (def {dpDate = fromGregorian 2011 3 1 2 3 4}) @?= Just "03") lowercaseD = testCase "%d is replaced by the two-digit publication day" (formatPath "%d" (def {dpDate = fromGregorian 2013 1 31 0 0 0}) @?= Just "31") uppercaseH = testCase "%H is replaced by the two-digit publication hour, 00 to 23" (formatPath "%H" (def {dpDate = fromGregorian 2014 1 1 23 0 0}) @?= Just "23") uppercaseM = testCase "%M is replaced by the two-digit publication minute" (formatPath "%M" (def {dpDate = fromGregorian 2015 1 2 3 59 0}) @?= Just "59") uppercaseS = testCase "%S is replaced by the two-digit publication second" (formatPath "%S" (def {dpDate = fromGregorian 2016 1 2 3 4 0}) @?= Just "00") complexExamples = testGroup "format string can contain multiple formats" [ helper "%H:%M:%S" "18:30:02", helper "old/%Y/%m/%d-%H%M%S-%s.html" "old/2003/12/13-183002-hello-world.html", helper "/posts/%y-%m-%d-%H%M%S-%s/" "/posts/03-12-13-183002-hello-world/", helper "/migrated/%o" "/migrated/~joe/hello-world", helper "99.9%% 
true/%o" "99.9% true/~joe/hello-world" ] where post = DistilledPost { dpUri = "/~joe/hello-world.php", dpBody = "", dpTitle = Nothing, dpTags = [], dpCategories = [], dpDate = fromGregorian 2003 12 13 18 30 2 } helper format expected = testCase format (formatPath (T.pack format) post @?= Just expected) abortsProcessingOnUnknownFormat = testGroup "returns Nothing upon encountering an unsupported format" [ testCase "unknown format %x" (formatPath "%H%M%S-%x.html" def @?= Nothing), testCase "no format specifier after percent sign" (formatPath "%H%M%" def @?= Nothing) ]
0fcfffd9732086ed213030f9fee4310b3934b936be2f77eeea1c8f63cb904747
aitorres/firelink
LivenessAnalyser.hs
module FireLink.BackEnd.LivenessAnalyser ( def, use, InterferenceGraph (..), livenessAnalyser, generateInterferenceGraph', LineLiveVariables(..), ProgramPoint(..), def1, use1 ) where import Control.Monad.State import Data.List (intercalate) import Data.Maybe (catMaybes, fromJust, isJust) import Debug.Trace (trace) import FireLink.BackEnd.CodeGenerator import FireLink.BackEnd.FlowGraphGenerator import FireLink.BackEnd.Utils import TACType import qualified Data.Graph as Graph import qualified Data.Map.Strict as Map import qualified Data.Set as Set -- | Operations that consists of an assignment of a value to a lvalue -- | TODO: Add pointer operations here when their implementation is ready assignableOperations :: [Operation] assignableOperations = [Assign, Add, Minus, Sub, Mult, Div, Mod, Get, Call, Set, Load] -- | Calculate variable definitions of a basic block, used by data-flow analysis algorithm for liveness analysis -- | Mathematically, def[B] = union of def[n] for n in B.indices def :: BasicBlock -> Set.Set TACSymEntry def = foldr def' Set.empty where def' :: TAC -> Set.Set TACSymEntry -> Set.Set TACSymEntry def' tac s = s `Set.union` def1 tac | Calculates definition for a single three - address code instruction . Basically , the left side of an instruction -- | that assigns a variable. def1 :: TAC -> Set.Set TACSymEntry def1 (ThreeAddressCode op (Just (Id v)) _ _) | op `elem` assignableOperations = Set.singleton v -- | Read will end setting a value to its parameter def1 (ThreeAddressCode Read _ (Just (Id v)) _) = Set.singleton v -- | Casting also assigns a value def1 t@(ThreeAddressCode (Cast _ _) (Just (Id v)) _ _) = Set.singleton v def1 _ = Set.empty -- | Calculate used variables in a basic block prior to any definition of the same variable inside the -- | same block. its mathematical definition is as follows: -- | use[B] = use[1] U (use[2] - def[1]) U (use[3] - def[2] - def[1]) U ... use :: BasicBlock -> Set.Set TACSymEntry use = go [Set.empty] where -- | accumulatedDefs has the current differences from the useB mathematical definition go :: [Set.Set TACSymEntry] -> BasicBlock -> Set.Set TACSymEntry go accumulatedDefs (i : is) = diffOfList (use1 i : accumulatedDefs) `Set.union` go (def1 i : accumulatedDefs) is go _ [] = Set.empty diffOfList :: Ord a => [Set.Set a] -> Set.Set a diffOfList (s : ss) = foldl Set.difference s ss -- | Calculate used variables in a single instruction. 
That is, their operands -- | TODO: review operations on the `otherwise` branch to see if the used values are actually ok use1 :: TAC -> Set.Set TACSymEntry use1 (ThreeAddressCode op u v w) | op `elem` assignableOperations = Set.fromList $ catTACSymEntries $ catMaybes [v, w] | isCast op = Set.fromList $ catTACSymEntries $ catMaybes [v, w] | otherwise = Set.fromList $ catTACSymEntries $ catMaybes [u, v, w] where isCast :: Operation -> Bool isCast (Cast _ _) = True isCast _ = False -- | Semantic alias for Set.Set TACSymEntry type LiveVariables = Set.Set TACSymEntry -- | (block id, instruction index) newtype ProgramPoint = ProgramPoint (Int, Int) deriving (Eq) instance Show ProgramPoint where show (ProgramPoint p) = show p data LineLiveVariables = LineLiveVariables { llvInstrId :: !ProgramPoint -- ^ Program point of this information , llvInLiveVariables :: !LiveVariables -- ^ live variables upon execution this block , llvOutLiveVariables :: !LiveVariables -- ^ live variables after execution this block } instance Show LineLiveVariables where show LineLiveVariables { llvInstrId = blockId , llvInLiveVariables = blockIn , llvOutLiveVariables = blockOut } = "Block #" ++ show blockId ++ " in = " ++ intercalate ", " (map show (Set.toList blockIn)) ++ " out = " ++ intercalate ", " (map show (Set.toList blockOut)) | Semantic alias for ( LivenessIn , LivenessOut ) type LivenessInOut = (LiveVariables, LiveVariables) type DataFlowInfo = Map.Map Graph.Vertex LivenessInOut -- | Calculates live variables at end livenessAnalyser :: FlowGraph -> [LineLiveVariables] livenessAnalyser fg@(numberedBlocks, flowGraph) = map (\(blockId, (blockIn, blockOut)) -> LineLiveVariables { llvInstrId = blockId , llvInLiveVariables = blockIn , llvOutLiveVariables = blockOut }) $ concatMap (livenessForInstruction . fst) $ Map.toList convergedInOut where initialInOut = Map.fromList $ zip graphNodes $ repeat (Set.empty, Set.empty) convergedInOut = fixedPoint livenessAnalyser' initialInOut -- | Computes for each block, the liveness information of each of their instructions -- | This is done after the dataflow execution is done and it helps in building refined -- | information about live variables in each instruction livenessForInstruction :: Int -> [(ProgramPoint, LivenessInOut)] livenessForInstruction blockId = let -- Let sn be the last instruction of a block, then out[sn] = out[B], by dataflow properties -- We can start computing the in/out for each instruction in a backwards fashion, starting -- by out[B] livenessOut = snd $ convergedInOut Map.! blockId reversedBasicBlock = reverse $ zip [0..] basicBlock basicBlock = snd $ numberedBlocks !! blockId in if blockId == (-1) || blockId == length numberedBlocks then [] else reverse $ map (\(i, inOut) -> (ProgramPoint (blockId, i), inOut)) $ go livenessOut reversedBasicBlock where -- | Computes the current instruction in/out, suppling in[s] as the out of previous instruction -- | due to the not alteration of flow in a basic block go :: LiveVariables -> [(Int, TAC)] -> [(Int, LivenessInOut)] go _ [] = [] go tacOut ((index, tac) : itacs) = let uset = use1 tac deft = def1 tac tacIn = uset `Set.union` (tacOut Set.\\ deft) in (index, (tacIn, tacOut)) : go tacIn itacs f :: (BasicBlock -> Set.Set TACSymEntry) -> Graph.Vertex -> Set.Set TACSymEntry f fun vertex | vertex == (-1) || vertex == length numberedBlocks = Set.empty | otherwise = fun $ snd $ head $ filter ((vertex ==) . 
fst) numberedBlocks graphNodes :: [Graph.Vertex] graphNodes = Graph.vertices flowGraph useMap :: Map.Map Graph.Vertex LiveVariables useMap = Map.fromList $ zip graphNodes $ map (f use) graphNodes defMap :: Map.Map Graph.Vertex LiveVariables defMap = Map.fromList $ zip graphNodes $ map (f def) graphNodes useB :: Graph.Vertex -> LiveVariables useB = (useMap Map.!) defB :: Graph.Vertex -> LiveVariables defB = (defMap Map.!) successorsMap :: Map.Map Graph.Vertex (Set.Set Graph.Vertex) successorsMap = Map.fromList $ zip graphNodes $ map successors' graphNodes successors :: Graph.Vertex -> Set.Set Graph.Vertex successors = (successorsMap Map.!) successors' :: Graph.Vertex -> Set.Set Graph.Vertex successors' vertex = let graphEdges = Graph.edges flowGraph outgoingEdges = filter ((== vertex) . fst) graphEdges successors' = map snd outgoingEdges in Set.fromList successors' exit :: Graph.Vertex exit = exitVertex fg | First tuple corresponds too livenessIn , second one to livenessOut livenessAnalyser' :: DataFlowInfo -> DataFlowInfo livenessAnalyser' livenessInOutZipped = go livenessInOutZipped graphNodes where go :: DataFlowInfo -> [Graph.Vertex] -> DataFlowInfo go dfi [] = dfi go dfi (blockId : blockIds) = if blockId == exit then go dfi blockIds else let outB = Set.unions $ Set.map (fst . (dfi Map.!)) $ successors blockId inB = useB blockId `Set.union` (outB Set.\\ defB blockId) in go (Map.insert blockId (inB, outB) dfi) blockIds -- | A map that matches variable names to integer representations, -- | and a graph that matches such representations' mutual interference type InterferenceGraph = (Map.Map Int TACSymEntry, Graph.Graph) -- | Given a whole program, generates the interference graph by using liveness information -- | Also returns the liveness analysis result. generateInterferenceGraph' :: FlowGraph -> (InterferenceGraph, [LineLiveVariables]) generateInterferenceGraph' flowGraph'@(numberedBlocks, flowGraph) = ( (Map.fromList $ map (\(i, j) -> (j, i)) $ Map.toList interferenceGraphVertexMap , Graph.buildG (0, Set.size programVariables - 1) interferenceGraphEdges), livenessAnalysis) where livenessAnalysis :: [LineLiveVariables] livenessAnalysis = livenessAnalyser flowGraph' getTac :: ProgramPoint -> TAC getTac (ProgramPoint (i, j)) = let basicBlock = snd $ head $ filter ((== i) . fst) numberedBlocks in basicBlock !! j getEdgesForSingleInstr :: LineLiveVariables -> Set.Set Graph.Edge getEdgesForSingleInstr LineLiveVariables { llvInstrId = instrId , llvOutLiveVariables = out } = let d = defN tac o = outN out tac = getTac instrId res = (o `Set.cartesianProduct` d) `Set.union` (d `Set.cartesianProduct` o) in Set.filter (uncurry (/=)) $ case tac of ThreeAddressCode Assign (Just (Id v)) _ _ -> let varId = vertexMapLookup v sing = Set.singleton varId toDelete = (sing `Set.cartesianProduct` d) `Set.union` (d `Set.cartesianProduct` sing) in res Set.\\ toDelete _ -> res defN :: TAC -> Set.Set Graph.Vertex defN = Set.map vertexMapLookup . def1 outN :: LiveVariables -> Set.Set Graph.Vertex outN = Set.map vertexMapLookup programVariables :: Set.Set TACSymEntry programVariables = getProgramVariables numberedBlocks interferenceGraphVertexMap :: Map.Map TACSymEntry Graph.Vertex interferenceGraphVertexMap = Map.fromList $ zip (Set.toList programVariables) [0..] vertexMapLookup :: TACSymEntry -> Int vertexMapLookup = (interferenceGraphVertexMap Map.!) interferenceGraphEdges :: [Graph.Edge] interferenceGraphEdges = Set.toList $ Set.unions $ map getEdgesForSingleInstr livenessAnalysis
null
https://raw.githubusercontent.com/aitorres/firelink/075d7aad1c053a54e39a27d8db7c3c719d243225/src/FireLink/BackEnd/LivenessAnalyser.hs
haskell
| Operations that consist of an assignment of a value to an lvalue | TODO: Add pointer operations here when their implementation is ready | Calculate variable definitions of a basic block, used by the data-flow analysis algorithm for liveness analysis | Mathematically, def[B] = union of def[n] for n in B.indices | that assigns a variable. | Read will end up setting a value to its parameter | Casting also assigns a value | Calculate used variables in a basic block prior to any definition of the same variable inside the | same block. Its mathematical definition is as follows: | use[B] = use[1] U (use[2] - def[1]) U (use[3] - def[2] - def[1]) U ... | accumulatedDefs holds the current differences from the use[B] mathematical definition | Calculate used variables in a single instruction, that is, its operands | TODO: review operations on the `otherwise` branch to see if the used values are actually ok | Semantic alias for Set.Set TACSymEntry | (block id, instruction index) ^ Program point of this information ^ live variables upon executing this block ^ live variables after executing this block | Calculates live variables at the end | Computes, for each block, the liveness information of each of its instructions | This is done after the dataflow execution is done and it helps in building refined | information about live variables in each instruction Let sn be the last instruction of a block; then out[sn] = out[B], by dataflow properties We can start computing the in/out for each instruction in a backwards fashion, starting from out[B] | Computes the current instruction's in/out, supplying in[s] as the out of the previous instruction | since flow is not altered within a basic block | A map that matches variable names to integer representations, | and a graph that matches such representations' mutual interference | Given a whole program, generates the interference graph by using liveness information | Also returns the liveness analysis result.
module FireLink.BackEnd.LivenessAnalyser ( def, use, InterferenceGraph (..), livenessAnalyser, generateInterferenceGraph', LineLiveVariables(..), ProgramPoint(..), def1, use1 ) where import Control.Monad.State import Data.List (intercalate) import Data.Maybe (catMaybes, fromJust, isJust) import Debug.Trace (trace) import FireLink.BackEnd.CodeGenerator import FireLink.BackEnd.FlowGraphGenerator import FireLink.BackEnd.Utils import TACType import qualified Data.Graph as Graph import qualified Data.Map.Strict as Map import qualified Data.Set as Set assignableOperations :: [Operation] assignableOperations = [Assign, Add, Minus, Sub, Mult, Div, Mod, Get, Call, Set, Load] def :: BasicBlock -> Set.Set TACSymEntry def = foldr def' Set.empty where def' :: TAC -> Set.Set TACSymEntry -> Set.Set TACSymEntry def' tac s = s `Set.union` def1 tac | Calculates definition for a single three - address code instruction . Basically , the left side of an instruction def1 :: TAC -> Set.Set TACSymEntry def1 (ThreeAddressCode op (Just (Id v)) _ _) | op `elem` assignableOperations = Set.singleton v def1 (ThreeAddressCode Read _ (Just (Id v)) _) = Set.singleton v def1 t@(ThreeAddressCode (Cast _ _) (Just (Id v)) _ _) = Set.singleton v def1 _ = Set.empty use :: BasicBlock -> Set.Set TACSymEntry use = go [Set.empty] where go :: [Set.Set TACSymEntry] -> BasicBlock -> Set.Set TACSymEntry go accumulatedDefs (i : is) = diffOfList (use1 i : accumulatedDefs) `Set.union` go (def1 i : accumulatedDefs) is go _ [] = Set.empty diffOfList :: Ord a => [Set.Set a] -> Set.Set a diffOfList (s : ss) = foldl Set.difference s ss use1 :: TAC -> Set.Set TACSymEntry use1 (ThreeAddressCode op u v w) | op `elem` assignableOperations = Set.fromList $ catTACSymEntries $ catMaybes [v, w] | isCast op = Set.fromList $ catTACSymEntries $ catMaybes [v, w] | otherwise = Set.fromList $ catTACSymEntries $ catMaybes [u, v, w] where isCast :: Operation -> Bool isCast (Cast _ _) = True isCast _ = False type LiveVariables = Set.Set TACSymEntry newtype ProgramPoint = ProgramPoint (Int, Int) deriving (Eq) instance Show ProgramPoint where show (ProgramPoint p) = show p data LineLiveVariables = LineLiveVariables } instance Show LineLiveVariables where show LineLiveVariables { llvInstrId = blockId , llvInLiveVariables = blockIn , llvOutLiveVariables = blockOut } = "Block #" ++ show blockId ++ " in = " ++ intercalate ", " (map show (Set.toList blockIn)) ++ " out = " ++ intercalate ", " (map show (Set.toList blockOut)) | Semantic alias for ( LivenessIn , LivenessOut ) type LivenessInOut = (LiveVariables, LiveVariables) type DataFlowInfo = Map.Map Graph.Vertex LivenessInOut livenessAnalyser :: FlowGraph -> [LineLiveVariables] livenessAnalyser fg@(numberedBlocks, flowGraph) = map (\(blockId, (blockIn, blockOut)) -> LineLiveVariables { llvInstrId = blockId , llvInLiveVariables = blockIn , llvOutLiveVariables = blockOut }) $ concatMap (livenessForInstruction . fst) $ Map.toList convergedInOut where initialInOut = Map.fromList $ zip graphNodes $ repeat (Set.empty, Set.empty) convergedInOut = fixedPoint livenessAnalyser' initialInOut livenessForInstruction :: Int -> [(ProgramPoint, LivenessInOut)] livenessForInstruction blockId = let livenessOut = snd $ convergedInOut Map.! blockId reversedBasicBlock = reverse $ zip [0..] basicBlock basicBlock = snd $ numberedBlocks !! 
blockId in if blockId == (-1) || blockId == length numberedBlocks then [] else reverse $ map (\(i, inOut) -> (ProgramPoint (blockId, i), inOut)) $ go livenessOut reversedBasicBlock where go :: LiveVariables -> [(Int, TAC)] -> [(Int, LivenessInOut)] go _ [] = [] go tacOut ((index, tac) : itacs) = let uset = use1 tac deft = def1 tac tacIn = uset `Set.union` (tacOut Set.\\ deft) in (index, (tacIn, tacOut)) : go tacIn itacs f :: (BasicBlock -> Set.Set TACSymEntry) -> Graph.Vertex -> Set.Set TACSymEntry f fun vertex | vertex == (-1) || vertex == length numberedBlocks = Set.empty | otherwise = fun $ snd $ head $ filter ((vertex ==) . fst) numberedBlocks graphNodes :: [Graph.Vertex] graphNodes = Graph.vertices flowGraph useMap :: Map.Map Graph.Vertex LiveVariables useMap = Map.fromList $ zip graphNodes $ map (f use) graphNodes defMap :: Map.Map Graph.Vertex LiveVariables defMap = Map.fromList $ zip graphNodes $ map (f def) graphNodes useB :: Graph.Vertex -> LiveVariables useB = (useMap Map.!) defB :: Graph.Vertex -> LiveVariables defB = (defMap Map.!) successorsMap :: Map.Map Graph.Vertex (Set.Set Graph.Vertex) successorsMap = Map.fromList $ zip graphNodes $ map successors' graphNodes successors :: Graph.Vertex -> Set.Set Graph.Vertex successors = (successorsMap Map.!) successors' :: Graph.Vertex -> Set.Set Graph.Vertex successors' vertex = let graphEdges = Graph.edges flowGraph outgoingEdges = filter ((== vertex) . fst) graphEdges successors' = map snd outgoingEdges in Set.fromList successors' exit :: Graph.Vertex exit = exitVertex fg | First tuple corresponds too livenessIn , second one to livenessOut livenessAnalyser' :: DataFlowInfo -> DataFlowInfo livenessAnalyser' livenessInOutZipped = go livenessInOutZipped graphNodes where go :: DataFlowInfo -> [Graph.Vertex] -> DataFlowInfo go dfi [] = dfi go dfi (blockId : blockIds) = if blockId == exit then go dfi blockIds else let outB = Set.unions $ Set.map (fst . (dfi Map.!)) $ successors blockId inB = useB blockId `Set.union` (outB Set.\\ defB blockId) in go (Map.insert blockId (inB, outB) dfi) blockIds type InterferenceGraph = (Map.Map Int TACSymEntry, Graph.Graph) generateInterferenceGraph' :: FlowGraph -> (InterferenceGraph, [LineLiveVariables]) generateInterferenceGraph' flowGraph'@(numberedBlocks, flowGraph) = ( (Map.fromList $ map (\(i, j) -> (j, i)) $ Map.toList interferenceGraphVertexMap , Graph.buildG (0, Set.size programVariables - 1) interferenceGraphEdges), livenessAnalysis) where livenessAnalysis :: [LineLiveVariables] livenessAnalysis = livenessAnalyser flowGraph' getTac :: ProgramPoint -> TAC getTac (ProgramPoint (i, j)) = let basicBlock = snd $ head $ filter ((== i) . fst) numberedBlocks in basicBlock !! j getEdgesForSingleInstr :: LineLiveVariables -> Set.Set Graph.Edge getEdgesForSingleInstr LineLiveVariables { llvInstrId = instrId , llvOutLiveVariables = out } = let d = defN tac o = outN out tac = getTac instrId res = (o `Set.cartesianProduct` d) `Set.union` (d `Set.cartesianProduct` o) in Set.filter (uncurry (/=)) $ case tac of ThreeAddressCode Assign (Just (Id v)) _ _ -> let varId = vertexMapLookup v sing = Set.singleton varId toDelete = (sing `Set.cartesianProduct` d) `Set.union` (d `Set.cartesianProduct` sing) in res Set.\\ toDelete _ -> res defN :: TAC -> Set.Set Graph.Vertex defN = Set.map vertexMapLookup . 
def1 outN :: LiveVariables -> Set.Set Graph.Vertex outN = Set.map vertexMapLookup programVariables :: Set.Set TACSymEntry programVariables = getProgramVariables numberedBlocks interferenceGraphVertexMap :: Map.Map TACSymEntry Graph.Vertex interferenceGraphVertexMap = Map.fromList $ zip (Set.toList programVariables) [0..] vertexMapLookup :: TACSymEntry -> Int vertexMapLookup = (interferenceGraphVertexMap Map.!) interferenceGraphEdges :: [Graph.Edge] interferenceGraphEdges = Set.toList $ Set.unions $ map getEdgesForSingleInstr livenessAnalysis
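The comments in LivenessAnalyser.hs state the standard backward liveness equations: use[B] collects variables read before any redefinition, def[B] collects assigned variables, and the converged solution satisfies out[B] = union of in[S] over the successors S of B, with in[B] = use[B] U (out[B] - def[B]). A self-contained sketch of that iteration on a toy two-block graph, mirroring the fixedPoint call above (plain String variables and Int block ids here, not FireLink's TACSymEntry or FlowGraph types):

import qualified Data.Map.Strict as Map
import qualified Data.Set as Set

type Var   = String
type Block = Int

-- Toy control-flow facts: per-block use/def sets and successor lists.
useB, defB :: Map.Map Block (Set.Set Var)
useB = Map.fromList [(0, Set.fromList ["a"]), (1, Set.fromList ["b"])]
defB = Map.fromList [(0, Set.fromList ["b"]), (1, Set.fromList ["c"])]

succs :: Map.Map Block [Block]
succs = Map.fromList [(0, [1]), (1, [])]

-- One round of the backward dataflow equations over all blocks.
step :: Map.Map Block (Set.Set Var, Set.Set Var) -> Map.Map Block (Set.Set Var, Set.Set Var)
step inOut = Map.mapWithKey update inOut
  where
    update b _ =
      let out = Set.unions [fst (inOut Map.! s) | s <- succs Map.! b]
          inB = (useB Map.! b) `Set.union` (out Set.\\ (defB Map.! b))
      in (inB, out)

-- Iterate until nothing changes, i.e. until a fixed point is reached.
fixedPoint :: Eq a => (a -> a) -> a -> a
fixedPoint f x = let x' = f x in if x' == x then x else fixedPoint f x'

main :: IO ()
main = print (fixedPoint step (Map.map (const (Set.empty, Set.empty)) useB))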
bf48ca0d5b76f59a96c0b81f63917a57df438a0b9bac50c28b9f2d00ed914ab6
JeanHuguesdeRaigniac/effects-landscape
AppPolysemy.hs
module AppPolysemy where import qualified Data.Map as Map import Internal.Polysemy import Types type Eval a = Sem '[Error String, Reader Env, State Steps, Writer Variables, Trace, Embed IO] a runEval :: Env -> Steps -> Eval Value -> IO (Either String Value, Steps, Variables) runEval env steps ev = (\(variables, (steps', result)) -> (result, steps', variables)) <$> runM (traceToStdout (runWriter (runState steps (runReader env (runError ev))))) tick :: Member (State Steps) r => Sem r () tick = do st <- get put (st + 1) eval :: Members '[ State Steps, Writer Variables, Reader Env, Error String, Trace ] r => Exp -> Sem r Value eval (Lit i) = do tick embeddedLog ("Lit: " <> show i) return $ IntVal i eval (Var n) = do tick tell [n] env <- ask case Map.lookup n env of Nothing -> throwError ("unbound variable: " ++ n) Just val -> return val eval (Plus e1 e2) = do tick e1' <- eval e1 e2' <- eval e2 case (e1', e2') of (IntVal i1, IntVal i2) -> return $ IntVal (i1 + i2) _anyOtherCombination -> throwError "type error in addition" eval (Abs n e) = do tick env <- ask return $ FunVal env n e eval (App e1 e2) = do tick val1 <- eval e1 val2 <- eval e2 case val1 of FunVal env' n body -> do step <- get embeddedLog ("Step: " <> show step) embeddedLog ("Current env: " <> show env') embeddedLog ("Modified env: " <> show (Map.insert n val2 env')) local (const (Map.insert n val2 env')) (eval body) IntVal _ -> throwError "type error in application"
null
https://raw.githubusercontent.com/JeanHuguesdeRaigniac/effects-landscape/e44aea11053ac4db85b862fab027d3777d35e232/app/AppPolysemy.hs
haskell
module AppPolysemy where import qualified Data.Map as Map import Internal.Polysemy import Types type Eval a = Sem '[Error String, Reader Env, State Steps, Writer Variables, Trace, Embed IO] a runEval :: Env -> Steps -> Eval Value -> IO (Either String Value, Steps, Variables) runEval env steps ev = (\(variables, (steps', result)) -> (result, steps', variables)) <$> runM (traceToStdout (runWriter (runState steps (runReader env (runError ev))))) tick :: Member (State Steps) r => Sem r () tick = do st <- get put (st + 1) eval :: Members '[ State Steps, Writer Variables, Reader Env, Error String, Trace ] r => Exp -> Sem r Value eval (Lit i) = do tick embeddedLog ("Lit: " <> show i) return $ IntVal i eval (Var n) = do tick tell [n] env <- ask case Map.lookup n env of Nothing -> throwError ("unbound variable: " ++ n) Just val -> return val eval (Plus e1 e2) = do tick e1' <- eval e1 e2' <- eval e2 case (e1', e2') of (IntVal i1, IntVal i2) -> return $ IntVal (i1 + i2) _anyOtherCombination -> throwError "type error in addition" eval (Abs n e) = do tick env <- ask return $ FunVal env n e eval (App e1 e2) = do tick val1 <- eval e1 val2 <- eval e2 case val1 of FunVal env' n body -> do step <- get embeddedLog ("Step: " <> show step) embeddedLog ("Current env: " <> show env') embeddedLog ("Modified env: " <> show (Map.insert n val2 env')) local (const (Map.insert n val2 env')) (eval body) IntVal _ -> throwError "type error in application"
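AppPolysemy.hs above runs an interpreter through Polysemy's Error, Reader, State, Writer and Trace effects, and the nesting order in runEval dictates the shape of the returned tuple. For contrast, here is a dependency-free sketch of the same evaluation rules with a plain Either for errors; the Exp, Value and Env types are re-declared locally because Types and Internal.Polysemy are not part of this record:

import qualified Data.Map as Map

data Exp   = Lit Int | Var String | Plus Exp Exp | Abs String Exp | App Exp Exp
data Value = IntVal Int | FunVal Env String Exp
type Env   = Map.Map String Value

-- Plain Either-based evaluator: same rules as eval above, no effect system,
-- and therefore no tick counter, variable log or tracing.
evalPure :: Env -> Exp -> Either String Value
evalPure _   (Lit i)      = Right (IntVal i)
evalPure env (Var n)      = maybe (Left ("unbound variable: " ++ n)) Right (Map.lookup n env)
evalPure env (Plus e1 e2) = do
  v1 <- evalPure env e1
  v2 <- evalPure env e2
  case (v1, v2) of
    (IntVal i1, IntVal i2) -> Right (IntVal (i1 + i2))
    _                      -> Left "type error in addition"
evalPure env (Abs n e)    = Right (FunVal env n e)
evalPure env (App e1 e2)  = do
  f <- evalPure env e1
  a <- evalPure env e2
  case f of
    FunVal env' n body -> evalPure (Map.insert n a env') body
    IntVal _           -> Left "type error in application"

main :: IO ()
main = case evalPure Map.empty (App (Abs "x" (Plus (Var "x") (Lit 1))) (Lit 41)) of
  Right (IntVal n)    -> print n            -- 42
  Right (FunVal _ _ _) -> putStrLn "function"
  Left err            -> putStrLn err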
5357d9b3a06e1a25a4bf64477fb761c071dbb243e1f3233d75643cc6d9cc543e
DSiSc/why3
eliminate_definition.ml
(********************************************************************) (* *) The Why3 Verification Platform / The Why3 Development Team Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University (* *) (* This software is distributed under the terms of the GNU Lesser *) General Public License version 2.1 , with the special exception (* on linking described in file LICENSE. *) (* *) (********************************************************************) open Ident open Ty open Term open Decl (** Discard definitions of built-in symbols *) let add_id undef_ls rem_ls (ls,ld) (abst,defn) = if Sls.mem ls rem_ls then abst,defn else if Sls.mem ls undef_ls then create_param_decl ls :: abst, defn else abst, (ls,ld) :: defn (** TODO: go further? such as constructor that are removed? *) let elim_abstract undef_ls rem_pr rem_ls rem_ts d = match d.d_node with | Dlogic l -> let ld, id = List.fold_right (add_id undef_ls rem_ls) l ([],[]) in ld @ (if id = [] then [] else [create_logic_decl id]) | Dind (s, l) -> let ld, id = List.fold_right (add_id undef_ls rem_ls) l ([],[]) in ld @ (if id = [] then [] else [create_ind_decl s id]) | Dprop (Paxiom,pr,_) when Spr.mem pr rem_pr -> [] | Dtype ts when Sts.mem ts rem_ts -> [] | Ddata l -> let test_id (ts,_) = not (Sts.mem ts rem_ts) in let l = List.filter test_id l in (if l = [] then [] else [create_data_decl l]) | _ -> [d] let eliminate_builtin = Trans.on_tagged_ls Printer.meta_syntax_logic (fun undef_ls -> Trans.on_tagged_pr Printer.meta_remove_prop (fun rem_pr -> Trans.on_tagged_ls Printer.meta_remove_logic (fun rem_ls -> Trans.on_tagged_ts Printer.meta_remove_type (fun rem_ts -> Trans.decl (elim_abstract undef_ls rem_pr rem_ls rem_ts) None)))) let () = Trans.register_transform "eliminate_builtin" eliminate_builtin ~desc:"Eliminate@ propositions@ and@ definitions@ of@ symbols@ \ that@ are@ builtin@ in@ the@ prover@ (see@ 'syntax'@ and@ \ 'remove'@ clauses@ in@ the@ prover's@ driver)." 
* compute the meta_remove _ * given two task one included in the other let compute_diff t1 t2 = let km = Mid.set_diff (Task.task_known t1) (Task.task_known t2) in let hdone = Hdecl.create 10 in let remove_ts acc ts = (Printer.meta_remove_type, [Theory.MAts ts])::acc in let remove_ls acc ls = (Printer.meta_remove_logic, [Theory.MAls ls])::acc in let remove_pr acc pr = (Printer.meta_remove_prop, [Theory.MApr pr])::acc in Mid.fold_left (fun acc _ decl -> if Hdecl.mem hdone decl then acc else begin Hdecl.replace hdone decl (); match decl.d_node with | Dtype ts -> remove_ts acc ts | Ddata l -> List.fold_left (fun acc (ts,_) -> remove_ts acc ts) acc l | Dparam ls -> remove_ls acc ls | Dlogic l -> List.fold_left (fun acc (ls,_) -> remove_ls acc ls) acc l | Dind (_,l) -> List.fold_left (fun acc (ls,_) -> remove_ls acc ls) acc l | Dprop (_,pr,_) -> remove_pr acc pr end) [] km let compute_diff = Trans.store (fun t1 -> Trans.store (fun t2 -> compute_diff t1 t2)) (** Eliminate definitions of functions and predicates *) let rec t_insert hd t = match t.t_node with | Tif (f1,t2,t3) -> t_if f1 (t_insert hd t2) (t_insert hd t3) | Tlet (t1,bt) -> let v,t2 = t_open_bound bt in t_let_close v t1 (t_insert hd t2) | Tcase (tl,bl) -> let br b = let pl,t1 = t_open_branch b in t_close_branch pl (t_insert hd t1) in t_case tl (List.map br bl) | _ -> TermTF.t_selecti t_equ_simp t_iff_simp hd t let add_ld which meta_rewrite_def (ls,ld) (abst,defn,axl,metas) = if which ls then let vl,e = open_ls_defn ld in let nm = ls.ls_name.id_string ^ "_def" in let pr = create_prsymbol (id_derive nm ls.ls_name) in let hd = t_app ls (List.map t_var vl) e.t_ty in let ax = t_forall_close vl [] (t_insert hd e) in let ax = create_prop_decl Paxiom pr ax in let ld = create_param_decl ls in let metas = if Sls.mem ls meta_rewrite_def then Theory.create_meta Compute.meta_rewrite [Theory.MApr pr] :: metas else metas in ld :: abst, defn, ax :: axl, metas else abst, (ls,ld) :: defn, axl, metas let elim_decl which meta_rewrite_def l = let abst,defn,axl,metas = List.fold_right (add_ld which meta_rewrite_def) l ([],[],[],[]) in let defn = if defn = [] then [] else [create_logic_decl defn] in List.rev_append (List.rev_map Theory.create_decl (abst @ defn @ axl)) metas let elim which meta_rewrite_def d = match d.d_node with | Dlogic l -> elim_decl which meta_rewrite_def l | _ -> [Theory.create_decl d] let elim_recursion d = match d.d_node with | Dlogic ([s,_] as l) when Sid.mem s.ls_name d.d_syms -> elim_decl Util.ttrue Sls.empty l | Dlogic l when List.length l > 1 -> elim_decl Util.ttrue Sls.empty l | _ -> [Theory.create_decl d] let is_struct dl = (* FIXME? Shouldn't 0 be allowed too? *) List.for_all (fun (_,ld) -> List.length (ls_defn_decrease ld) = 1) dl (* FIXME? 
We can have non-recursive functions in a group *) let elim_non_struct_recursion d = match d.d_node with | Dlogic ((s,_) :: _ as dl) when Sid.mem s.ls_name d.d_syms && not (is_struct dl) -> elim_decl Util.ttrue Sls.empty dl | _ -> [Theory.create_decl d] let elim_mutual d = match d.d_node with | Dlogic l when List.length l > 1 -> elim_decl Util.ttrue Sls.empty l | _ -> [Theory.create_decl d] let eliminate_definition_gen which = Trans.on_tagged_ls Compute.meta_rewrite_def (fun rew -> Trans.tdecl (elim which rew) None) let eliminate_definition_func = eliminate_definition_gen (fun ls -> ls.ls_value <> None) let eliminate_definition_pred = eliminate_definition_gen (fun ls -> ls.ls_value = None) let eliminate_definition = eliminate_definition_gen Util.ttrue let eliminate_recursion = Trans.tdecl elim_recursion None let eliminate_non_struct_recursion = Trans.tdecl elim_non_struct_recursion None let eliminate_mutual_recursion = Trans.tdecl elim_mutual None let () = Trans.register_transform "eliminate_definition_func" eliminate_definition_func ~desc:"Transform@ function@ definitions@ into@ axioms."; Trans.register_transform "eliminate_definition_pred" eliminate_definition_pred ~desc:"Transform@ predicate@ definitions@ into@ axioms."; Trans.register_transform "eliminate_definition" eliminate_definition ~desc:"Transform@ function@ and@ predicate@ definitions@ into@ axioms."; Trans.register_transform "eliminate_recursion" eliminate_recursion ~desc:"Same@ as@ eliminate_definition,@ but@ only@ for@ recursive@ \ definitions."; Trans.register_transform "eliminate_non_struct_recursion" eliminate_non_struct_recursion ~desc:"Same@ as@ eliminate_recursion,@ but@ only@ for@ non-structural@ \ recursive@ definitions."; Trans.register_transform "eliminate_mutual_recursion" eliminate_mutual_recursion ~desc:"Same@ as@ eliminate_recursion,@ but@ only@ for@ mutually@ \ recursive@ definitions." (** conditional transformations, only applied when polymorphic types occur *) let eliminate_definition_if_poly = Trans.on_meta Detect_polymorphism.meta_monomorphic_types_only (function | [] -> eliminate_definition | _ -> eliminate_recursion) let () = Trans.register_transform "eliminate_definition_if_poly" eliminate_definition_if_poly ~desc:"Same@ as@ eliminate_definition@ but@ only@ if@ polymorphism@ appear." 
* * * * { 2 Bisection } * * * * * * * open Task open Theory type rem = { rem_pr : Spr.t; rem_ls : Sls.t; rem_ts : Sts.t } type bisect_step = | BSdone of rem | BSstep of rem * (bool -> bisect_step) let _print_rem fmt rem = Format.fprintf fmt "@[rem_pr:@[%a@]@\nrem_ls:@[%a@]@\nrem_ts:@[%a@]@\n" (Pp.print_iter1 Spr.iter Pp.comma Pretty.print_pr) rem.rem_pr (Pp.print_iter1 Sls.iter Pp.comma Pretty.print_ls) rem.rem_ls (Pp.print_iter1 Sts.iter Pp.comma Pretty.print_ts) rem.rem_ts let rec elim_task task rem = match task with | Some ( { task_decl = { td_node = Decl decl } } as task ) - > let task = elim_task task.task_prev rem in let l = elim_abstract Sls.empty rem.rem_pr rem.rem_ls in List.fold_left l | Some task - > Task.add_tdecl ( elim_task task.task_prev rem ) task.task_decl | None - > None let rec elim_task task rem = match task with | Some ({task_decl = {td_node = Decl decl}} as task) -> let task = elim_task task.task_prev rem in let l = elim_abstract Sls.empty rem.rem_pr rem.rem_ls rem.rem_ts decl in List.fold_left Task.add_decl task l | Some task -> Task.add_tdecl (elim_task task.task_prev rem) task.task_decl | None -> None *) let add_rem rem decl = let remove_ts rem ts = { rem with rem_ts = Sts.add ts rem.rem_ts} in let remove_ls rem ls = { rem with rem_ls = Sls.add ls rem.rem_ls} in let remove_pr rem pr = { rem with rem_pr = Spr.add pr rem.rem_pr} in match decl.d_node with | Dtype ts -> remove_ts rem ts | Ddata l -> List.fold_left (fun rem (ts,_) -> remove_ts rem ts) rem l | Dparam ls -> remove_ls rem ls | Dlogic l -> List.fold_left (fun rem (ls,_) -> remove_ls rem ls) rem l | Dind (_,l) -> List.fold_left (fun rem (ls,_) -> remove_ls rem ls) rem l | Dprop (_,pr,_) -> remove_pr rem pr let _union_rem rem1 rem2 = { rem_ts = Sts.union rem1.rem_ts rem2.rem_ts; rem_ls = Sls.union rem1.rem_ls rem2.rem_ls; rem_pr = Spr.union rem1.rem_pr rem2.rem_pr; } let fold_sub f acc a i1 i2 = let acc = ref acc in for i=i1 to i2-1 do acc := f !acc a.(i) done; !acc let rec bisect_aux task a i1 i2 rem cont (* lt i lk *) = (* Format.eprintf "i1: %i, i2: %i@\nrem:%a@." 
i1 i2 *) (* print_rem rem; *) let call rem valid invalid = try BSstep (rem, fun b -> if b then valid () else invalid ()) with UnknownIdent _ -> invalid () in if i2 - i1 < 2 then let rem1 = add_rem rem a.(i1) in call rem1 (fun () -> assert (i2 - i1 = 1); cont rem1) (fun () -> cont rem) else let m = (i1+i2)/2 in let rem1 = fold_sub add_rem rem a m i2 in call rem1 (fun () -> bisect_aux task a i1 m rem1 cont) (fun () -> bisect_aux task a m i2 rem (fun rem1 -> (* rem c rem1 c \old(rem1) *) let rem2 = fold_sub add_rem rem1 a i1 m in call rem2 (fun () -> cont rem2) (fun () -> bisect_aux task a i1 m rem1 cont))) let bisect_step task0 = let task= match task0 with | Some {task_decl = {td_node = Decl {d_node = Dprop (Pgoal,_,_)}}; task_prev = task} -> task | _ -> raise GoalNotFound in let rec length acc = function | Some {task_decl = {td_node = Decl _}; task_prev = t} -> length (acc + 1) t | Some {task_prev = t} -> length acc t | None -> acc in let n = length 0 task in let a = Array.make n (Obj.magic 0) in let rec init acc = function | Some {task_decl = {td_node = Decl d}; task_prev = t} -> a.(acc) <- d; init (acc - 1) t | Some { task_prev = t} -> init acc t | None -> assert (acc = -1) in init (n-1) task; let empty_rem = {rem_ts = Sts.empty; rem_ls = Sls.empty; rem_pr = Spr.empty} in bisect_aux task0 a 0 n empty_rem (fun rem -> BSdone rem) let bisect f task = let rec run = function | BSdone r - > r | BSstep ( rem , c ) - > let t = rem in run ( c ( f t ) ) in run ( bisect_step task ) let bisect f task = let rec run = function | BSdone r -> r | BSstep (rem,c) -> let t = elim_task task rem in run (c (f t)) in run (bisect_step task) *) (** catch exception for debug *) let bisect_step = let res = try bisect_step task0 with exn - > Format.eprintf " bisect_step fail : % a@. " ; (* raise exn in *) (* match res with *) (* | BSdone _ as d -> d *) (* | BSstep (t,f) -> BSstep (t,fun b -> try f b with exn -> *) Format.eprintf " bisect_step fail : % a@. " ; (* raise exn) *)
null
https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/src/transform/eliminate_definition.ml
ocaml
****************************************************************** This software is distributed under the terms of the GNU Lesser on linking described in file LICENSE. ****************************************************************** * Discard definitions of built-in symbols * TODO: go further? such as constructor that are removed? * Eliminate definitions of functions and predicates FIXME? Shouldn't 0 be allowed too? FIXME? We can have non-recursive functions in a group * conditional transformations, only applied when polymorphic types occur lt i lk Format.eprintf "i1: %i, i2: %i@\nrem:%a@." i1 i2 print_rem rem; rem c rem1 c \old(rem1) * catch exception for debug raise exn in match res with | BSdone _ as d -> d | BSstep (t,f) -> BSstep (t,fun b -> try f b with exn -> raise exn)
The Why3 Verification Platform / The Why3 Development Team Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University General Public License version 2.1 , with the special exception open Ident open Ty open Term open Decl let add_id undef_ls rem_ls (ls,ld) (abst,defn) = if Sls.mem ls rem_ls then abst,defn else if Sls.mem ls undef_ls then create_param_decl ls :: abst, defn else abst, (ls,ld) :: defn let elim_abstract undef_ls rem_pr rem_ls rem_ts d = match d.d_node with | Dlogic l -> let ld, id = List.fold_right (add_id undef_ls rem_ls) l ([],[]) in ld @ (if id = [] then [] else [create_logic_decl id]) | Dind (s, l) -> let ld, id = List.fold_right (add_id undef_ls rem_ls) l ([],[]) in ld @ (if id = [] then [] else [create_ind_decl s id]) | Dprop (Paxiom,pr,_) when Spr.mem pr rem_pr -> [] | Dtype ts when Sts.mem ts rem_ts -> [] | Ddata l -> let test_id (ts,_) = not (Sts.mem ts rem_ts) in let l = List.filter test_id l in (if l = [] then [] else [create_data_decl l]) | _ -> [d] let eliminate_builtin = Trans.on_tagged_ls Printer.meta_syntax_logic (fun undef_ls -> Trans.on_tagged_pr Printer.meta_remove_prop (fun rem_pr -> Trans.on_tagged_ls Printer.meta_remove_logic (fun rem_ls -> Trans.on_tagged_ts Printer.meta_remove_type (fun rem_ts -> Trans.decl (elim_abstract undef_ls rem_pr rem_ls rem_ts) None)))) let () = Trans.register_transform "eliminate_builtin" eliminate_builtin ~desc:"Eliminate@ propositions@ and@ definitions@ of@ symbols@ \ that@ are@ builtin@ in@ the@ prover@ (see@ 'syntax'@ and@ \ 'remove'@ clauses@ in@ the@ prover's@ driver)." * compute the meta_remove _ * given two task one included in the other let compute_diff t1 t2 = let km = Mid.set_diff (Task.task_known t1) (Task.task_known t2) in let hdone = Hdecl.create 10 in let remove_ts acc ts = (Printer.meta_remove_type, [Theory.MAts ts])::acc in let remove_ls acc ls = (Printer.meta_remove_logic, [Theory.MAls ls])::acc in let remove_pr acc pr = (Printer.meta_remove_prop, [Theory.MApr pr])::acc in Mid.fold_left (fun acc _ decl -> if Hdecl.mem hdone decl then acc else begin Hdecl.replace hdone decl (); match decl.d_node with | Dtype ts -> remove_ts acc ts | Ddata l -> List.fold_left (fun acc (ts,_) -> remove_ts acc ts) acc l | Dparam ls -> remove_ls acc ls | Dlogic l -> List.fold_left (fun acc (ls,_) -> remove_ls acc ls) acc l | Dind (_,l) -> List.fold_left (fun acc (ls,_) -> remove_ls acc ls) acc l | Dprop (_,pr,_) -> remove_pr acc pr end) [] km let compute_diff = Trans.store (fun t1 -> Trans.store (fun t2 -> compute_diff t1 t2)) let rec t_insert hd t = match t.t_node with | Tif (f1,t2,t3) -> t_if f1 (t_insert hd t2) (t_insert hd t3) | Tlet (t1,bt) -> let v,t2 = t_open_bound bt in t_let_close v t1 (t_insert hd t2) | Tcase (tl,bl) -> let br b = let pl,t1 = t_open_branch b in t_close_branch pl (t_insert hd t1) in t_case tl (List.map br bl) | _ -> TermTF.t_selecti t_equ_simp t_iff_simp hd t let add_ld which meta_rewrite_def (ls,ld) (abst,defn,axl,metas) = if which ls then let vl,e = open_ls_defn ld in let nm = ls.ls_name.id_string ^ "_def" in let pr = create_prsymbol (id_derive nm ls.ls_name) in let hd = t_app ls (List.map t_var vl) e.t_ty in let ax = t_forall_close vl [] (t_insert hd e) in let ax = create_prop_decl Paxiom pr ax in let ld = create_param_decl ls in let metas = if Sls.mem ls meta_rewrite_def then Theory.create_meta Compute.meta_rewrite [Theory.MApr pr] :: metas else metas in ld :: abst, defn, ax :: axl, metas else abst, (ls,ld) :: defn, axl, metas let elim_decl which meta_rewrite_def l = let abst,defn,axl,metas = 
List.fold_right (add_ld which meta_rewrite_def) l ([],[],[],[]) in let defn = if defn = [] then [] else [create_logic_decl defn] in List.rev_append (List.rev_map Theory.create_decl (abst @ defn @ axl)) metas let elim which meta_rewrite_def d = match d.d_node with | Dlogic l -> elim_decl which meta_rewrite_def l | _ -> [Theory.create_decl d] let elim_recursion d = match d.d_node with | Dlogic ([s,_] as l) when Sid.mem s.ls_name d.d_syms -> elim_decl Util.ttrue Sls.empty l | Dlogic l when List.length l > 1 -> elim_decl Util.ttrue Sls.empty l | _ -> [Theory.create_decl d] List.for_all (fun (_,ld) -> List.length (ls_defn_decrease ld) = 1) dl let elim_non_struct_recursion d = match d.d_node with | Dlogic ((s,_) :: _ as dl) when Sid.mem s.ls_name d.d_syms && not (is_struct dl) -> elim_decl Util.ttrue Sls.empty dl | _ -> [Theory.create_decl d] let elim_mutual d = match d.d_node with | Dlogic l when List.length l > 1 -> elim_decl Util.ttrue Sls.empty l | _ -> [Theory.create_decl d] let eliminate_definition_gen which = Trans.on_tagged_ls Compute.meta_rewrite_def (fun rew -> Trans.tdecl (elim which rew) None) let eliminate_definition_func = eliminate_definition_gen (fun ls -> ls.ls_value <> None) let eliminate_definition_pred = eliminate_definition_gen (fun ls -> ls.ls_value = None) let eliminate_definition = eliminate_definition_gen Util.ttrue let eliminate_recursion = Trans.tdecl elim_recursion None let eliminate_non_struct_recursion = Trans.tdecl elim_non_struct_recursion None let eliminate_mutual_recursion = Trans.tdecl elim_mutual None let () = Trans.register_transform "eliminate_definition_func" eliminate_definition_func ~desc:"Transform@ function@ definitions@ into@ axioms."; Trans.register_transform "eliminate_definition_pred" eliminate_definition_pred ~desc:"Transform@ predicate@ definitions@ into@ axioms."; Trans.register_transform "eliminate_definition" eliminate_definition ~desc:"Transform@ function@ and@ predicate@ definitions@ into@ axioms."; Trans.register_transform "eliminate_recursion" eliminate_recursion ~desc:"Same@ as@ eliminate_definition,@ but@ only@ for@ recursive@ \ definitions."; Trans.register_transform "eliminate_non_struct_recursion" eliminate_non_struct_recursion ~desc:"Same@ as@ eliminate_recursion,@ but@ only@ for@ non-structural@ \ recursive@ definitions."; Trans.register_transform "eliminate_mutual_recursion" eliminate_mutual_recursion ~desc:"Same@ as@ eliminate_recursion,@ but@ only@ for@ mutually@ \ recursive@ definitions." let eliminate_definition_if_poly = Trans.on_meta Detect_polymorphism.meta_monomorphic_types_only (function | [] -> eliminate_definition | _ -> eliminate_recursion) let () = Trans.register_transform "eliminate_definition_if_poly" eliminate_definition_if_poly ~desc:"Same@ as@ eliminate_definition@ but@ only@ if@ polymorphism@ appear." 
* * * * { 2 Bisection } * * * * * * * open Task open Theory type rem = { rem_pr : Spr.t; rem_ls : Sls.t; rem_ts : Sts.t } type bisect_step = | BSdone of rem | BSstep of rem * (bool -> bisect_step) let _print_rem fmt rem = Format.fprintf fmt "@[rem_pr:@[%a@]@\nrem_ls:@[%a@]@\nrem_ts:@[%a@]@\n" (Pp.print_iter1 Spr.iter Pp.comma Pretty.print_pr) rem.rem_pr (Pp.print_iter1 Sls.iter Pp.comma Pretty.print_ls) rem.rem_ls (Pp.print_iter1 Sts.iter Pp.comma Pretty.print_ts) rem.rem_ts let rec elim_task task rem = match task with | Some ( { task_decl = { td_node = Decl decl } } as task ) - > let task = elim_task task.task_prev rem in let l = elim_abstract Sls.empty rem.rem_pr rem.rem_ls in List.fold_left l | Some task - > Task.add_tdecl ( elim_task task.task_prev rem ) task.task_decl | None - > None let rec elim_task task rem = match task with | Some ({task_decl = {td_node = Decl decl}} as task) -> let task = elim_task task.task_prev rem in let l = elim_abstract Sls.empty rem.rem_pr rem.rem_ls rem.rem_ts decl in List.fold_left Task.add_decl task l | Some task -> Task.add_tdecl (elim_task task.task_prev rem) task.task_decl | None -> None *) let add_rem rem decl = let remove_ts rem ts = { rem with rem_ts = Sts.add ts rem.rem_ts} in let remove_ls rem ls = { rem with rem_ls = Sls.add ls rem.rem_ls} in let remove_pr rem pr = { rem with rem_pr = Spr.add pr rem.rem_pr} in match decl.d_node with | Dtype ts -> remove_ts rem ts | Ddata l -> List.fold_left (fun rem (ts,_) -> remove_ts rem ts) rem l | Dparam ls -> remove_ls rem ls | Dlogic l -> List.fold_left (fun rem (ls,_) -> remove_ls rem ls) rem l | Dind (_,l) -> List.fold_left (fun rem (ls,_) -> remove_ls rem ls) rem l | Dprop (_,pr,_) -> remove_pr rem pr let _union_rem rem1 rem2 = { rem_ts = Sts.union rem1.rem_ts rem2.rem_ts; rem_ls = Sls.union rem1.rem_ls rem2.rem_ls; rem_pr = Spr.union rem1.rem_pr rem2.rem_pr; } let fold_sub f acc a i1 i2 = let acc = ref acc in for i=i1 to i2-1 do acc := f !acc a.(i) done; !acc let call rem valid invalid = try BSstep (rem, fun b -> if b then valid () else invalid ()) with UnknownIdent _ -> invalid () in if i2 - i1 < 2 then let rem1 = add_rem rem a.(i1) in call rem1 (fun () -> assert (i2 - i1 = 1); cont rem1) (fun () -> cont rem) else let m = (i1+i2)/2 in let rem1 = fold_sub add_rem rem a m i2 in call rem1 (fun () -> bisect_aux task a i1 m rem1 cont) (fun () -> bisect_aux task a m i2 rem let rem2 = fold_sub add_rem rem1 a i1 m in call rem2 (fun () -> cont rem2) (fun () -> bisect_aux task a i1 m rem1 cont))) let bisect_step task0 = let task= match task0 with | Some {task_decl = {td_node = Decl {d_node = Dprop (Pgoal,_,_)}}; task_prev = task} -> task | _ -> raise GoalNotFound in let rec length acc = function | Some {task_decl = {td_node = Decl _}; task_prev = t} -> length (acc + 1) t | Some {task_prev = t} -> length acc t | None -> acc in let n = length 0 task in let a = Array.make n (Obj.magic 0) in let rec init acc = function | Some {task_decl = {td_node = Decl d}; task_prev = t} -> a.(acc) <- d; init (acc - 1) t | Some { task_prev = t} -> init acc t | None -> assert (acc = -1) in init (n-1) task; let empty_rem = {rem_ts = Sts.empty; rem_ls = Sls.empty; rem_pr = Spr.empty} in bisect_aux task0 a 0 n empty_rem (fun rem -> BSdone rem) let bisect f task = let rec run = function | BSdone r - > r | BSstep ( rem , c ) - > let t = rem in run ( c ( f t ) ) in run ( bisect_step task ) let bisect f task = let rec run = function | BSdone r -> r | BSstep (rem,c) -> let t = elim_task task rem in run (c (f t)) in run (bisect_step task) 
*) let bisect_step = let res = try bisect_step task0 with exn -> Format.eprintf "bisect_step fail: %a@." ; Format.eprintf "bisect_step fail: %a@." ;
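The bisection code in eliminate_definition.ml repeatedly asks the prover whether the goal is still valid after removing half of the remaining declarations, and splits a half further whenever its removal breaks the proof. A generic sketch of that divide-and-conquer idea over a plain list, with the prover replaced by a pure oracle (illustrative only; needed is an invented name, and it assumes the oracle only cares about which elements are present and that adding elements never hurts):

-- 'needed ok fixed cs' returns a sublist of cs whose presence, together with
-- 'fixed', still satisfies the oracle. It discards whole halves of cs when
-- the oracle allows it and only then splits further, much like bisect_aux
-- does with chunks of declarations. Assumes ok (fixed ++ cs) holds initially.
needed :: ([a] -> Bool) -> [a] -> [a] -> [a]
needed ok fixed cs
  | ok fixed       = []            -- nothing in cs is required
  | length cs <= 1 = cs            -- a single required declaration
  | otherwise      =
      let (a, b) = splitAt (length cs `div` 2) cs
          a'     = needed ok (fixed ++ b) a   -- what the first half must keep
          b'     = needed ok (fixed ++ a') b  -- then what the second half must keep
      in a' ++ b'

main :: IO ()
main =
  let ok ds = all (`elem` ds) [3, 7 :: Int]   -- toy "prover": the goal needs declarations 3 and 7
  in print (needed ok [] [1 .. 10])           -- [3,7]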
d656f267cdd59e067fe8401fec85bcc01d95bed25edfb4abf643024cd2b69946
mauricioszabo/check
core_test.cljc
(ns check.core-test (:require [clojure.string :as str] [clojure.test :refer [deftest testing run-tests] :as t] [check.core :refer [check] :as check :include-macros true])) (deftest check-wraps-matcher-combinators (testing "simple checks" (check {:foo 12} => {:foo 12})) (testing "regexp checks" (check (str 10) => #"\d\d"))) (deftest matcher-combinators (testing "implements code to check strings" (check "foobar is a string" => "foobar is a string"))) (deftest check-captures-exceptions (testing "checks only for exception type" (check (throw (ex-info "Wow, some error!" {})) =throws=> #?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo))) (testing "checks for exception type, and checks more" (check (throw (ex-info "Wow, some error!" {})) =throws=> [#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo) #(check #?(:clj (.getMessage %) :cljs (.-message %)) => "Wow, some error!")]))) (deftest checks-for-in-behavior (check [1 2 3] =includes=> 2)) ; CUSTOM MATCHERS (check/defmatcher is-the-same? [expected actual] {:pass? (identical? expected actual) :failure-message "They are not the same object!"}) (deftest custom-matcher (let [obj #?(:cljs (js/Object.) :clj (Object.))] (check obj is-the-same? obj))) (defn stateful-obj [] (let [a (atom [])] (fn [x] (swap! a conj (inc x)) @a))) (deftest stateful-matchers (let [add! (stateful-obj)] (check (add! 10) => [11]) (check (add! 11) => [11 12]) (check (add! 12) => [11 12 13])))
null
https://raw.githubusercontent.com/mauricioszabo/check/fc4a3a619a8ce63d152f940de12bc96b83a4adfd/test/check/core_test.cljc
clojure
CUSTOM MATCHERS
(ns check.core-test (:require [clojure.string :as str] [clojure.test :refer [deftest testing run-tests] :as t] [check.core :refer [check] :as check :include-macros true])) (deftest check-wraps-matcher-combinators (testing "simple checks" (check {:foo 12} => {:foo 12})) (testing "regexp checks" (check (str 10) => #"\d\d"))) (deftest matcher-combinators (testing "implements code to check strings" (check "foobar is a string" => "foobar is a string"))) (deftest check-captures-exceptions (testing "checks only for exception type" (check (throw (ex-info "Wow, some error!" {})) =throws=> #?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo))) (testing "checks for exception type, and checks more" (check (throw (ex-info "Wow, some error!" {})) =throws=> [#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo) #(check #?(:clj (.getMessage %) :cljs (.-message %)) => "Wow, some error!")]))) (deftest checks-for-in-behavior (check [1 2 3] =includes=> 2)) (check/defmatcher is-the-same? [expected actual] {:pass? (identical? expected actual) :failure-message "They are not the same object!"}) (deftest custom-matcher (let [obj #?(:cljs (js/Object.) :clj (Object.))] (check obj is-the-same? obj))) (defn stateful-obj [] (let [a (atom [])] (fn [x] (swap! a conj (inc x)) @a))) (deftest stateful-matchers (let [add! (stateful-obj)] (check (add! 10) => [11]) (check (add! 11) => [11 12]) (check (add! 12) => [11 12 13])))
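check/defmatcher above turns a pair of expected and actual values into a map with :pass? and :failure-message, and check threads any such matcher through the same reporting path as the built-in arrows. A loose Haskell analogue of that idea (a matcher as a plain function returning a result record; value equality stands in for Clojure's identical?, and every name here is invented for the sketch):

-- A matcher, in the spirit of `check/defmatcher`: it receives the expected
-- and actual values and reports a pass flag plus a failure message.
data MatchResult = MatchResult { passed :: Bool, failureMessage :: String }

type Matcher a = a -> a -> MatchResult

isTheSame :: Eq a => Matcher a
isTheSame expected actual =
  MatchResult (expected == actual) "They are not the same value!"

-- Run a matcher and report the outcome, as `check` does with its matchers.
checkWith :: Matcher a -> a -> a -> IO ()
checkWith matcher expected actual =
  let r = matcher expected actual
  in putStrLn (if passed r then "ok" else "FAIL: " ++ failureMessage r)

main :: IO ()
main = checkWith isTheSame (42 :: Int) 42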
7726f3a351c7bffc1d37671f1f3e629038f6c1eb6d5257cf21fda1b70878a879
geneweb/geneweb
gwdLog.ml
let verbosity = ref 7 let debug = ref false let oc : out_channel option ref = ref None let log fn = match !oc with | Some oc -> fn oc | None -> () type level = [ `LOG_ALERT | `LOG_CRIT | `LOG_DEBUG | `LOG_EMERG | `LOG_ERR | `LOG_INFO | `LOG_NOTICE | `LOG_WARNING ] #ifdef SYSLOG let syslog (level : level) msg = let flags = if !debug then [`LOG_PERROR] else [] in if !verbosity >= match level with | `LOG_EMERG -> 0 | `LOG_ALERT -> 1 | `LOG_CRIT -> 2 | `LOG_ERR -> 3 | `LOG_WARNING -> 4 | `LOG_NOTICE -> 5 | `LOG_INFO -> 6 | `LOG_DEBUG -> 7 then begin let log = Syslog.openlog ~flags @@ Filename.basename @@ Sys.executable_name in Syslog.syslog log level msg ; Syslog.closelog log ; if !debug then Printexc.print_backtrace stderr ; end #endif #ifndef SYSLOG let syslog (level : level) msg = if !verbosity >= match level with | `LOG_EMERG -> 0 | `LOG_ALERT -> 1 | `LOG_CRIT -> 2 | `LOG_ERR -> 3 | `LOG_WARNING -> 4 | `LOG_NOTICE -> 5 | `LOG_INFO -> 6 | `LOG_DEBUG -> 7 then begin let tm = Unix.(time () |> localtime) in let level = match level with | `LOG_EMERG -> "EMERGENCY" | `LOG_ALERT -> "ALERT" | `LOG_CRIT -> "CRITICAL" | `LOG_ERR -> "ERROR" | `LOG_WARNING -> "WARNING" | `LOG_NOTICE -> "NOTICE" | `LOG_INFO -> "INFO" | `LOG_DEBUG -> "DEBUG" in let print oc = Printf.fprintf oc "[%s]: %s %s\n" (Mutil.sprintf_date tm :> string) level msg in begin match Sys.getenv_opt "GW_SYSLOG_FILE" with | Some fn -> let oc = open_out_gen [ Open_wronly ; Open_creat ; Open_append ] 0o644 fn in print oc ; close_out oc | None -> print stderr end ; if !debug then Printexc.print_backtrace stderr ; end #endif
null
https://raw.githubusercontent.com/geneweb/geneweb/a108e39dc5157ef13483d7b3812d5aac2858a1f0/bin/gwd/gwdLog.ml
ocaml
let verbosity = ref 7 let debug = ref false let oc : out_channel option ref = ref None let log fn = match !oc with | Some oc -> fn oc | None -> () type level = [ `LOG_ALERT | `LOG_CRIT | `LOG_DEBUG | `LOG_EMERG | `LOG_ERR | `LOG_INFO | `LOG_NOTICE | `LOG_WARNING ] #ifdef SYSLOG let syslog (level : level) msg = let flags = if !debug then [`LOG_PERROR] else [] in if !verbosity >= match level with | `LOG_EMERG -> 0 | `LOG_ALERT -> 1 | `LOG_CRIT -> 2 | `LOG_ERR -> 3 | `LOG_WARNING -> 4 | `LOG_NOTICE -> 5 | `LOG_INFO -> 6 | `LOG_DEBUG -> 7 then begin let log = Syslog.openlog ~flags @@ Filename.basename @@ Sys.executable_name in Syslog.syslog log level msg ; Syslog.closelog log ; if !debug then Printexc.print_backtrace stderr ; end #endif #ifndef SYSLOG let syslog (level : level) msg = if !verbosity >= match level with | `LOG_EMERG -> 0 | `LOG_ALERT -> 1 | `LOG_CRIT -> 2 | `LOG_ERR -> 3 | `LOG_WARNING -> 4 | `LOG_NOTICE -> 5 | `LOG_INFO -> 6 | `LOG_DEBUG -> 7 then begin let tm = Unix.(time () |> localtime) in let level = match level with | `LOG_EMERG -> "EMERGENCY" | `LOG_ALERT -> "ALERT" | `LOG_CRIT -> "CRITICAL" | `LOG_ERR -> "ERROR" | `LOG_WARNING -> "WARNING" | `LOG_NOTICE -> "NOTICE" | `LOG_INFO -> "INFO" | `LOG_DEBUG -> "DEBUG" in let print oc = Printf.fprintf oc "[%s]: %s %s\n" (Mutil.sprintf_date tm :> string) level msg in begin match Sys.getenv_opt "GW_SYSLOG_FILE" with | Some fn -> let oc = open_out_gen [ Open_wronly ; Open_creat ; Open_append ] 0o644 fn in print oc ; close_out oc | None -> print stderr end ; if !debug then Printexc.print_backtrace stderr ; end #endif
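gwdLog.ml maps syslog-style levels onto the numeric scale EMERG = 0 through DEBUG = 7 and only emits a message when the configured verbosity is at least that number. A small sketch of the same gating rule (the Level type and logAt are illustrative, not GeneWeb code):

data Level = Emerg | Alert | Crit | Err | Warning | Notice | Info | Debug
  deriving (Show, Enum)

-- Numeric severity as in the module above: EMERG = 0 ... DEBUG = 7.
severity :: Level -> Int
severity = fromEnum

-- Emit only messages whose severity does not exceed the configured
-- verbosity, mirroring the `!verbosity >= ...` guard above.
logAt :: Int -> Level -> String -> IO ()
logAt verbosity lvl msg
  | verbosity >= severity lvl = putStrLn ("[" ++ show lvl ++ "] " ++ msg)
  | otherwise                 = pure ()

main :: IO ()
main = do
  logAt 4 Err   "shown: severity 3 is within verbosity 4"
  logAt 4 Debug "suppressed: severity 7 exceeds verbosity 4"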
24c6a590af5ef762eba65d9c156cdcbfc03550095799f029de269f379a1eb98d
emqx/hocon
demo_schema5.erl
%%-------------------------------------------------------------------- %% Copyright (c) 2021-2022 EMQ Technologies Co., Ltd. All Rights Reserved. %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software %% distributed under the License is distributed on an "AS IS" BASIS, %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- -module(demo_schema5). -include_lib("typerefl/include/types.hrl"). -behaviour(hocon_schema). -export([namespace/0, roots/0, fields/1, tags/0]). namespace() -> ?MODULE. roots() -> [ "config" ]. tags() -> [<<"tag from demo_schema5">>]. fields("config") -> [ {bool, boolean()} ].
null
https://raw.githubusercontent.com/emqx/hocon/10fefe7e0ddf3f01d45d2d631562afe614ca357f/sample-schemas/demo_schema5.erl
erlang
-------------------------------------------------------------------- you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --------------------------------------------------------------------
Copyright ( c ) 2021 - 2022 EMQ Technologies Co. , Ltd. All Rights Reserved . Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , -module(demo_schema5). -include_lib("typerefl/include/types.hrl"). -behaviour(hocon_schema). -export([namespace/0, roots/0, fields/1, tags/0]). namespace() -> ?MODULE. roots() -> [ "config" ]. tags() -> [<<"tag from demo_schema5">>]. fields("config") -> [ {bool, boolean()} ].
5b794e15f593a01e0e3a4fd0b7776565ef174bd803e0b32a589619162fa0c48b
ros/roslisp
float-bytes.lisp
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Software License Agreement (BSD License) ;; Copyright ( c ) 2008 , Willow Garage , Inc. ;; All rights reserved. ;; ;; Redistribution and use in source and binary forms, with ;; or without modification, are permitted provided that the ;; following conditions are met: ;; ;; * Redistributions of source code must retain the above ;; copyright notice, this list of conditions and the ;; following disclaimer. ;; * Redistributions in binary form must reproduce the ;; above copyright notice, this list of conditions and ;; the following disclaimer in the documentation and/or ;; other materials provided with the distribution. * Neither the name of Willow Garage , Inc. nor the names ;; of its contributors may be used to endorse or promote ;; products derived from this software without specific ;; prior written permission. ;; ;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR ;; WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A ;; PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR ;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN ;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ;; OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH ;; DAMAGE. ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (in-package roslisp-utils) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Conversion functions for going to and from single and ;; double precision floating point values, assuming the IEEE format ( which one ? ) . ;; Code taken post to comp.lang.lisp : ;; ;; which presumably is in the public domain. 
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (defun encode-float-bits (float sign-byte exponent-byte mantissa-byte bias) (multiple-value-bind (original-mantissa original-exponent sign) (integer-decode-float (float float 0d0)) (multiple-value-bind (mantissa exponent) (scale original-mantissa original-exponent (1+ (byte-size mantissa-byte))) (incf exponent (byte-size mantissa-byte)) (when (zerop mantissa) (setf exponent (- bias))) (when (<= exponent (- bias)) (setf (values mantissa exponent) (denormalize original-mantissa original-exponent bias mantissa-byte))) (incf exponent bias) (when (> (integer-length exponent) (byte-size exponent-byte)) (setf mantissa 0 exponent (ldb (byte (byte-size exponent-byte) 0) (lognot 0)))) (let ((result 0)) (setf (ldb sign-byte result) (if (plusp sign) 0 1)) (setf (ldb exponent-byte result) exponent) (setf (ldb mantissa-byte result) mantissa) result)))) (defun decode-float-bits (bits sign-byte exponent-byte mantissa-byte bias) (let ((sign (if (zerop (ldb sign-byte bits)) 1 -1)) (exponent (ldb exponent-byte bits)) (mantissa (ldb mantissa-byte bits))) (if (= (logcount (ldb exponent-byte bits)) (byte-size exponent-byte)) (if (zerop mantissa) (if (plusp sign) 'positive-infinity 'negative-infinity) 'not-a-number) (progn (when (plusp exponent) (incf mantissa (expt 2 (byte-size mantissa-byte)))) (if (zerop exponent) (setf exponent (- 1 bias (byte-size mantissa-byte))) (setf exponent (- (- exponent (byte-size mantissa-byte)) bias))) (float (* sign (* mantissa (expt 2 exponent))) 0d0))))) (defun scale-integer (value bits) "Scale an integer value so it fits in the given number of bits." (if (zerop value) (values 0 0) (let ((scale (- bits (integer-length value)))) (values (round (* value (expt 2 scale))) scale)))) (defun scale (mantissa exponent mantissa-bits) "Scale an integer value so it fits in the given number of bits." (multiple-value-bind (mantissa scale) (scale-integer mantissa mantissa-bits) (values mantissa (- exponent scale)))) (defun denormalize (mantissa exponent bias mantissa-byte) (multiple-value-bind (mantissa exponent) (scale mantissa exponent (byte-size mantissa-byte)) (incf exponent (byte-size mantissa-byte)) (values (ash mantissa (- exponent (1+ (- bias)))) (- bias)))) (defun encode-single-float-bits (float) (let ((float (float float 0.0))) (encode-float-bits float (byte 1 31) (byte 8 23) (byte 23 0) 127))) (defun encode-double-float-bits (float) (let ((float (float float 0.0d0))) (encode-float-bits float (byte 1 63) (byte 11 52) (byte 52 0) 1023))) (defun decode-single-float-bits (bits) (decode-float-bits bits (byte 1 31) (byte 8 23) (byte 23 0) 127)) (defun decode-double-float-bits (bits) (decode-float-bits bits (byte 1 63) (byte 11 52) (byte 52 0) 1023))
null
https://raw.githubusercontent.com/ros/roslisp/559355a8d695e34e6dedae071a9af2c065411687/utils/float-bytes.lisp
lisp
Software License Agreement (BSD License) All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, LOSS OF USE , OR BUSINESS INTERRUPTION ) HOWEVER CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Conversion functions for going to and from single and double precision floating point values, assuming the which presumably is in the public domain.
Copyright ( c ) 2008 , Willow Garage , Inc. * Neither the name of Willow Garage , Inc. nor the names CONTRIBUTORS " AS IS " AND ANY EXPRESS OR COPYRIGHT OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN (in-package roslisp-utils) IEEE format ( which one ? ) . Code taken post to comp.lang.lisp : (defun encode-float-bits (float sign-byte exponent-byte mantissa-byte bias) (multiple-value-bind (original-mantissa original-exponent sign) (integer-decode-float (float float 0d0)) (multiple-value-bind (mantissa exponent) (scale original-mantissa original-exponent (1+ (byte-size mantissa-byte))) (incf exponent (byte-size mantissa-byte)) (when (zerop mantissa) (setf exponent (- bias))) (when (<= exponent (- bias)) (setf (values mantissa exponent) (denormalize original-mantissa original-exponent bias mantissa-byte))) (incf exponent bias) (when (> (integer-length exponent) (byte-size exponent-byte)) (setf mantissa 0 exponent (ldb (byte (byte-size exponent-byte) 0) (lognot 0)))) (let ((result 0)) (setf (ldb sign-byte result) (if (plusp sign) 0 1)) (setf (ldb exponent-byte result) exponent) (setf (ldb mantissa-byte result) mantissa) result)))) (defun decode-float-bits (bits sign-byte exponent-byte mantissa-byte bias) (let ((sign (if (zerop (ldb sign-byte bits)) 1 -1)) (exponent (ldb exponent-byte bits)) (mantissa (ldb mantissa-byte bits))) (if (= (logcount (ldb exponent-byte bits)) (byte-size exponent-byte)) (if (zerop mantissa) (if (plusp sign) 'positive-infinity 'negative-infinity) 'not-a-number) (progn (when (plusp exponent) (incf mantissa (expt 2 (byte-size mantissa-byte)))) (if (zerop exponent) (setf exponent (- 1 bias (byte-size mantissa-byte))) (setf exponent (- (- exponent (byte-size mantissa-byte)) bias))) (float (* sign (* mantissa (expt 2 exponent))) 0d0))))) (defun scale-integer (value bits) "Scale an integer value so it fits in the given number of bits." (if (zerop value) (values 0 0) (let ((scale (- bits (integer-length value)))) (values (round (* value (expt 2 scale))) scale)))) (defun scale (mantissa exponent mantissa-bits) "Scale an integer value so it fits in the given number of bits." (multiple-value-bind (mantissa scale) (scale-integer mantissa mantissa-bits) (values mantissa (- exponent scale)))) (defun denormalize (mantissa exponent bias mantissa-byte) (multiple-value-bind (mantissa exponent) (scale mantissa exponent (byte-size mantissa-byte)) (incf exponent (byte-size mantissa-byte)) (values (ash mantissa (- exponent (1+ (- bias)))) (- bias)))) (defun encode-single-float-bits (float) (let ((float (float float 0.0))) (encode-float-bits float (byte 1 31) (byte 8 23) (byte 23 0) 127))) (defun encode-double-float-bits (float) (let ((float (float float 0.0d0))) (encode-float-bits float (byte 1 63) (byte 11 52) (byte 52 0) 1023))) (defun decode-single-float-bits (bits) (decode-float-bits bits (byte 1 31) (byte 8 23) (byte 23 0) 127)) (defun decode-double-float-bits (bits) (decode-float-bits bits (byte 1 63) (byte 11 52) (byte 52 0) 1023))
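float-bytes.lisp packs and unpacks IEEE-754 fields by hand: sign, biased exponent and mantissa occupy (1, 8, 23) bits with bias 127 for single floats and (1, 11, 52) bits with bias 1023 for doubles. A quick Haskell cross-check of the double layout using the bit cast from GHC.Float (this mirrors only the field extraction, not the Lisp code's handling of denormals and special values):

import Data.Bits (shiftR, (.&.))
import Data.Word (Word64)
import GHC.Float (castDoubleToWord64)

-- Split a Double into IEEE-754 sign / biased exponent / mantissa fields,
-- the same (1, 11, 52)-bit layout the double-float functions above assume.
fields :: Double -> (Word64, Word64, Word64)
fields d =
  let bits = castDoubleToWord64 d
      sign = bits `shiftR` 63
      expo = (bits `shiftR` 52) .&. 0x7FF
      mant = bits .&. 0xFFFFFFFFFFFFF
  in (sign, expo, mant)

-- Rebuild a normal number from its fields:
-- (-1)^sign * (1 + mantissa / 2^52) * 2^(exponent - 1023)
rebuild :: (Word64, Word64, Word64) -> Double
rebuild (sign, expo, mant) =
  let s = if sign == 1 then -1 else 1
      m = 1 + fromIntegral mant / 2 ^ (52 :: Int)
  in s * m * 2 ** (fromIntegral expo - 1023)

main :: IO ()
main = do
  print (fields (-1.5))            -- (1, 1023, 2251799813685248)
  print (rebuild (fields 6.25))    -- 6.25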
e2c79de89fe350979ad7f0c9ea62eff231fb7311e100ab9f201571375495f8e7
jaked/ocamljs
printf.ml
* This file is part of ocamljs , OCaml to Javascript compiler * Copyright ( C ) 2007 - 9 Skydeck , Inc * Copyright ( C ) 2010 * * This library is free software ; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation ; either * version 2 of the License , or ( at your option ) any later version . * * This library is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU * Library General Public License for more details . * * You should have received a copy of the GNU Library General Public * License along with this library ; if not , write to the Free * Software Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , * MA 02111 - 1307 , USA * This file is part of ocamljs, OCaml to Javascript compiler * Copyright (C) 2007-9 Skydeck, Inc * Copyright (C) 2010 Jake Donham * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with this library; if not, write to the Free * Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, * MA 02111-1307, USA *) (***********************************************************************) (* *) (* Objective Caml *) (* *) and , projet Cristal , INRIA Rocquencourt (* *) Copyright 1996 Institut National de Recherche en Informatique et en Automatique . All rights reserved . This file is distributed under the terms of the GNU Library General Public License , with (* the special exception on linking described in file ../LICENSE. *) (* *) (***********************************************************************) $ I d : printf.ml 9412 2009 - 11 - 09 11:42:39Z weis $ external format_float: string -> float -> string = "caml_format_float" external format_int: string -> int -> string = "caml_format_int" external format_int32: string -> int32 -> string = "caml_int32_format" external format_nativeint: string -> nativeint -> string = "caml_nativeint_format" external format_int64: string -> int64 -> string = "caml_int64_format" module Sformat = struct type index;; external unsafe_index_of_int : int -> index = "%identity" ;; let index_of_int i = if i >= 0 then unsafe_index_of_int i else failwith ("Sformat.index_of_int: negative argument " ^ string_of_int i) ;; external int_of_index : index -> int = "%identity" ;; let add_int_index i idx = index_of_int (i + int_of_index idx);; let succ_index = add_int_index 1;; Literal position are one - based ( hence pred p instead of p ) . 
let index_of_literal_position p = index_of_int (pred p);; external length : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> int = "%string_length" ;; external get : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> int -> char = "%string_safe_get" ;; external unsafe_get : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> int -> char = "%string_unsafe_get" ;; external unsafe_to_string : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> string = "%identity" ;; let sub fmt idx len = String.sub (unsafe_to_string fmt) (int_of_index idx) len ;; let to_string fmt = sub fmt (unsafe_index_of_int 0) (length fmt) ;; end ;; let bad_conversion sfmt i c = invalid_arg ("Printf: bad conversion %" ^ String.make 1 c ^ ", at char number " ^ string_of_int i ^ " in format string ``" ^ sfmt ^ "''") ;; let bad_conversion_format fmt i c = bad_conversion (Sformat.to_string fmt) i c ;; let incomplete_format fmt = invalid_arg ("Printf: premature end of format string ``" ^ Sformat.to_string fmt ^ "''") ;; (* Parses a string conversion to return the specified length and the padding direction. *) let parse_string_conversion sfmt = let rec parse neg i = if i >= String.length sfmt then (0, neg) else match String.unsafe_get sfmt i with | '1'..'9' -> (int_of_string (String.sub sfmt i (String.length sfmt - i - 1)), neg) | '-' -> parse true (succ i) | _ -> parse neg (succ i) in try parse false 1 with | Failure _ -> bad_conversion sfmt 0 's' ;; (* Pad a (sub) string into a blank string of length [p], on the right if [neg] is true, on the left otherwise. *) let pad_string pad_char p neg s i len = if p = len && i = 0 then s else if p <= len then String.sub s i len else let res = String.make p pad_char in if neg then String.blit s i res 0 len else String.blit s i res (p - len) len; res Format a string given a % s format , e.g. % 40s or % -20s . To do ? : ignore other flags ( # , + , etc ) . To do ?: ignore other flags (#, +, etc). *) let format_string sfmt s = let (p, neg) = parse_string_conversion sfmt in pad_string ' ' p neg s 0 (String.length s) ;; (* Extract a format string out of [fmt] between [start] and [stop] inclusive. ['*'] in the format are replaced by integers taken from the [widths] list. [extract_format] returns a string which is the string representation of the resulting format string. *) let extract_format fmt start stop widths = let skip_positional_spec start = match Sformat.unsafe_get fmt start with | '0'..'9' -> let rec skip_int_literal i = match Sformat.unsafe_get fmt i with | '0'..'9' -> skip_int_literal (succ i) | '$' -> succ i | _ -> start in skip_int_literal (succ start) | _ -> start in let start = skip_positional_spec (succ start) in let b = Buffer.create (stop - start + 10) in Buffer.add_char b '%'; let rec fill_format i widths = if i <= stop then match (Sformat.unsafe_get fmt i, widths) with | ('*', h :: t) -> Buffer.add_string b (string_of_int h); let i = skip_positional_spec (succ i) in fill_format i t | ('*', []) -> assert false (* Should not happen since this is ill-typed. *) | (c, _) -> Buffer.add_char b c; fill_format (succ i) widths in fill_format start (List.rev widths); Buffer.contents b ;; let extract_format_float conv fmt start stop widths = let sfmt = extract_format fmt start stop widths in match conv with | 'F' -> sfmt.[String.length sfmt - 1] <- 'g'; sfmt | _ -> sfmt ;; (* Returns the position of the next character following the meta format string, starting from position [i], inside a given format [fmt]. 
According to the character [conv], the meta format string is enclosed by the delimiters %{ and %} (when [conv = '{']) or %( and %) (when [conv = '(']). Hence, [sub_format] returns the index of the character following the [')'] or ['}'] that ends the meta format, according to the character [conv]. *) let sub_format incomplete_format bad_conversion_format conv fmt i = let len = Sformat.length fmt in let rec sub_fmt c i = let close = if c = '(' then ')' else (* '{' *) '}' in let rec sub j = if j >= len then incomplete_format fmt else match Sformat.get fmt j with | '%' -> sub_sub (succ j) | _ -> sub (succ j) and sub_sub j = if j >= len then incomplete_format fmt else match Sformat.get fmt j with | '(' | '{' as c -> let j = sub_fmt c (succ j) in sub (succ j) | '}' | ')' as c -> if c = close then succ j else bad_conversion_format fmt i c | _ -> sub (succ j) in sub i in sub_fmt conv i ;; let sub_format_for_printf conv = sub_format incomplete_format bad_conversion_format conv;; let iter_on_format_args fmt add_conv add_char = let lim = Sformat.length fmt - 1 in let rec scan_flags skip i = if i > lim then incomplete_format fmt else match Sformat.unsafe_get fmt i with | '*' -> scan_flags skip (add_conv skip i 'i') (* | '$' -> scan_flags skip (succ i) *** PR#4321 *) | '#' | '-' | ' ' | '+' -> scan_flags skip (succ i) | '_' -> scan_flags true (succ i) | '0'..'9' | '.' -> scan_flags skip (succ i) | _ -> scan_conv skip i and scan_conv skip i = if i > lim then incomplete_format fmt else match Sformat.unsafe_get fmt i with | '%' | '!' | ',' -> succ i | 's' | 'S' | '[' -> add_conv skip i 's' | 'c' | 'C' -> add_conv skip i 'c' | 'd' | 'i' |'o' | 'u' | 'x' | 'X' | 'N' -> add_conv skip i 'i' | 'f' | 'e' | 'E' | 'g' | 'G' | 'F' -> add_conv skip i 'f' | 'B' | 'b' -> add_conv skip i 'B' | 'a' | 'r' | 't' as conv -> add_conv skip i conv | 'l' | 'n' | 'L' as conv -> let j = succ i in if j > lim then add_conv skip i 'i' else begin match Sformat.get fmt j with | 'd' | 'i' | 'o' | 'u' | 'x' | 'X' -> add_char (add_conv skip i conv) 'i' | c -> add_conv skip i 'i' end | '{' as conv -> (* Just get a regular argument, skipping the specification. *) let i = add_conv skip i conv in (* To go on, find the index of the next char after the meta format. *) let j = sub_format_for_printf conv fmt i in (* Add the meta specification to the summary anyway. *) let rec loop i = if i < j - 2 then loop (add_char i (Sformat.get fmt i)) in loop i; (* Go on, starting at the closing brace to properly close the meta specification in the summary. *) scan_conv skip (j - 1) | '(' as conv -> (* Use the static format argument specification instead of the runtime format argument value: they must have the same type anyway. *) scan_fmt (add_conv skip i conv) | '}' | ')' as conv -> add_conv skip i conv | conv -> bad_conversion_format fmt i conv and scan_fmt i = if i < lim then if Sformat.get fmt i = '%' then scan_fmt (scan_flags false (succ i)) else scan_fmt (succ i) else i in ignore (scan_fmt 0) ;; (* Returns a string that summarizes the typing information that a given format string contains. For instance, [summarize_format_type "A number %d\n"] is "%i". It also checks the well-formedness of the format string. 
*) let summarize_format_type fmt = let len = Sformat.length fmt in let b = Buffer.create len in let add_char i c = Buffer.add_char b c; succ i in let add_conv skip i c = if skip then Buffer.add_string b "%_" else Buffer.add_char b '%'; add_char i c in iter_on_format_args fmt add_conv add_char; Buffer.contents b ;; module Ac = struct type ac = { mutable ac_rglr : int; mutable ac_skip : int; mutable ac_rdrs : int; } end ;; open Ac;; (* Computes the number of arguments of a format (including the flag arguments if any). *) let ac_of_format fmt = let ac = { ac_rglr = 0; ac_skip = 0; ac_rdrs = 0; } in let incr_ac skip c = let inc = if c = 'a' then 2 else 1 in if c = 'r' then ac.ac_rdrs <- ac.ac_rdrs + 1; if skip then ac.ac_skip <- ac.ac_skip + inc else ac.ac_rglr <- ac.ac_rglr + inc in let add_conv skip i c = (* Just finishing a meta format: no additional argument to record. *) if c <> ')' && c <> '}' then incr_ac skip c; succ i and add_char i c = succ i in iter_on_format_args fmt add_conv add_char; ac ;; let count_arguments_of_format fmt = let ac = ac_of_format fmt in (* For printing only regular arguments have to be counted. *) ac.ac_rglr ;; let list_iter_i f l = let rec loop i = function | [] -> () | [x] -> f i x (* Tail calling [f] *) | x :: xs -> f i x; loop (succ i) xs in loop 0 l ;; ` ` Abstracting '' version of : returns a ( curried ) function that will print when totally applied . Note : in the following , we are careful not to be badly caught by the compiler optimizations for the representation of arrays . will print when totally applied. Note: in the following, we are careful not to be badly caught by the compiler optimizations for the representation of arrays. *) let kapr kpr fmt = match count_arguments_of_format fmt with | 0 -> kpr fmt [||] | 1 -> Obj.magic (fun x -> let a = Array.make 1 (Obj.repr 0) in a.(0) <- x; kpr fmt a) | 2 -> Obj.magic (fun x y -> let a = Array.make 2 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; kpr fmt a) | 3 -> Obj.magic (fun x y z -> let a = Array.make 3 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; a.(2) <- z; kpr fmt a) | 4 -> Obj.magic (fun x y z t -> let a = Array.make 4 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; a.(2) <- z; a.(3) <- t; kpr fmt a) | 5 -> Obj.magic (fun x y z t u -> let a = Array.make 5 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; a.(2) <- z; a.(3) <- t; a.(4) <- u; kpr fmt a) | 6 -> Obj.magic (fun x y z t u v -> let a = Array.make 6 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; a.(2) <- z; a.(3) <- t; a.(4) <- u; a.(5) <- v; kpr fmt a) | nargs -> let rec loop i args = if i >= nargs then let a = Array.make nargs (Obj.repr 0) in list_iter_i (fun i arg -> a.(nargs - i - 1) <- arg) args; kpr fmt a else Obj.magic (fun x -> loop (succ i) (x :: args)) in loop 0 [] ;; type positional_specification = | Spec_none | Spec_index of Sformat.index ;; To scan an optional positional parameter specification , i.e. an integer followed by a [ $ ] . Calling [ got_spec ] with appropriate arguments , we ` ` return '' a positional specification and an index to go on scanning the [ fmt ] format at hand . Note that this is optimized for the regular case , i.e. no positional parameter , since in this case we juste ` ` return '' the constant [ Spec_none ] ; in case we have a positional parameter , we ` ` return '' a [ Spec_index ] [ positional_specification ] which a bit more costly . 
Note also that we do not support [ * $ ] specifications , since this would lead to type checking problems : a [ * $ ] positional specification means ` ` take the next argument to [ printf ] ( which must be an integer value ) '' , name this integer value $ n$ ; [ * $ ] now designates parameter $ n$. Unfortunately , the type of a parameter specified via a [ * $ ] positional specification should be the type of the corresponding argument to [ printf ] , hence this should be the type of the $ n$-th argument to [ printf ] with $ n$ being the { \em value } of the integer argument defining [ * ] ; we clearly can not statically guess the value of this parameter in the general case . Put it another way : this means type dependency , which is completely out of scope of the type algebra . i.e. an integer followed by a [$]. Calling [got_spec] with appropriate arguments, we ``return'' a positional specification and an index to go on scanning the [fmt] format at hand. Note that this is optimized for the regular case, i.e. no positional parameter, since in this case we juste ``return'' the constant [Spec_none]; in case we have a positional parameter, we ``return'' a [Spec_index] [positional_specification] which a bit more costly. Note also that we do not support [*$] specifications, since this would lead to type checking problems: a [*$] positional specification means ``take the next argument to [printf] (which must be an integer value)'', name this integer value $n$; [*$] now designates parameter $n$. Unfortunately, the type of a parameter specified via a [*$] positional specification should be the type of the corresponding argument to [printf], hence this should be the type of the $n$-th argument to [printf] with $n$ being the {\em value} of the integer argument defining [*]; we clearly cannot statically guess the value of this parameter in the general case. Put it another way: this means type dependency, which is completely out of scope of the Caml type algebra. *) let scan_positional_spec fmt got_spec n i = match Sformat.unsafe_get fmt i with | '0'..'9' as d -> let rec get_int_literal accu j = match Sformat.unsafe_get fmt j with | '0'..'9' as d -> get_int_literal (10 * accu + (int_of_char d - 48)) (succ j) | '$' -> if accu = 0 then failwith "printf: bad positional specification (0)." else got_spec (Spec_index (Sformat.index_of_literal_position accu)) (succ j) Not a positional specification : tell so the caller , and go back to scanning the format from the original [ i ] position we were called at first . scanning the format from the original [i] position we were called at first. *) | _ -> got_spec Spec_none i in get_int_literal (int_of_char d - 48) (succ i) (* No positional specification: tell so the caller, and go back to scanning the format from the original [i] position. *) | _ -> got_spec Spec_none i ;; (* Get the index of the next argument to printf, according to the given positional specification. *) let next_index spec n = match spec with | Spec_none -> Sformat.succ_index n | Spec_index _ -> n ;; (* Get the index of the actual argument to printf, according to its optional positional specification. *) let get_index spec n = match spec with | Spec_none -> n | Spec_index p -> p ;; Format a float argument as a valid . let format_float_lexeme = let valid_float_lexeme sfmt s = let l = String.length s in if l = 0 then "nan" else let add_dot sfmt s = s ^ "." in let rec loop i = if i >= l then add_dot sfmt s else match s.[i] with | '.' 
-> s | _ -> loop (i + 1) in loop 0 in (fun sfmt x -> let s = format_float sfmt x in match classify_float x with | FP_normal | FP_subnormal | FP_zero -> valid_float_lexeme sfmt s | FP_nan | FP_infinite -> s) ;; Decode a format string and act on it . [ fmt ] is the [ printf ] format string , and [ pos ] points to a [ % ] character in the format string . After consuming the appropriate number of arguments and formatting them , one of the following five continuations described below is called : - [ cont_s ] for outputting a string ( arguments : arg num , string , next pos ) - [ cont_a ] for performing a % a action ( arguments : arg num , fn , arg , next pos ) - [ cont_t ] for performing a % t action ( arguments : arg num , fn , next pos ) - [ cont_f ] for performing a flush action ( arguments : arg num , next pos ) - [ cont_m ] for performing a % ( action ( arguments : arg num , sfmt , next pos ) " arg num " is the index in array [ args ] of the next argument to [ printf ] . " next pos " is the position in [ fmt ] of the first character following the % conversion specification in [ fmt ] . [fmt] is the [printf] format string, and [pos] points to a [%] character in the format string. After consuming the appropriate number of arguments and formatting them, one of the following five continuations described below is called: - [cont_s] for outputting a string (arguments: arg num, string, next pos) - [cont_a] for performing a %a action (arguments: arg num, fn, arg, next pos) - [cont_t] for performing a %t action (arguments: arg num, fn, next pos) - [cont_f] for performing a flush action (arguments: arg num, next pos) - [cont_m] for performing a %( action (arguments: arg num, sfmt, next pos) "arg num" is the index in array [args] of the next argument to [printf]. "next pos" is the position in [fmt] of the first character following the %conversion specification in [fmt]. *) (* Note: here, rather than test explicitly against [Sformat.length fmt] to detect the end of the format, we use [Sformat.unsafe_get] and rely on the fact that we'll get a "null" character if we access one past the end of the string. These "null" characters are then caught by the [_ -> bad_conversion] clauses below. Don't do this at home, kids. *) let scan_format fmt args n pos cont_s cont_a cont_t cont_f cont_m = let get_arg spec n = Obj.magic (args.(Sformat.int_of_index (get_index spec n))) in let rec scan_positional n widths i = let got_spec spec i = scan_flags spec n widths i in scan_positional_spec fmt got_spec n i and scan_flags spec n widths i = match Sformat.unsafe_get fmt i with | '*' -> let got_spec wspec i = let (width : int) = get_arg wspec n in scan_flags spec (next_index wspec n) (width :: widths) i in scan_positional_spec fmt got_spec n (succ i) | '0'..'9' | '.' 
| '#' | '-' | ' ' | '+' -> scan_flags spec n widths (succ i) | _ -> scan_conv spec n widths i and scan_conv spec n widths i = match Sformat.unsafe_get fmt i with | '%' -> cont_s n "%" (succ i) | 's' | 'S' as conv -> let (x : string) = get_arg spec n in let x = if conv = 's' then x else "\"" ^ String.escaped x ^ "\"" in let s = (* Optimize for common case %s *) if i = succ pos then x else format_string (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | 'c' | 'C' as conv -> let (x : char) = get_arg spec n in let s = if conv = 'c' then String.make 1 x else "'" ^ Char.escaped x ^ "'" in cont_s (next_index spec n) s (succ i) | 'd' | 'i' | 'o' | 'u' | 'x' | 'X' | 'N' -> let (x : int) = get_arg spec n in let s = format_int (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | 'f' | 'e' | 'E' | 'g' | 'G' -> let (x : float) = get_arg spec n in let s = format_float (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | 'F' -> let (x : float) = get_arg spec n in let s = if widths = [] then Pervasives.string_of_float x else format_float_lexeme (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | 'B' | 'b' -> let (x : bool) = get_arg spec n in cont_s (next_index spec n) (string_of_bool x) (succ i) | 'a' -> let printer = get_arg spec n in If the printer spec is Spec_none , go on as usual . If the printer spec is Spec_index p , printer 's argument spec is Spec_index ( succ_index p ) . If the printer spec is Spec_index p, printer's argument spec is Spec_index (succ_index p). *) let n = Sformat.succ_index (get_index spec n) in let arg = get_arg Spec_none n in cont_a (next_index spec n) printer arg (succ i) | 't' -> let printer = get_arg spec n in cont_t (next_index spec n) printer (succ i) | 'l' | 'n' | 'L' as conv -> begin match Sformat.unsafe_get fmt (succ i) with | 'd' | 'i' | 'o' | 'u' | 'x' | 'X' -> let i = succ i in let s = match conv with | 'l' -> let (x : int32) = get_arg spec n in format_int32 (extract_format fmt pos i widths) x | 'n' -> let (x : nativeint) = get_arg spec n in format_nativeint (extract_format fmt pos i widths) x | _ -> let (x : int64) = get_arg spec n in format_int64 (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | _ -> let (x : int) = get_arg spec n in let s = format_int (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) end | ',' -> cont_s n "" (succ i) | '!' -> cont_f n (succ i) | '{' | '(' as conv (* ')' '}' *) -> let (xf : ('a, 'b, 'c, 'd, 'e, 'f) format6) = get_arg spec n in let i = succ i in let j = sub_format_for_printf conv fmt i in if conv = '{' (* '}' *) then (* Just print the format argument as a specification. *) cont_s (next_index spec n) (summarize_format_type xf) j else (* Use the format argument instead of the format specification. *) cont_m (next_index spec n) xf j | (* '(' *) ')' -> cont_s n "" (succ i) | conv -> bad_conversion_format fmt i conv in scan_positional n [] (succ pos) ;; let mkprintf to_s get_out outc outs flush k fmt = (* [out] is global to this definition of [pr], and must be shared by all its recursive calls (if any). 
*) let out = get_out fmt in let rec pr k n fmt v = let len = Sformat.length fmt in let rec doprn n i = if i >= len then Obj.magic (k out) else match Sformat.unsafe_get fmt i with | '%' -> scan_format fmt v n i cont_s cont_a cont_t cont_f cont_m | c -> outc out c; doprn n (succ i) and cont_s n s i = outs out s; doprn n i and cont_a n printer arg i = if to_s then outs out ((Obj.magic printer : unit -> _ -> string) () arg) else printer out arg; doprn n i and cont_t n printer i = if to_s then outs out ((Obj.magic printer : unit -> string) ()) else printer out; doprn n i and cont_f n i = flush out; doprn n i and cont_m n xf i = let m = Sformat.add_int_index (count_arguments_of_format xf) n in pr (Obj.magic (fun _ -> doprn m i)) n xf v in doprn n 0 in let kpr = pr k (Sformat.index_of_int 0) in kapr kpr fmt ;; let kfprintf k oc = mkprintf false (fun _ -> oc) output_char output_string flush k ;; let ifprintf oc = kapr (fun _ -> Obj.magic ignore);; let fprintf oc = kfprintf ignore oc;; let printf fmt = fprintf stdout fmt;; let eprintf fmt = fprintf stderr fmt;; let kbprintf k b = mkprintf false (fun _ -> b) Buffer.add_char Buffer.add_string ignore k ;; let bprintf b = kbprintf ignore b;; let get_buff fmt = let len = 2 * Sformat.length fmt in Buffer.create len ;; let get_contents b = let s = Buffer.contents b in Buffer.clear b; s ;; let get_cont k b = k (get_contents b);; let ksprintf k = mkprintf true get_buff Buffer.add_char Buffer.add_string ignore (get_cont k) ;; let kprintf = ksprintf;; let sprintf fmt = ksprintf (fun s -> s) fmt;; module CamlinternalPr = struct module Sformat = Sformat;; module Tformat = struct type ac = Ac.ac = { mutable ac_rglr : int; mutable ac_skip : int; mutable ac_rdrs : int; } ;; let ac_of_format = ac_of_format;; let sub_format = sub_format;; let summarize_format_type = summarize_format_type;; let scan_format = scan_format;; let kapr = kapr;; end ;; end ;;
null
https://raw.githubusercontent.com/jaked/ocamljs/378080ff1c8033bb15ed2bd29bf1443e301d7af8/src/stdlib/patches/3.11.2/printf.ml
ocaml
********************************************************************* Objective Caml the special exception on linking described in file ../LICENSE. ********************************************************************* Parses a string conversion to return the specified length and the padding direction. Pad a (sub) string into a blank string of length [p], on the right if [neg] is true, on the left otherwise. Extract a format string out of [fmt] between [start] and [stop] inclusive. ['*'] in the format are replaced by integers taken from the [widths] list. [extract_format] returns a string which is the string representation of the resulting format string. Should not happen since this is ill-typed. Returns the position of the next character following the meta format string, starting from position [i], inside a given format [fmt]. According to the character [conv], the meta format string is enclosed by the delimiters %{ and %} (when [conv = '{']) or %( and %) (when [conv = '(']). Hence, [sub_format] returns the index of the character following the [')'] or ['}'] that ends the meta format, according to the character [conv]. '{' | '$' -> scan_flags skip (succ i) *** PR#4321 Just get a regular argument, skipping the specification. To go on, find the index of the next char after the meta format. Add the meta specification to the summary anyway. Go on, starting at the closing brace to properly close the meta specification in the summary. Use the static format argument specification instead of the runtime format argument value: they must have the same type anyway. Returns a string that summarizes the typing information that a given format string contains. For instance, [summarize_format_type "A number %d\n"] is "%i". It also checks the well-formedness of the format string. Computes the number of arguments of a format (including the flag arguments if any). Just finishing a meta format: no additional argument to record. For printing only regular arguments have to be counted. Tail calling [f] No positional specification: tell so the caller, and go back to scanning the format from the original [i] position. Get the index of the next argument to printf, according to the given positional specification. Get the index of the actual argument to printf, according to its optional positional specification. Note: here, rather than test explicitly against [Sformat.length fmt] to detect the end of the format, we use [Sformat.unsafe_get] and rely on the fact that we'll get a "null" character if we access one past the end of the string. These "null" characters are then caught by the [_ -> bad_conversion] clauses below. Don't do this at home, kids. Optimize for common case %s ')' '}' '}' Just print the format argument as a specification. Use the format argument instead of the format specification. '(' [out] is global to this definition of [pr], and must be shared by all its recursive calls (if any).
* This file is part of ocamljs , OCaml to Javascript compiler * Copyright ( C ) 2007 - 9 Skydeck , Inc * Copyright ( C ) 2010 * * This library is free software ; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation ; either * version 2 of the License , or ( at your option ) any later version . * * This library is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU * Library General Public License for more details . * * You should have received a copy of the GNU Library General Public * License along with this library ; if not , write to the Free * Software Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , * MA 02111 - 1307 , USA * This file is part of ocamljs, OCaml to Javascript compiler * Copyright (C) 2007-9 Skydeck, Inc * Copyright (C) 2010 Jake Donham * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with this library; if not, write to the Free * Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, * MA 02111-1307, USA *) and , projet Cristal , INRIA Rocquencourt Copyright 1996 Institut National de Recherche en Informatique et en Automatique . All rights reserved . This file is distributed under the terms of the GNU Library General Public License , with $ I d : printf.ml 9412 2009 - 11 - 09 11:42:39Z weis $ external format_float: string -> float -> string = "caml_format_float" external format_int: string -> int -> string = "caml_format_int" external format_int32: string -> int32 -> string = "caml_int32_format" external format_nativeint: string -> nativeint -> string = "caml_nativeint_format" external format_int64: string -> int64 -> string = "caml_int64_format" module Sformat = struct type index;; external unsafe_index_of_int : int -> index = "%identity" ;; let index_of_int i = if i >= 0 then unsafe_index_of_int i else failwith ("Sformat.index_of_int: negative argument " ^ string_of_int i) ;; external int_of_index : index -> int = "%identity" ;; let add_int_index i idx = index_of_int (i + int_of_index idx);; let succ_index = add_int_index 1;; Literal position are one - based ( hence pred p instead of p ) . 
let index_of_literal_position p = index_of_int (pred p);; external length : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> int = "%string_length" ;; external get : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> int -> char = "%string_safe_get" ;; external unsafe_get : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> int -> char = "%string_unsafe_get" ;; external unsafe_to_string : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> string = "%identity" ;; let sub fmt idx len = String.sub (unsafe_to_string fmt) (int_of_index idx) len ;; let to_string fmt = sub fmt (unsafe_index_of_int 0) (length fmt) ;; end ;; let bad_conversion sfmt i c = invalid_arg ("Printf: bad conversion %" ^ String.make 1 c ^ ", at char number " ^ string_of_int i ^ " in format string ``" ^ sfmt ^ "''") ;; let bad_conversion_format fmt i c = bad_conversion (Sformat.to_string fmt) i c ;; let incomplete_format fmt = invalid_arg ("Printf: premature end of format string ``" ^ Sformat.to_string fmt ^ "''") ;; let parse_string_conversion sfmt = let rec parse neg i = if i >= String.length sfmt then (0, neg) else match String.unsafe_get sfmt i with | '1'..'9' -> (int_of_string (String.sub sfmt i (String.length sfmt - i - 1)), neg) | '-' -> parse true (succ i) | _ -> parse neg (succ i) in try parse false 1 with | Failure _ -> bad_conversion sfmt 0 's' ;; let pad_string pad_char p neg s i len = if p = len && i = 0 then s else if p <= len then String.sub s i len else let res = String.make p pad_char in if neg then String.blit s i res 0 len else String.blit s i res (p - len) len; res Format a string given a % s format , e.g. % 40s or % -20s . To do ? : ignore other flags ( # , + , etc ) . To do ?: ignore other flags (#, +, etc). *) let format_string sfmt s = let (p, neg) = parse_string_conversion sfmt in pad_string ' ' p neg s 0 (String.length s) ;; let extract_format fmt start stop widths = let skip_positional_spec start = match Sformat.unsafe_get fmt start with | '0'..'9' -> let rec skip_int_literal i = match Sformat.unsafe_get fmt i with | '0'..'9' -> skip_int_literal (succ i) | '$' -> succ i | _ -> start in skip_int_literal (succ start) | _ -> start in let start = skip_positional_spec (succ start) in let b = Buffer.create (stop - start + 10) in Buffer.add_char b '%'; let rec fill_format i widths = if i <= stop then match (Sformat.unsafe_get fmt i, widths) with | ('*', h :: t) -> Buffer.add_string b (string_of_int h); let i = skip_positional_spec (succ i) in fill_format i t | ('*', []) -> | (c, _) -> Buffer.add_char b c; fill_format (succ i) widths in fill_format start (List.rev widths); Buffer.contents b ;; let extract_format_float conv fmt start stop widths = let sfmt = extract_format fmt start stop widths in match conv with | 'F' -> sfmt.[String.length sfmt - 1] <- 'g'; sfmt | _ -> sfmt ;; let sub_format incomplete_format bad_conversion_format conv fmt i = let len = Sformat.length fmt in let rec sub_fmt c i = let rec sub j = if j >= len then incomplete_format fmt else match Sformat.get fmt j with | '%' -> sub_sub (succ j) | _ -> sub (succ j) and sub_sub j = if j >= len then incomplete_format fmt else match Sformat.get fmt j with | '(' | '{' as c -> let j = sub_fmt c (succ j) in sub (succ j) | '}' | ')' as c -> if c = close then succ j else bad_conversion_format fmt i c | _ -> sub (succ j) in sub i in sub_fmt conv i ;; let sub_format_for_printf conv = sub_format incomplete_format bad_conversion_format conv;; let iter_on_format_args fmt add_conv add_char = let lim = Sformat.length fmt - 1 in let rec scan_flags skip i = if i > lim then incomplete_format fmt else match 
Sformat.unsafe_get fmt i with | '*' -> scan_flags skip (add_conv skip i 'i') | '#' | '-' | ' ' | '+' -> scan_flags skip (succ i) | '_' -> scan_flags true (succ i) | '0'..'9' | '.' -> scan_flags skip (succ i) | _ -> scan_conv skip i and scan_conv skip i = if i > lim then incomplete_format fmt else match Sformat.unsafe_get fmt i with | '%' | '!' | ',' -> succ i | 's' | 'S' | '[' -> add_conv skip i 's' | 'c' | 'C' -> add_conv skip i 'c' | 'd' | 'i' |'o' | 'u' | 'x' | 'X' | 'N' -> add_conv skip i 'i' | 'f' | 'e' | 'E' | 'g' | 'G' | 'F' -> add_conv skip i 'f' | 'B' | 'b' -> add_conv skip i 'B' | 'a' | 'r' | 't' as conv -> add_conv skip i conv | 'l' | 'n' | 'L' as conv -> let j = succ i in if j > lim then add_conv skip i 'i' else begin match Sformat.get fmt j with | 'd' | 'i' | 'o' | 'u' | 'x' | 'X' -> add_char (add_conv skip i conv) 'i' | c -> add_conv skip i 'i' end | '{' as conv -> let i = add_conv skip i conv in let j = sub_format_for_printf conv fmt i in let rec loop i = if i < j - 2 then loop (add_char i (Sformat.get fmt i)) in loop i; scan_conv skip (j - 1) | '(' as conv -> scan_fmt (add_conv skip i conv) | '}' | ')' as conv -> add_conv skip i conv | conv -> bad_conversion_format fmt i conv and scan_fmt i = if i < lim then if Sformat.get fmt i = '%' then scan_fmt (scan_flags false (succ i)) else scan_fmt (succ i) else i in ignore (scan_fmt 0) ;; let summarize_format_type fmt = let len = Sformat.length fmt in let b = Buffer.create len in let add_char i c = Buffer.add_char b c; succ i in let add_conv skip i c = if skip then Buffer.add_string b "%_" else Buffer.add_char b '%'; add_char i c in iter_on_format_args fmt add_conv add_char; Buffer.contents b ;; module Ac = struct type ac = { mutable ac_rglr : int; mutable ac_skip : int; mutable ac_rdrs : int; } end ;; open Ac;; let ac_of_format fmt = let ac = { ac_rglr = 0; ac_skip = 0; ac_rdrs = 0; } in let incr_ac skip c = let inc = if c = 'a' then 2 else 1 in if c = 'r' then ac.ac_rdrs <- ac.ac_rdrs + 1; if skip then ac.ac_skip <- ac.ac_skip + inc else ac.ac_rglr <- ac.ac_rglr + inc in let add_conv skip i c = if c <> ')' && c <> '}' then incr_ac skip c; succ i and add_char i c = succ i in iter_on_format_args fmt add_conv add_char; ac ;; let count_arguments_of_format fmt = let ac = ac_of_format fmt in ac.ac_rglr ;; let list_iter_i f l = let rec loop i = function | [] -> () | x :: xs -> f i x; loop (succ i) xs in loop 0 l ;; ` ` Abstracting '' version of : returns a ( curried ) function that will print when totally applied . Note : in the following , we are careful not to be badly caught by the compiler optimizations for the representation of arrays . will print when totally applied. Note: in the following, we are careful not to be badly caught by the compiler optimizations for the representation of arrays. 
*) let kapr kpr fmt = match count_arguments_of_format fmt with | 0 -> kpr fmt [||] | 1 -> Obj.magic (fun x -> let a = Array.make 1 (Obj.repr 0) in a.(0) <- x; kpr fmt a) | 2 -> Obj.magic (fun x y -> let a = Array.make 2 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; kpr fmt a) | 3 -> Obj.magic (fun x y z -> let a = Array.make 3 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; a.(2) <- z; kpr fmt a) | 4 -> Obj.magic (fun x y z t -> let a = Array.make 4 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; a.(2) <- z; a.(3) <- t; kpr fmt a) | 5 -> Obj.magic (fun x y z t u -> let a = Array.make 5 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; a.(2) <- z; a.(3) <- t; a.(4) <- u; kpr fmt a) | 6 -> Obj.magic (fun x y z t u v -> let a = Array.make 6 (Obj.repr 0) in a.(0) <- x; a.(1) <- y; a.(2) <- z; a.(3) <- t; a.(4) <- u; a.(5) <- v; kpr fmt a) | nargs -> let rec loop i args = if i >= nargs then let a = Array.make nargs (Obj.repr 0) in list_iter_i (fun i arg -> a.(nargs - i - 1) <- arg) args; kpr fmt a else Obj.magic (fun x -> loop (succ i) (x :: args)) in loop 0 [] ;; type positional_specification = | Spec_none | Spec_index of Sformat.index ;; To scan an optional positional parameter specification , i.e. an integer followed by a [ $ ] . Calling [ got_spec ] with appropriate arguments , we ` ` return '' a positional specification and an index to go on scanning the [ fmt ] format at hand . Note that this is optimized for the regular case , i.e. no positional parameter , since in this case we juste ` ` return '' the constant [ Spec_none ] ; in case we have a positional parameter , we ` ` return '' a [ Spec_index ] [ positional_specification ] which a bit more costly . Note also that we do not support [ * $ ] specifications , since this would lead to type checking problems : a [ * $ ] positional specification means ` ` take the next argument to [ printf ] ( which must be an integer value ) '' , name this integer value $ n$ ; [ * $ ] now designates parameter $ n$. Unfortunately , the type of a parameter specified via a [ * $ ] positional specification should be the type of the corresponding argument to [ printf ] , hence this should be the type of the $ n$-th argument to [ printf ] with $ n$ being the { \em value } of the integer argument defining [ * ] ; we clearly can not statically guess the value of this parameter in the general case . Put it another way : this means type dependency , which is completely out of scope of the type algebra . i.e. an integer followed by a [$]. Calling [got_spec] with appropriate arguments, we ``return'' a positional specification and an index to go on scanning the [fmt] format at hand. Note that this is optimized for the regular case, i.e. no positional parameter, since in this case we juste ``return'' the constant [Spec_none]; in case we have a positional parameter, we ``return'' a [Spec_index] [positional_specification] which a bit more costly. Note also that we do not support [*$] specifications, since this would lead to type checking problems: a [*$] positional specification means ``take the next argument to [printf] (which must be an integer value)'', name this integer value $n$; [*$] now designates parameter $n$. Unfortunately, the type of a parameter specified via a [*$] positional specification should be the type of the corresponding argument to [printf], hence this should be the type of the $n$-th argument to [printf] with $n$ being the {\em value} of the integer argument defining [*]; we clearly cannot statically guess the value of this parameter in the general case. 
Put it another way: this means type dependency, which is completely out of scope of the Caml type algebra. *) let scan_positional_spec fmt got_spec n i = match Sformat.unsafe_get fmt i with | '0'..'9' as d -> let rec get_int_literal accu j = match Sformat.unsafe_get fmt j with | '0'..'9' as d -> get_int_literal (10 * accu + (int_of_char d - 48)) (succ j) | '$' -> if accu = 0 then failwith "printf: bad positional specification (0)." else got_spec (Spec_index (Sformat.index_of_literal_position accu)) (succ j) Not a positional specification : tell so the caller , and go back to scanning the format from the original [ i ] position we were called at first . scanning the format from the original [i] position we were called at first. *) | _ -> got_spec Spec_none i in get_int_literal (int_of_char d - 48) (succ i) | _ -> got_spec Spec_none i ;; let next_index spec n = match spec with | Spec_none -> Sformat.succ_index n | Spec_index _ -> n ;; let get_index spec n = match spec with | Spec_none -> n | Spec_index p -> p ;; Format a float argument as a valid . let format_float_lexeme = let valid_float_lexeme sfmt s = let l = String.length s in if l = 0 then "nan" else let add_dot sfmt s = s ^ "." in let rec loop i = if i >= l then add_dot sfmt s else match s.[i] with | '.' -> s | _ -> loop (i + 1) in loop 0 in (fun sfmt x -> let s = format_float sfmt x in match classify_float x with | FP_normal | FP_subnormal | FP_zero -> valid_float_lexeme sfmt s | FP_nan | FP_infinite -> s) ;; Decode a format string and act on it . [ fmt ] is the [ printf ] format string , and [ pos ] points to a [ % ] character in the format string . After consuming the appropriate number of arguments and formatting them , one of the following five continuations described below is called : - [ cont_s ] for outputting a string ( arguments : arg num , string , next pos ) - [ cont_a ] for performing a % a action ( arguments : arg num , fn , arg , next pos ) - [ cont_t ] for performing a % t action ( arguments : arg num , fn , next pos ) - [ cont_f ] for performing a flush action ( arguments : arg num , next pos ) - [ cont_m ] for performing a % ( action ( arguments : arg num , sfmt , next pos ) " arg num " is the index in array [ args ] of the next argument to [ printf ] . " next pos " is the position in [ fmt ] of the first character following the % conversion specification in [ fmt ] . [fmt] is the [printf] format string, and [pos] points to a [%] character in the format string. After consuming the appropriate number of arguments and formatting them, one of the following five continuations described below is called: - [cont_s] for outputting a string (arguments: arg num, string, next pos) - [cont_a] for performing a %a action (arguments: arg num, fn, arg, next pos) - [cont_t] for performing a %t action (arguments: arg num, fn, next pos) - [cont_f] for performing a flush action (arguments: arg num, next pos) - [cont_m] for performing a %( action (arguments: arg num, sfmt, next pos) "arg num" is the index in array [args] of the next argument to [printf]. "next pos" is the position in [fmt] of the first character following the %conversion specification in [fmt]. 
*) let scan_format fmt args n pos cont_s cont_a cont_t cont_f cont_m = let get_arg spec n = Obj.magic (args.(Sformat.int_of_index (get_index spec n))) in let rec scan_positional n widths i = let got_spec spec i = scan_flags spec n widths i in scan_positional_spec fmt got_spec n i and scan_flags spec n widths i = match Sformat.unsafe_get fmt i with | '*' -> let got_spec wspec i = let (width : int) = get_arg wspec n in scan_flags spec (next_index wspec n) (width :: widths) i in scan_positional_spec fmt got_spec n (succ i) | '0'..'9' | '.' | '#' | '-' | ' ' | '+' -> scan_flags spec n widths (succ i) | _ -> scan_conv spec n widths i and scan_conv spec n widths i = match Sformat.unsafe_get fmt i with | '%' -> cont_s n "%" (succ i) | 's' | 'S' as conv -> let (x : string) = get_arg spec n in let x = if conv = 's' then x else "\"" ^ String.escaped x ^ "\"" in let s = if i = succ pos then x else format_string (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | 'c' | 'C' as conv -> let (x : char) = get_arg spec n in let s = if conv = 'c' then String.make 1 x else "'" ^ Char.escaped x ^ "'" in cont_s (next_index spec n) s (succ i) | 'd' | 'i' | 'o' | 'u' | 'x' | 'X' | 'N' -> let (x : int) = get_arg spec n in let s = format_int (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | 'f' | 'e' | 'E' | 'g' | 'G' -> let (x : float) = get_arg spec n in let s = format_float (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | 'F' -> let (x : float) = get_arg spec n in let s = if widths = [] then Pervasives.string_of_float x else format_float_lexeme (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | 'B' | 'b' -> let (x : bool) = get_arg spec n in cont_s (next_index spec n) (string_of_bool x) (succ i) | 'a' -> let printer = get_arg spec n in If the printer spec is Spec_none , go on as usual . If the printer spec is Spec_index p , printer 's argument spec is Spec_index ( succ_index p ) . If the printer spec is Spec_index p, printer's argument spec is Spec_index (succ_index p). *) let n = Sformat.succ_index (get_index spec n) in let arg = get_arg Spec_none n in cont_a (next_index spec n) printer arg (succ i) | 't' -> let printer = get_arg spec n in cont_t (next_index spec n) printer (succ i) | 'l' | 'n' | 'L' as conv -> begin match Sformat.unsafe_get fmt (succ i) with | 'd' | 'i' | 'o' | 'u' | 'x' | 'X' -> let i = succ i in let s = match conv with | 'l' -> let (x : int32) = get_arg spec n in format_int32 (extract_format fmt pos i widths) x | 'n' -> let (x : nativeint) = get_arg spec n in format_nativeint (extract_format fmt pos i widths) x | _ -> let (x : int64) = get_arg spec n in format_int64 (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) | _ -> let (x : int) = get_arg spec n in let s = format_int (extract_format fmt pos i widths) x in cont_s (next_index spec n) s (succ i) end | ',' -> cont_s n "" (succ i) | '!' 
-> cont_f n (succ i) let (xf : ('a, 'b, 'c, 'd, 'e, 'f) format6) = get_arg spec n in let i = succ i in let j = sub_format_for_printf conv fmt i in cont_s (next_index spec n) (summarize_format_type xf) j else cont_m (next_index spec n) xf j cont_s n "" (succ i) | conv -> bad_conversion_format fmt i conv in scan_positional n [] (succ pos) ;; let mkprintf to_s get_out outc outs flush k fmt = let out = get_out fmt in let rec pr k n fmt v = let len = Sformat.length fmt in let rec doprn n i = if i >= len then Obj.magic (k out) else match Sformat.unsafe_get fmt i with | '%' -> scan_format fmt v n i cont_s cont_a cont_t cont_f cont_m | c -> outc out c; doprn n (succ i) and cont_s n s i = outs out s; doprn n i and cont_a n printer arg i = if to_s then outs out ((Obj.magic printer : unit -> _ -> string) () arg) else printer out arg; doprn n i and cont_t n printer i = if to_s then outs out ((Obj.magic printer : unit -> string) ()) else printer out; doprn n i and cont_f n i = flush out; doprn n i and cont_m n xf i = let m = Sformat.add_int_index (count_arguments_of_format xf) n in pr (Obj.magic (fun _ -> doprn m i)) n xf v in doprn n 0 in let kpr = pr k (Sformat.index_of_int 0) in kapr kpr fmt ;; let kfprintf k oc = mkprintf false (fun _ -> oc) output_char output_string flush k ;; let ifprintf oc = kapr (fun _ -> Obj.magic ignore);; let fprintf oc = kfprintf ignore oc;; let printf fmt = fprintf stdout fmt;; let eprintf fmt = fprintf stderr fmt;; let kbprintf k b = mkprintf false (fun _ -> b) Buffer.add_char Buffer.add_string ignore k ;; let bprintf b = kbprintf ignore b;; let get_buff fmt = let len = 2 * Sformat.length fmt in Buffer.create len ;; let get_contents b = let s = Buffer.contents b in Buffer.clear b; s ;; let get_cont k b = k (get_contents b);; let ksprintf k = mkprintf true get_buff Buffer.add_char Buffer.add_string ignore (get_cont k) ;; let kprintf = ksprintf;; let sprintf fmt = ksprintf (fun s -> s) fmt;; module CamlinternalPr = struct module Sformat = Sformat;; module Tformat = struct type ac = Ac.ac = { mutable ac_rglr : int; mutable ac_skip : int; mutable ac_rdrs : int; } ;; let ac_of_format = ac_of_format;; let sub_format = sub_format;; let summarize_format_type = summarize_format_type;; let scan_format = scan_format;; let kapr = kapr;; end ;; end ;;
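The printf.ml record above implements the %a/%t printer conversions and the width/justification flags consumed by format_string. A short usage sketch of how those show up at a call site (my own example, not taken from the record); with string-building printf functions, a %a printer receives () as its first argument:

(* For sprintf-style functions, a %a printer has type unit -> 'a -> string. *)
let pair_printer () (x, y) = Printf.sprintf "(%d, %d)" x y

let () =
  print_endline (Printf.sprintf "point = %a" pair_printer (3, 4));
  (* Width and '-' (left-justify) flags, handled by format_string above. *)
  Printf.printf "[%5d] [%-5d]\n" 42 42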
ea06eaa969b85da6c5a43aba0207efb6845542bcd732c7cd5c8431b3f0994ae4
camlspotter/ocamloscope.2
ocamldoc.ml
open Spotlib.Spot open List let get_doc (atrs : Typedtree.attributes) : (string * Location.t) list = let open Location in let open Parsetree in let a = function | {txt="ocaml.doc"}, payload -> begin match payload with | PStr [{ pstr_loc= loc ; pstr_desc= Pstr_eval ({pexp_desc= Pexp_constant (Pconst_string (s, _))}, _)}] -> begin match s with | "/*" -> (* (**/**) *) None | _ -> Some (s, loc) end | _ -> assert false end | _ -> None in filter_map a atrs let extract_structure str = let docs = ref [] in let module E = Attr.Make(struct let f attrs = docs := get_doc attrs @ !docs end) in E.iter_structure str; !docs let extract_signature sg = let docs = ref [] in let module E = Attr.Make(struct let f attrs = docs := get_doc attrs @ !docs end) in E.iter_signature sg; !docs module DocSet = Set.Make(struct type t = string * Location.t let compare = compare end) let partition_ok_and_ambiguous ds = let all, amb = fold_left (fun (all, amb) d -> if DocSet.mem d all then (all, DocSet.add d amb) else (DocSet.add d all, amb)) (DocSet.empty, DocSet.empty) ds in (DocSet.diff all amb, amb) let warn_ambiguous ds = flip DocSet.iter ds & fun (_s,l) -> !!% "Warning: %a: ambiguous OCamlDoc comment was ignored@." Location.print_compact l module Re = Ppx_orakuda.Regexp.Re_pcre open Re.Literal let normalize s = let ss = s |> {s|(^\s+|\s+$)//g|s} (* Remove start and end spaces *) XXX Bug of ppx_orakuda : \r and are not accepted in { s| .. |s } ! ! |> Re.split {m|\.\s+|m} (* split into sentences *) in let rec get len = function | _ when len <= 0 -> [ "..." ] | [] -> [] | s::ss -> s :: get (len - String.length s) ss in let s = String.concat ". " & get 140 ss in if String.length s <= 280 then s else String.sub s 0 277 ^ "..."
null
https://raw.githubusercontent.com/camlspotter/ocamloscope.2/49b5977a283cdd373021d41cb3620222351a2efe/ocamldoc.ml
ocaml
(**/* Remove start and end spaces split into sentences
open Spotlib.Spot open List let get_doc (atrs : Typedtree.attributes) : (string * Location.t) list = let open Location in let open Parsetree in let a = function | {txt="ocaml.doc"}, payload -> begin match payload with | PStr [{ pstr_loc= loc ; pstr_desc= Pstr_eval ({pexp_desc= Pexp_constant (Pconst_string (s, _))}, _)}] -> begin match s with None | _ -> Some (s, loc) end | _ -> assert false end | _ -> None in filter_map a atrs let extract_structure str = let docs = ref [] in let module E = Attr.Make(struct let f attrs = docs := get_doc attrs @ !docs end) in E.iter_structure str; !docs let extract_signature sg = let docs = ref [] in let module E = Attr.Make(struct let f attrs = docs := get_doc attrs @ !docs end) in E.iter_signature sg; !docs module DocSet = Set.Make(struct type t = string * Location.t let compare = compare end) let partition_ok_and_ambiguous ds = let all, amb = fold_left (fun (all, amb) d -> if DocSet.mem d all then (all, DocSet.add d amb) else (DocSet.add d all, amb)) (DocSet.empty, DocSet.empty) ds in (DocSet.diff all amb, amb) let warn_ambiguous ds = flip DocSet.iter ds & fun (_s,l) -> !!% "Warning: %a: ambiguous OCamlDoc comment was ignored@." Location.print_compact l module Re = Ppx_orakuda.Regexp.Re_pcre open Re.Literal let normalize s = let ss = XXX Bug of ppx_orakuda : \r and are not accepted in { s| .. |s } ! ! in let rec get len = function | _ when len <= 0 -> [ "..." ] | [] -> [] | s::ss -> s :: get (len - String.length s) ss in let s = String.concat ". " & get 140 ss in if String.length s <= 280 then s else String.sub s 0 277 ^ "..."
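The ocamldoc.ml record above collects ocaml.doc attributes and then normalizes each docstring, trimming whitespace and capping the summary at roughly 280 characters. A stand-alone sketch of that truncation step without the ppx_orakuda regex dependency (the function name and exact limit are assumptions of mine, not the module's API):

(* Trim surrounding whitespace, then cap the summary with an ellipsis. *)
let truncate_doc ?(limit = 280) s =
  let s = String.trim s in
  if String.length s <= limit then s
  else String.sub s 0 (limit - 3) ^ "..."

let () =
  print_endline (truncate_doc "   An OCamlDoc comment describing a value.   ")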
b2795d161bde245817525a3174e2c88123017a3d4c44bdba60baca42a70c26e7
Piervit/gufo
gufoModList.ml
This file is part of . is free software : you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or ( at your option ) any later version . is distributed in the hope that it will be useful , but WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public License for more details . You should have received a copy of the GNU General Public License along with . If not , see < / > . Author : This file is part of Gufo. Gufo is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Gufo is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Gufo. If not, see </>. Author: Pierre Vittet *) (*the system module for List. *) open Gufo.MCore open GenUtils let listtypes = IntMap.empty The code of iter and filter is directly in , because modules can not directly call to avoid circular dependancies . directly call GufoEngine to avoid circular dependancies.*) let iter args scope = assert false let filter args scope = assert false let map args scope = assert false let fold_left args scope = assert false let length args scope = match args with | [MOSimple_val (MOList_val mtvlist);] -> MOSimple_val (MOBase_val (MOTypeIntVal (List.length mtvlist))) | _ -> assert false let topvars = [ { mosmv_name = "iter"; mosmv_description = "Iterate over the elements of the list."; mosmv_intname = 2; mosmv_type = MOFun_type ([ MOFun_type([MOAll_type 1], MOUnit_type) ; MOList_type( MOAll_type 1 ) ], MOUnit_type) ; mosmv_action= iter; }; { mosmv_name = "length"; mosmv_description = "Return the size of the given list."; mosmv_intname = 1; mosmv_type = MOFun_type ([ MOList_type( MOAll_type 1 )], MOBase_type (MTypeInt)) ; mosmv_action= length; }; { mosmv_name = "filter"; mosmv_description = "Return a new list with only the element of the list respecting the predicate."; mosmv_intname = 3; mosmv_type = MOFun_type ([MOFun_type ([MOAll_type 1], MOBase_type (MTypeBool)); MOList_type( MOAll_type 1 ); ], MOList_type( MOAll_type 1 )) ; mosmv_action= filter; }; { mosmv_name = "map"; mosmv_description = "Apply the given function to elements of the list."; mosmv_intname = 4; mosmv_type = MOFun_type ([MOFun_type ([MOAll_type 1], MOAll_type 2 ); MOList_type( MOAll_type 1 ); ], MOList_type( MOAll_type 2 )) ; mosmv_action= map; }; { mosmv_name = "fold_left"; mosmv_description = ""; mosmv_intname = 4; mosmv_type = MOFun_type ([ MOFun_type ([MOAll_type 2; MOAll_type 1], MOAll_type 2 ); MOAll_type 2 ; MOList_type( MOAll_type 1 ); ], MOAll_type 2 ) ; mosmv_action= fold_left; }; ] let mosysmodule = { mosm_name= "List"; mosm_types = listtypes; mosm_typstr2int= StringMap.empty; mosm_typstrfield2int= StringMap.empty; mosm_typstrfield2inttype = StringMap.empty; mosm_typfield2inttype = IntMap.empty; mosm_topvar= List.fold_left (fun acc vars -> IntMap.add vars.mosmv_intname vars acc) IntMap.empty topvars; mosm_varstr2int= List.fold_left (fun acc vars -> StringMap.add vars.mosmv_name vars.mosmv_intname acc) StringMap.empty topvars; }
null
https://raw.githubusercontent.com/Piervit/gufo/cb9b4f88d6e887ccfe50ea3bc7214486f2df3078/src/gufoModList.ml
ocaml
the system module for List.
This file is part of . is free software : you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or ( at your option ) any later version . is distributed in the hope that it will be useful , but WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public License for more details . You should have received a copy of the GNU General Public License along with . If not , see < / > . Author : This file is part of Gufo. Gufo is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Gufo is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Gufo. If not, see </>. Author: Pierre Vittet *) open Gufo.MCore open GenUtils let listtypes = IntMap.empty The code of iter and filter is directly in , because modules can not directly call to avoid circular dependancies . directly call GufoEngine to avoid circular dependancies.*) let iter args scope = assert false let filter args scope = assert false let map args scope = assert false let fold_left args scope = assert false let length args scope = match args with | [MOSimple_val (MOList_val mtvlist);] -> MOSimple_val (MOBase_val (MOTypeIntVal (List.length mtvlist))) | _ -> assert false let topvars = [ { mosmv_name = "iter"; mosmv_description = "Iterate over the elements of the list."; mosmv_intname = 2; mosmv_type = MOFun_type ([ MOFun_type([MOAll_type 1], MOUnit_type) ; MOList_type( MOAll_type 1 ) ], MOUnit_type) ; mosmv_action= iter; }; { mosmv_name = "length"; mosmv_description = "Return the size of the given list."; mosmv_intname = 1; mosmv_type = MOFun_type ([ MOList_type( MOAll_type 1 )], MOBase_type (MTypeInt)) ; mosmv_action= length; }; { mosmv_name = "filter"; mosmv_description = "Return a new list with only the element of the list respecting the predicate."; mosmv_intname = 3; mosmv_type = MOFun_type ([MOFun_type ([MOAll_type 1], MOBase_type (MTypeBool)); MOList_type( MOAll_type 1 ); ], MOList_type( MOAll_type 1 )) ; mosmv_action= filter; }; { mosmv_name = "map"; mosmv_description = "Apply the given function to elements of the list."; mosmv_intname = 4; mosmv_type = MOFun_type ([MOFun_type ([MOAll_type 1], MOAll_type 2 ); MOList_type( MOAll_type 1 ); ], MOList_type( MOAll_type 2 )) ; mosmv_action= map; }; { mosmv_name = "fold_left"; mosmv_description = ""; mosmv_intname = 4; mosmv_type = MOFun_type ([ MOFun_type ([MOAll_type 2; MOAll_type 1], MOAll_type 2 ); MOAll_type 2 ; MOList_type( MOAll_type 1 ); ], MOAll_type 2 ) ; mosmv_action= fold_left; }; ] let mosysmodule = { mosm_name= "List"; mosm_types = listtypes; mosm_typstr2int= StringMap.empty; mosm_typstrfield2int= StringMap.empty; mosm_typstrfield2inttype = StringMap.empty; mosm_typfield2inttype = IntMap.empty; mosm_topvar= List.fold_left (fun acc vars -> IntMap.add vars.mosmv_intname vars acc) IntMap.empty topvars; mosm_varstr2int= List.fold_left (fun acc vars -> StringMap.add vars.mosmv_name vars.mosmv_intname acc) StringMap.empty topvars; }
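GufoModList.ml above registers its toplevel values by folding a list of records into IntMap/StringMap lookup tables. The same registration pattern, reduced to the standard library (the record type and sample entries below are illustrative assumptions, not gufo's own definitions):

module StringMap = Map.Make (String)

type entry = { name : string; id : int }

(* Fold a list of named entries into a name -> id lookup table. *)
let register entries =
  List.fold_left (fun acc e -> StringMap.add e.name e.id acc) StringMap.empty entries

let () =
  let m = register [ { name = "iter"; id = 2 }; { name = "length"; id = 1 } ] in
  Printf.printf "length -> %d\n" (StringMap.find "length" m)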
9db425db53e2a2ce592991e9783d36c2d8e2a16a6c9527ef5890d362f41fb297
bitnomial/prometheus
Scrape.hs
{-# LANGUAGE OverloadedStrings #-} module System.Metrics.Prometheus.Http.Scrape ( Path, serveMetrics, serveMetricsT, prometheusApp, ) where import Control.Applicative ((<$>)) import Control.Monad.IO.Class ( MonadIO, liftIO, ) import Data.Text (Text) import Network.HTTP.Types ( hContentType, methodGet, status200, status404, ) import Network.Wai ( Application, Request, Response, pathInfo, requestMethod, responseBuilder, responseLBS, ) import Network.Wai.Handler.Warp (Port, run) import System.Metrics.Prometheus.Concurrent.RegistryT ( RegistryT, sample, ) import System.Metrics.Prometheus.Encode.Text (encodeMetrics) import System.Metrics.Prometheus.Registry (RegistrySample) -- | The HTTP web route on which to serve data -- -- For example: -- * @:9090 / metrics@ should use a path of * @/@ should use a path of @[]@. type Path = [Text] serveMetrics :: MonadIO m => Port -> Path -> IO RegistrySample -> m () serveMetrics port path = liftIO . run port . prometheusApp path serveMetricsT :: MonadIO m => Port -> Path -> RegistryT m () serveMetricsT port path = liftIO . serveMetrics port path =<< sample prometheusApp :: Path -> IO RegistrySample -> Application prometheusApp path runSample request respond | isPrometheusRequest path request = respond =<< prometheusResponse <$> runSample | otherwise = respond response404 where prometheusResponse = responseBuilder status200 headers . encodeMetrics headers = [(hContentType, "text/plain; version=0.0.4")] response404 :: Response response404 = responseLBS status404 header404 body404 where header404 = [(hContentType, "text/plain")] body404 = "404" isPrometheusRequest :: Path -> Request -> Bool isPrometheusRequest path request = isGet && matchesPath where matchesPath = pathInfo request == path isGet = requestMethod request == methodGet
null
https://raw.githubusercontent.com/bitnomial/prometheus/b7d3160e2a4d971fe03bef6b43fe8bf15a9c9349/src/System/Metrics/Prometheus/Http/Scrape.hs
haskell
# LANGUAGE OverloadedStrings # | The HTTP web route on which to serve data For example:
module System.Metrics.Prometheus.Http.Scrape ( Path, serveMetrics, serveMetricsT, prometheusApp, ) where import Control.Applicative ((<$>)) import Control.Monad.IO.Class ( MonadIO, liftIO, ) import Data.Text (Text) import Network.HTTP.Types ( hContentType, methodGet, status200, status404, ) import Network.Wai ( Application, Request, Response, pathInfo, requestMethod, responseBuilder, responseLBS, ) import Network.Wai.Handler.Warp (Port, run) import System.Metrics.Prometheus.Concurrent.RegistryT ( RegistryT, sample, ) import System.Metrics.Prometheus.Encode.Text (encodeMetrics) import System.Metrics.Prometheus.Registry (RegistrySample) * @:9090 / metrics@ should use a path of * @/@ should use a path of @[]@. type Path = [Text] serveMetrics :: MonadIO m => Port -> Path -> IO RegistrySample -> m () serveMetrics port path = liftIO . run port . prometheusApp path serveMetricsT :: MonadIO m => Port -> Path -> RegistryT m () serveMetricsT port path = liftIO . serveMetrics port path =<< sample prometheusApp :: Path -> IO RegistrySample -> Application prometheusApp path runSample request respond | isPrometheusRequest path request = respond =<< prometheusResponse <$> runSample | otherwise = respond response404 where prometheusResponse = responseBuilder status200 headers . encodeMetrics headers = [(hContentType, "text/plain; version=0.0.4")] response404 :: Response response404 = responseLBS status404 header404 body404 where header404 = [(hContentType, "text/plain")] body404 = "404" isPrometheusRequest :: Path -> Request -> Bool isPrometheusRequest path request = isGet && matchesPath where matchesPath = pathInfo request == path isGet = requestMethod request == methodGet
1c3e578bf73cd1a46afc6b90f4525a13f693448a919fc9b4679fe665f0bcad1d
luminus-framework/luminus-template
formats.clj
(ns <<project-ns>>.middleware.formats (:require [luminus-transit.time :as time] [muuntaja.core :as m])) (def instance (m/create (-> m/default-options (update-in [:formats "application/transit+json" :decoder-opts] (partial merge time/time-deserialization-handlers)) (update-in [:formats "application/transit+json" :encoder-opts] (partial merge time/time-serialization-handlers)))))
null
https://raw.githubusercontent.com/luminus-framework/luminus-template/3278aa727cef0a173ed3ca722dfd6afa6b4bbc8f/resources/leiningen/new/luminus/core/src/formats.clj
clojure
(ns <<project-ns>>.middleware.formats (:require [luminus-transit.time :as time] [muuntaja.core :as m])) (def instance (m/create (-> m/default-options (update-in [:formats "application/transit+json" :decoder-opts] (partial merge time/time-deserialization-handlers)) (update-in [:formats "application/transit+json" :encoder-opts] (partial merge time/time-serialization-handlers)))))
b1c547a929595ce0ac16fa2a7132db63cbadff862127ef169ad4c8e44b190dd3
pilosus/pip-license-checker
data.clj
Copyright © 2020 - 2023 ;; ;; This program and the accompanying materials are made available under the terms of the Eclipse Public License 2.0 which is available at ;; -2.0. ;; This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse ;; Public License, v. 2.0 are satisfied: GNU General Public License as published by the Free Software Foundation , either version 2 of the License , or ( at your option ) any later version , with the GNU Classpath Exception which is available at . ;; SPDX - License - Identifier : EPL-2.0 OR GPL-2.0 WITH Classpath - exception-2.0 (ns pip-license-checker.data "Data structures and helper functions for them" (:gen-class)) (defrecord Log ;; Log representation keyword , one of : : error , : info , : debug string of a logger name , e.g. " PyPI::version " message ;; string ]) (defrecord License [name ;; nilable String type ;; nilable String logs ;; nilable vector of Log ]) (defrecord Requirement [name ;; nilable String version ;; nilable String nilable vector of vectors of format [ op , version ] ; nil for non - Python ]) (defrecord PyPiProject PyPI project as represented on /<project-name > [status ;; keyword requirement ;; Requirement rec api-response ;; nilable parsed JSON license ;; License rec logs ;; nilable vector of Log ]) (defrecord Dependency ;; General representation of dependency - PyPI project or external dep [requirement ;; Requirement rec license ;; License rec logs ;; nilable vector of Log ]) ;; Processed entities (defrecord ReportDependency [name ;; nilable String version ;; nilable String ]) (defrecord ReportLicense [name ;; nilable String type ;; nilable String ]) (defrecord ReportItem [dependency ;; ReportDependency license ;; ReportLicense misc ;; nilable String ]) (defrecord ReportHeader [items ;; list of String totals ;; list of String ]) (defrecord Report nilable list of ReportHeader list of ReportItem totals ;; nilalbe Map of String (license types): Integer (frequencies) nilable list of ( license types ) ])
null
https://raw.githubusercontent.com/pilosus/pip-license-checker/dada0db8c57df06ed8919fcbb0c9633c1cba492c/src/pip_license_checker/data.clj
clojure
This program and the accompanying materials are made available under the -2.0. Public License, v. 2.0 are satisfied: GNU General Public License as published by Log representation string nilable String nilable String nilable vector of Log nilable String nilable String nil for non - Python keyword Requirement rec nilable parsed JSON License rec nilable vector of Log General representation of dependency - PyPI project or external dep Requirement rec License rec nilable vector of Log Processed entities nilable String nilable String nilable String nilable String ReportDependency ReportLicense nilable String list of String list of String nilalbe Map of String (license types): Integer (frequencies)
Copyright © 2020 - 2023 terms of the Eclipse Public License 2.0 which is available at This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse the Free Software Foundation , either version 2 of the License , or ( at your option ) any later version , with the GNU Classpath Exception which is available at . SPDX - License - Identifier : EPL-2.0 OR GPL-2.0 WITH Classpath - exception-2.0 (ns pip-license-checker.data "Data structures and helper functions for them" (:gen-class)) (defrecord Log keyword , one of : : error , : info , : debug string of a logger name , e.g. " PyPI::version " ]) (defrecord License ]) (defrecord Requirement ]) (defrecord PyPiProject PyPI project as represented on /<project-name > ]) (defrecord Dependency ]) (defrecord ReportDependency ]) (defrecord ReportLicense ]) (defrecord ReportItem ]) (defrecord ReportHeader ]) (defrecord Report nilable list of ReportHeader list of ReportItem nilable list of ( license types ) ])
8fa9a82607c7ea8e9339e43ebc55c09cd8975ddd8a6c16e26db8df14bb875fd6
capnproto/capnp-ocaml
methods.ml
[@@@ocaml.warning "-3"] module CamlBytes = Bytes module List = Base.List module Queue = Base.Queue module Bytes = Base.Bytes module Int64 = Base.Int64 module IO = Capnp_unix.IO module Codecs = Capnp.Codecs let message_of_builder = Capnp.BytesMessage.StructStorage.message_of_builder (* This is a wrapper for writing to a file descriptor and simultaneously counting the number of bytes written. *) module CountingOutputStream = struct type t = { fd : Unix.file_descr; mutable throughput : int; } let rec write chan ~buf ~pos ~len = try let bytes_written = UnixLabels.single_write_substring ~buf ~pos ~len chan.fd in let () = chan.throughput <- chan.throughput + bytes_written in bytes_written with Unix.Unix_error (EINTR, _, _) -> write chan ~buf ~pos ~len let wrap_write_context ~compression stream = IO.WriteContext.create ~write ~compression stream end module type BENCHMARK_SIG = sig val sync_client : input_fd:Unix.file_descr -> output_fd:Unix.file_descr -> compression:Codecs.compression_t -> iters:int -> int val async_client : input_fd:Unix.file_descr -> output_fd:Unix.file_descr -> compression:Codecs.compression_t -> iters:int -> int val server : input_fd:Unix.file_descr -> output_fd:Unix.file_descr -> compression:Codecs.compression_t -> iters:int -> int val pass_by_object : iters:int -> int val pass_by_bytes : compression:Codecs.compression_t -> iters:int -> int end let rec select ~read ~write = try UnixLabels.select ~read ~write ~timeout:(-1.0) (* (never timeout) *) with Unix.Unix_error (EINTR, _, _) -> select ~read ~write module Benchmark (TestCase : TestCaseSig.TEST_CASE) (RequestReader : TestCaseSig.READER with type struct_t = TestCase.request_t) (ResponseReader : TestCaseSig.READER with type struct_t = TestCase.response_t) : BENCHMARK_SIG = struct (* [sync_client] issues a randomized request and waits for the response, looping up to the specified number of iterations. *) let sync_client ~(input_fd : Unix.file_descr) ~(output_fd : Unix.file_descr) ~(compression : Codecs.compression_t) ~(iters : int) : int = let in_context = IO.create_read_context_for_fd ~compression input_fd in let out_stream = { CountingOutputStream.fd = output_fd; CountingOutputStream.throughput = 0; } in let out_context = CountingOutputStream.wrap_write_context ~compression out_stream in for _i = 0 to iters - 1 do let (request, expectation) = TestCase.setup_request () in let req_message = message_of_builder request in IO.WriteContext.write_message out_context req_message; match IO.ReadContext.read_message in_context with | Some resp_message -> let response = ResponseReader.of_message resp_message in if not (TestCase.check_response response expectation) then failwith "incorrect response." else () | None -> failwith "EOF before message was received." done; out_stream.CountingOutputStream.throughput (* [async_client] issues randomized requests in a pipelined manner, matching up the corresponding responses asynchronously. Unlike the capnproto C++ benchmark, this runs in a single thread and uses a [select] loop to determine appropriate times to write and read. 
*) let async_client ~(input_fd : Unix.file_descr) ~(output_fd : Unix.file_descr) ~(compression : Codecs.compression_t) ~(iters : int) : int = let () = Unix.set_nonblock output_fd in let in_context = IO.create_read_context_for_fd ~compression input_fd in let out_stream = { CountingOutputStream.fd = output_fd; CountingOutputStream.throughput = 0; } in let out_context = CountingOutputStream.wrap_write_context ~compression out_stream in let expectations = Queue.create () in let num_sent = ref 0 in let final_send_complete = ref false in while !num_sent < iters || (not (Queue.is_empty expectations)) do let write_watch_fds = if !final_send_complete then [] else [output_fd] in let (ready_read, ready_write, _) = select ~read:[input_fd] ~write:write_watch_fds ~except:[input_fd] in if not (List.is_empty ready_read) then begin let (_ : int) = IO.ReadContext.read in_context in let rec loop () = match IO.ReadContext.dequeue_message in_context with | Some resp_message -> let response = ResponseReader.of_message resp_message in let expect = Queue.dequeue_exn expectations in if not (TestCase.check_response response expect) then failwith "incorrect response." else loop () | None -> () in loop () end; if not (List.is_empty ready_write) then begin begin try while IO.WriteContext.write out_context > 0 do () done with | Unix.Unix_error (Unix.EAGAIN, _, _) | Unix.Unix_error (Unix.EWOULDBLOCK, _, _) -> () end; let bytes_remaining = IO.WriteContext.bytes_remaining out_context in if !num_sent = iters then if bytes_remaining = 0 then final_send_complete := true else () A large queue is n't actually helpful here , it just increases GC pressure . GC pressure. *) else if Queue.length expectations < 4 then begin let (request, expect) = TestCase.setup_request () in let req_message = message_of_builder request in IO.WriteContext.enqueue_message out_context req_message; Queue.enqueue expectations expect; num_sent := !num_sent + 1 end end done; out_stream.CountingOutputStream.throughput (* [server] receives incoming requests one at a time, immediately writing a response for each request in turn. *) let server ~(input_fd : Unix.file_descr) ~(output_fd : Unix.file_descr) ~(compression : Codecs.compression_t) ~(iters : int) : int = let in_context = IO.create_read_context_for_fd ~compression input_fd in let out_stream = { CountingOutputStream.fd = output_fd; CountingOutputStream.throughput = 0; } in let out_context = CountingOutputStream.wrap_write_context ~compression out_stream in for _i = 0 to iters - 1 do match IO.ReadContext.read_message in_context with | Some req_message -> let request = RequestReader.of_message req_message in let response = TestCase.handle_request request in let resp_message = message_of_builder response in IO.WriteContext.write_message out_context resp_message | None -> failwith "EOF before all messages were read." done; out_stream.CountingOutputStream.throughput [ pass_by_object ] constructs a randomized request and generates a response for the request , looping up to the specified number of iterations . Everything happens synchronously in one process , and no serialization takes place . the request, looping up to the specified number of iterations. Everything happens synchronously in one process, and no serialization takes place. 
*) let pass_by_object ~(iters : int) : int = let object_size_counter = ref 0 in for _i = 0 to iters - 1 do let (req_builder, expectation) = TestCase.setup_request () in let resp_builder = TestCase.handle_request (Capnp.BytesMessage.StructStorage.reader_of_builder req_builder) in if not (TestCase.check_response (Capnp.BytesMessage.StructStorage.reader_of_builder resp_builder) expectation) then failwith "incorrect response." else (); object_size_counter := !object_size_counter + (Capnp.BytesMessage.Message.total_size (message_of_builder req_builder)) + (Capnp.BytesMessage.Message.total_size (message_of_builder resp_builder)) done; !object_size_counter [ pass_by_bytes ] constructs a randomized request and generates a response for the request , looping up to the specified number of iterations . Everything happens synchronously in one process . The request and response are converted from objects to strings and back , in both directions . the request, looping up to the specified number of iterations. Everything happens synchronously in one process. The request and response are converted from objects to strings and back, in both directions. *) let pass_by_bytes ~(compression : Codecs.compression_t) ~(iters : int) = let throughput = ref 0 in for _i = 0 to iters - 1 do let (req_builder, expectation) = TestCase.setup_request () in let flattened_request = let req_message = message_of_builder req_builder in Codecs.serialize ~compression req_message in throughput := !throughput + (String.length flattened_request); let req_stream = Codecs.FramedStream.of_string ~compression flattened_request in let flattened_response = match Codecs.FramedStream.get_next_frame req_stream with | Result.Ok req_message -> let resp_builder = TestCase.handle_request (RequestReader.of_message req_message) in let resp_message = message_of_builder resp_builder in Codecs.serialize ~compression resp_message | Result.Error _ -> failwith "failed to decode complete request." in throughput := !throughput + (String.length flattened_response); let resp_stream = Codecs.FramedStream.of_string ~compression flattened_response in match Codecs.FramedStream.get_next_frame resp_stream with | Result.Ok resp_message -> if not (TestCase.check_response (ResponseReader.of_message resp_message) expectation) then failwith "incorrect response." else () | Result.Error _ -> failwith "failed to decode complete response." done; !throughput end let rec read fd ~buf = try Unix.read fd buf 0 (Bytes.length buf) with Unix.Unix_error (EINTR, _, _) -> read fd ~buf let rec write fd ~buf = try Unix.single_write fd buf 0 (Bytes.length buf) with Unix.Unix_error (EINTR, _, _) -> write fd ~buf (* [pass_by_pipe] forks off a child (client) process connected to the current (server) process by a pipe. The [client_func] and [server_func] are then used to carry out a benchmark method using the provided pipe transport. *) let pass_by_pipe client_func server_func : int = let (client_to_server_read, client_to_server_write) = Unix.pipe () in let (server_to_client_read, server_to_client_write) = Unix.pipe () in match Unix.fork () with | err when err < 0 -> failwith "fork failed!" 
| 0 -> (* child/client *) Unix.close client_to_server_read; Unix.close server_to_client_write; let throughput = client_func ~input_fd:server_to_client_read ~output_fd:client_to_server_write in let tp64 = Int64.of_int throughput in let buf = CamlBytes.create 8 in let () = EndianBytes.LittleEndian.set_int64 buf 0 tp64 in let bytes_written = write client_to_server_write ~buf in assert (bytes_written = 8); exit 0 | child_pid -> (* server *) Unix.close client_to_server_write; Unix.close server_to_client_read; let throughput = server_func ~input_fd:client_to_server_read ~output_fd:server_to_client_write in let tp64_buf = Bytes.create 8 in let bytes_read = read client_to_server_read ~buf:tp64_buf in assert (bytes_read = 8); let tp64_buf = Bytes.unsafe_to_string ~no_mutation_while_string_reachable:tp64_buf in let tp64 = EndianString.LittleEndian.get_int64 tp64_buf 0 in let throughput = throughput + (Int64.to_int_exn tp64) in Unix.close client_to_server_read; Unix.close server_to_client_write; let pid, status = Unix.waitpid [] child_pid in assert (pid = child_pid); match status with | WEXITED 0 -> throughput | _ -> failwith "waitpid: child process failed!"
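The helpers above (CountingOutputStream.write and the top-level read/write) all follow the same retry-on-EINTR idiom while keeping a running byte count. Below is a self-contained sketch of that idiom using only the standard Unix library; the names counting_fd and write_counting are illustrative and not part of the benchmark code.

(* A file descriptor paired with a running byte count, as in
   CountingOutputStream above; a write interrupted by a signal raises
   EINTR, meaning the syscall did nothing, so retrying is safe.  On
   success the counter is bumped by the number of bytes written. *)
type counting_fd = { fd : Unix.file_descr; mutable written : int }

let rec write_counting t buf =
  try
    let n = Unix.single_write t.fd buf 0 (Bytes.length buf) in
    t.written <- t.written + n;
    n
  with Unix.Unix_error (Unix.EINTR, _, _) -> write_counting t buf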
null
https://raw.githubusercontent.com/capnproto/capnp-ocaml/dda3d811aa7734110d7af051465011f1f823ffb9/src/benchmark/methods.ml
ocaml
This is a wrapper for writing to a file descriptor and simultaneously counting the number of bytes written. (never timeout) [sync_client] issues a randomized request and waits for the response, looping up to the specified number of iterations. [async_client] issues randomized requests in a pipelined manner, matching up the corresponding responses asynchronously. Unlike the capnproto C++ benchmark, this runs in a single thread and uses a [select] loop to determine appropriate times to write and read. [server] receives incoming requests one at a time, immediately writing a response for each request in turn. [pass_by_pipe] forks off a child (client) process connected to the current (server) process by a pipe. The [client_func] and [server_func] are then used to carry out a benchmark method using the provided pipe transport. child/client server
[@@@ocaml.warning "-3"] module CamlBytes = Bytes module List = Base.List module Queue = Base.Queue module Bytes = Base.Bytes module Int64 = Base.Int64 module IO = Capnp_unix.IO module Codecs = Capnp.Codecs let message_of_builder = Capnp.BytesMessage.StructStorage.message_of_builder module CountingOutputStream = struct type t = { fd : Unix.file_descr; mutable throughput : int; } let rec write chan ~buf ~pos ~len = try let bytes_written = UnixLabels.single_write_substring ~buf ~pos ~len chan.fd in let () = chan.throughput <- chan.throughput + bytes_written in bytes_written with Unix.Unix_error (EINTR, _, _) -> write chan ~buf ~pos ~len let wrap_write_context ~compression stream = IO.WriteContext.create ~write ~compression stream end module type BENCHMARK_SIG = sig val sync_client : input_fd:Unix.file_descr -> output_fd:Unix.file_descr -> compression:Codecs.compression_t -> iters:int -> int val async_client : input_fd:Unix.file_descr -> output_fd:Unix.file_descr -> compression:Codecs.compression_t -> iters:int -> int val server : input_fd:Unix.file_descr -> output_fd:Unix.file_descr -> compression:Codecs.compression_t -> iters:int -> int val pass_by_object : iters:int -> int val pass_by_bytes : compression:Codecs.compression_t -> iters:int -> int end let rec select ~read ~write = with Unix.Unix_error (EINTR, _, _) -> select ~read ~write module Benchmark (TestCase : TestCaseSig.TEST_CASE) (RequestReader : TestCaseSig.READER with type struct_t = TestCase.request_t) (ResponseReader : TestCaseSig.READER with type struct_t = TestCase.response_t) : BENCHMARK_SIG = struct let sync_client ~(input_fd : Unix.file_descr) ~(output_fd : Unix.file_descr) ~(compression : Codecs.compression_t) ~(iters : int) : int = let in_context = IO.create_read_context_for_fd ~compression input_fd in let out_stream = { CountingOutputStream.fd = output_fd; CountingOutputStream.throughput = 0; } in let out_context = CountingOutputStream.wrap_write_context ~compression out_stream in for _i = 0 to iters - 1 do let (request, expectation) = TestCase.setup_request () in let req_message = message_of_builder request in IO.WriteContext.write_message out_context req_message; match IO.ReadContext.read_message in_context with | Some resp_message -> let response = ResponseReader.of_message resp_message in if not (TestCase.check_response response expectation) then failwith "incorrect response." else () | None -> failwith "EOF before message was received." 
done; out_stream.CountingOutputStream.throughput let async_client ~(input_fd : Unix.file_descr) ~(output_fd : Unix.file_descr) ~(compression : Codecs.compression_t) ~(iters : int) : int = let () = Unix.set_nonblock output_fd in let in_context = IO.create_read_context_for_fd ~compression input_fd in let out_stream = { CountingOutputStream.fd = output_fd; CountingOutputStream.throughput = 0; } in let out_context = CountingOutputStream.wrap_write_context ~compression out_stream in let expectations = Queue.create () in let num_sent = ref 0 in let final_send_complete = ref false in while !num_sent < iters || (not (Queue.is_empty expectations)) do let write_watch_fds = if !final_send_complete then [] else [output_fd] in let (ready_read, ready_write, _) = select ~read:[input_fd] ~write:write_watch_fds ~except:[input_fd] in if not (List.is_empty ready_read) then begin let (_ : int) = IO.ReadContext.read in_context in let rec loop () = match IO.ReadContext.dequeue_message in_context with | Some resp_message -> let response = ResponseReader.of_message resp_message in let expect = Queue.dequeue_exn expectations in if not (TestCase.check_response response expect) then failwith "incorrect response." else loop () | None -> () in loop () end; if not (List.is_empty ready_write) then begin begin try while IO.WriteContext.write out_context > 0 do () done with | Unix.Unix_error (Unix.EAGAIN, _, _) | Unix.Unix_error (Unix.EWOULDBLOCK, _, _) -> () end; let bytes_remaining = IO.WriteContext.bytes_remaining out_context in if !num_sent = iters then if bytes_remaining = 0 then final_send_complete := true else () A large queue is n't actually helpful here , it just increases GC pressure . GC pressure. *) else if Queue.length expectations < 4 then begin let (request, expect) = TestCase.setup_request () in let req_message = message_of_builder request in IO.WriteContext.enqueue_message out_context req_message; Queue.enqueue expectations expect; num_sent := !num_sent + 1 end end done; out_stream.CountingOutputStream.throughput let server ~(input_fd : Unix.file_descr) ~(output_fd : Unix.file_descr) ~(compression : Codecs.compression_t) ~(iters : int) : int = let in_context = IO.create_read_context_for_fd ~compression input_fd in let out_stream = { CountingOutputStream.fd = output_fd; CountingOutputStream.throughput = 0; } in let out_context = CountingOutputStream.wrap_write_context ~compression out_stream in for _i = 0 to iters - 1 do match IO.ReadContext.read_message in_context with | Some req_message -> let request = RequestReader.of_message req_message in let response = TestCase.handle_request request in let resp_message = message_of_builder response in IO.WriteContext.write_message out_context resp_message | None -> failwith "EOF before all messages were read." done; out_stream.CountingOutputStream.throughput [ pass_by_object ] constructs a randomized request and generates a response for the request , looping up to the specified number of iterations . Everything happens synchronously in one process , and no serialization takes place . the request, looping up to the specified number of iterations. Everything happens synchronously in one process, and no serialization takes place. 
*) let pass_by_object ~(iters : int) : int = let object_size_counter = ref 0 in for _i = 0 to iters - 1 do let (req_builder, expectation) = TestCase.setup_request () in let resp_builder = TestCase.handle_request (Capnp.BytesMessage.StructStorage.reader_of_builder req_builder) in if not (TestCase.check_response (Capnp.BytesMessage.StructStorage.reader_of_builder resp_builder) expectation) then failwith "incorrect response." else (); object_size_counter := !object_size_counter + (Capnp.BytesMessage.Message.total_size (message_of_builder req_builder)) + (Capnp.BytesMessage.Message.total_size (message_of_builder resp_builder)) done; !object_size_counter [ pass_by_bytes ] constructs a randomized request and generates a response for the request , looping up to the specified number of iterations . Everything happens synchronously in one process . The request and response are converted from objects to strings and back , in both directions . the request, looping up to the specified number of iterations. Everything happens synchronously in one process. The request and response are converted from objects to strings and back, in both directions. *) let pass_by_bytes ~(compression : Codecs.compression_t) ~(iters : int) = let throughput = ref 0 in for _i = 0 to iters - 1 do let (req_builder, expectation) = TestCase.setup_request () in let flattened_request = let req_message = message_of_builder req_builder in Codecs.serialize ~compression req_message in throughput := !throughput + (String.length flattened_request); let req_stream = Codecs.FramedStream.of_string ~compression flattened_request in let flattened_response = match Codecs.FramedStream.get_next_frame req_stream with | Result.Ok req_message -> let resp_builder = TestCase.handle_request (RequestReader.of_message req_message) in let resp_message = message_of_builder resp_builder in Codecs.serialize ~compression resp_message | Result.Error _ -> failwith "failed to decode complete request." in throughput := !throughput + (String.length flattened_response); let resp_stream = Codecs.FramedStream.of_string ~compression flattened_response in match Codecs.FramedStream.get_next_frame resp_stream with | Result.Ok resp_message -> if not (TestCase.check_response (ResponseReader.of_message resp_message) expectation) then failwith "incorrect response." else () | Result.Error _ -> failwith "failed to decode complete response." done; !throughput end let rec read fd ~buf = try Unix.read fd buf 0 (Bytes.length buf) with Unix.Unix_error (EINTR, _, _) -> read fd ~buf let rec write fd ~buf = try Unix.single_write fd buf 0 (Bytes.length buf) with Unix.Unix_error (EINTR, _, _) -> write fd ~buf let pass_by_pipe client_func server_func : int = let (client_to_server_read, client_to_server_write) = Unix.pipe () in let (server_to_client_read, server_to_client_write) = Unix.pipe () in match Unix.fork () with | err when err < 0 -> failwith "fork failed!" 
| 0 -> Unix.close client_to_server_read; Unix.close server_to_client_write; let throughput = client_func ~input_fd:server_to_client_read ~output_fd:client_to_server_write in let tp64 = Int64.of_int throughput in let buf = CamlBytes.create 8 in let () = EndianBytes.LittleEndian.set_int64 buf 0 tp64 in let bytes_written = write client_to_server_write ~buf in assert (bytes_written = 8); exit 0 | child_pid -> Unix.close client_to_server_write; Unix.close server_to_client_read; let throughput = server_func ~input_fd:client_to_server_read ~output_fd:server_to_client_write in let tp64_buf = Bytes.create 8 in let bytes_read = read client_to_server_read ~buf:tp64_buf in assert (bytes_read = 8); let tp64_buf = Bytes.unsafe_to_string ~no_mutation_while_string_reachable:tp64_buf in let tp64 = EndianString.LittleEndian.get_int64 tp64_buf 0 in let throughput = throughput + (Int64.to_int_exn tp64) in Unix.close client_to_server_read; Unix.close server_to_client_write; let pid, status = Unix.waitpid [] child_pid in assert (pid = child_pid); match status with | WEXITED 0 -> throughput | _ -> failwith "waitpid: child process failed!"
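pass_by_pipe above forks a client process and reads its byte count back over a pipe as a little-endian int64. Here is a stripped-down, standard-library-only sketch of that exchange; it uses Bytes.set_int64_le/get_int64_le (available since OCaml 4.08) in place of the EndianBytes/EndianString helpers in the original, and exchange_count is an illustrative name.

(* Fork a child, let it compute a count, and recover the count in the
   parent over a pipe, encoded as a little-endian int64. *)
let exchange_count (compute_in_child : unit -> int) : int =
  let r, w = Unix.pipe () in
  match Unix.fork () with
  | 0 ->
      (* child: encode the count, send it, exit *)
      Unix.close r;
      let buf = Bytes.create 8 in
      Bytes.set_int64_le buf 0 (Int64.of_int (compute_in_child ()));
      assert (Unix.write w buf 0 8 = 8);
      exit 0
  | child_pid ->
      (* parent: read the count back and reap the child *)
      Unix.close w;
      let buf = Bytes.create 8 in
      assert (Unix.read r buf 0 8 = 8);
      ignore (Unix.waitpid [] child_pid);
      Int64.to_int (Bytes.get_int64_le buf 0)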
fd07195993a5c8a12625ada0c7a3f15200f71f454e28b3b50f36655c7901dcd0
floybix/cppnx
svg.cljs
(ns org.nfrac.cppnx.svg (:require [org.nfrac.cppnx.core :as cppnx] [goog.dom :as dom] [reagent.core :as reagent :refer [atom]] [clojure.string :as str] [clojure.core.async :as async :refer [<! put!]])) (defn indexed [xs] (map-indexed vector xs)) (def dragging (atom nil)) ;; prevent scrolling when dragging (defonce window-touchmove (aset js/window "ontouchmove" (fn [e] (when @dragging (.preventDefault e))))) (defn offset-from-svg [e] (let [el (dom/getElementByClass "cppn-graph") r (.getBoundingClientRect el)] [(- (.-clientX e) (.-left r)) (- (.-clientY e) (.-top r))])) (defn abs [x] (if (neg? x) (- x) x)) (defn find-node-at-xy [by-node x y rx ry] (some (fn [[node info]] (let [nx (:x info) ny (:y info)] (when (and (< (abs (- nx x)) rx) (< (abs (- ny y)) ry)) node))) by-node)) (defn valid-drop? [cppn from to] (let [final? (cppnx/finals cppn) input? (:inputs cppn)] (and from to (not= from to) (not (and (final? from) (final? to))) (not (and (input? from) (input? to)))))) (defn cppn-svg [cppn selection event-c] (let [strata (cppnx/cppn-strata cppn) zerod? (:zerod cppn #{}) row-px 70 height-px (* row-px (count strata)) width-px 400 radius-x (/ width-px 18) radius-y (* row-px 0.2) by-node (into {} (for [[row nodes] (indexed strata) [j node] (indexed (sort nodes))] [node {:x (+ (* width-px (/ (+ j 0.5) (inc (count nodes)))) (* (mod row 3) (* width-px 0.02))) :y (* row-px (+ row 0.5)) :deps (-> cppn :edges (get node)) :label (-> cppn :nodes (get node node) name)}])) drag-move (fn [e] (when @dragging (let [[x y] (offset-from-svg e)] (js/requestAnimationFrame (fn [_] (let [targ (find-node-at-xy by-node x y radius-x radius-y) ok? (when targ (valid-drop? cppn (:node @dragging) targ))] (swap! dragging assoc :at [x y] :target (when ok? targ)))))))) drop (fn [e] (when (:target @dragging) (put! event-c {:event :link :from (:node @dragging) :to (:target @dragging)})) (reset! dragging nil)) bg-click (fn [e] (.preventDefault e) (put! event-c {:event :select :node nil}))] [:svg.cppn-graph {:style {:width "100%" :height (str height-px "px") :border "solid 1px black" :font-size "12px"} :onMouseMove drag-move :onTouchMove (fn [e] (drag-move (aget (.-changedTouches e) 0))) :onMouseLeave (fn [e] (reset! dragging nil)) :onMouseUp drop :onTouchEnd (fn [e] (drop (aget (.-changedTouches e) 0))) :onClick bg-click} ;; edges (into [:g] (for [[node info] by-node [from w] (:deps info) :let [from-info (by-node from)]] [:polyline {:points (str/join " " [(:x info) (:y info) (:x from-info) (:y from-info)]) :stroke (if (pos? w) "#000" "#f00") :stroke-width (Math/sqrt (Math/abs w))}])) ;; drag-in-progress arrow (when @dragging (let [info (by-node (:node @dragging)) [x y] (:at @dragging)] [:polyline {:points (str/join " " [(:x info) (:y info) x y]) :stroke (if (:target @dragging) "#0f0" "#888") :stroke-width 2}])) ;; nodes (into [:g] (for [[node info] by-node :let [drag-start (fn [e] (let [[x y] (offset-from-svg e)] (reset! dragging {:node node :at [x y]}))) click (fn [e] (.preventDefault e) (.stopPropagation e) (put! event-c {:event :select :node node}))]] [:g [:ellipse {:cx (:x info) :cy (:y info) :rx radius-x :ry radius-y :fill (if (= selection node) "#bdf" "#eee") :stroke (if (zerod? node) "#000" (if (= selection node) "#888" "#ddd")) :stroke-width (if (zerod? 
node) "4" "1") note - SVG does n't support actual drag&drop events :onMouseDown drag-start :onTouchStart (fn [e] (drag-start (aget (.-changedTouches e) 0))) :onClick click :style {:cursor "alias" :user-select "none"}}] [:text {:style {:pointer-events "none"} :text-anchor "middle" :x (:x info) :y (+ (:y info) 4)} (str (:label info))]]))]))
null
https://raw.githubusercontent.com/floybix/cppnx/4acde1c249f3a978d6eab1226aeaa5d7c54ddfc2/src/org/nfrac/cppnx/svg.cljs
clojure
prevent scrolling when dragging edges drag-in-progress arrow nodes
(ns org.nfrac.cppnx.svg (:require [org.nfrac.cppnx.core :as cppnx] [goog.dom :as dom] [reagent.core :as reagent :refer [atom]] [clojure.string :as str] [clojure.core.async :as async :refer [<! put!]])) (defn indexed [xs] (map-indexed vector xs)) (def dragging (atom nil)) (defonce window-touchmove (aset js/window "ontouchmove" (fn [e] (when @dragging (.preventDefault e))))) (defn offset-from-svg [e] (let [el (dom/getElementByClass "cppn-graph") r (.getBoundingClientRect el)] [(- (.-clientX e) (.-left r)) (- (.-clientY e) (.-top r))])) (defn abs [x] (if (neg? x) (- x) x)) (defn find-node-at-xy [by-node x y rx ry] (some (fn [[node info]] (let [nx (:x info) ny (:y info)] (when (and (< (abs (- nx x)) rx) (< (abs (- ny y)) ry)) node))) by-node)) (defn valid-drop? [cppn from to] (let [final? (cppnx/finals cppn) input? (:inputs cppn)] (and from to (not= from to) (not (and (final? from) (final? to))) (not (and (input? from) (input? to)))))) (defn cppn-svg [cppn selection event-c] (let [strata (cppnx/cppn-strata cppn) zerod? (:zerod cppn #{}) row-px 70 height-px (* row-px (count strata)) width-px 400 radius-x (/ width-px 18) radius-y (* row-px 0.2) by-node (into {} (for [[row nodes] (indexed strata) [j node] (indexed (sort nodes))] [node {:x (+ (* width-px (/ (+ j 0.5) (inc (count nodes)))) (* (mod row 3) (* width-px 0.02))) :y (* row-px (+ row 0.5)) :deps (-> cppn :edges (get node)) :label (-> cppn :nodes (get node node) name)}])) drag-move (fn [e] (when @dragging (let [[x y] (offset-from-svg e)] (js/requestAnimationFrame (fn [_] (let [targ (find-node-at-xy by-node x y radius-x radius-y) ok? (when targ (valid-drop? cppn (:node @dragging) targ))] (swap! dragging assoc :at [x y] :target (when ok? targ)))))))) drop (fn [e] (when (:target @dragging) (put! event-c {:event :link :from (:node @dragging) :to (:target @dragging)})) (reset! dragging nil)) bg-click (fn [e] (.preventDefault e) (put! event-c {:event :select :node nil}))] [:svg.cppn-graph {:style {:width "100%" :height (str height-px "px") :border "solid 1px black" :font-size "12px"} :onMouseMove drag-move :onTouchMove (fn [e] (drag-move (aget (.-changedTouches e) 0))) :onMouseLeave (fn [e] (reset! dragging nil)) :onMouseUp drop :onTouchEnd (fn [e] (drop (aget (.-changedTouches e) 0))) :onClick bg-click} (into [:g] (for [[node info] by-node [from w] (:deps info) :let [from-info (by-node from)]] [:polyline {:points (str/join " " [(:x info) (:y info) (:x from-info) (:y from-info)]) :stroke (if (pos? w) "#000" "#f00") :stroke-width (Math/sqrt (Math/abs w))}])) (when @dragging (let [info (by-node (:node @dragging)) [x y] (:at @dragging)] [:polyline {:points (str/join " " [(:x info) (:y info) x y]) :stroke (if (:target @dragging) "#0f0" "#888") :stroke-width 2}])) (into [:g] (for [[node info] by-node :let [drag-start (fn [e] (let [[x y] (offset-from-svg e)] (reset! dragging {:node node :at [x y]}))) click (fn [e] (.preventDefault e) (.stopPropagation e) (put! event-c {:event :select :node node}))]] [:g [:ellipse {:cx (:x info) :cy (:y info) :rx radius-x :ry radius-y :fill (if (= selection node) "#bdf" "#eee") :stroke (if (zerod? node) "#000" (if (= selection node) "#888" "#ddd")) :stroke-width (if (zerod? node) "4" "1") note - SVG does n't support actual drag&drop events :onMouseDown drag-start :onTouchStart (fn [e] (drag-start (aget (.-changedTouches e) 0))) :onClick click :style {:cursor "alias" :user-select "none"}}] [:text {:style {:pointer-events "none"} :text-anchor "middle" :x (:x info) :y (+ (:y info) 4)} (str (:label info))]]))]))
05fab4546d39044c9033e26cd094eb505d50e5149cb1f437a4e76202dd410037
hiratara/Haskell-Nyumon-Sample
ParserTest.hs
{-# LANGUAGE OverloadedStrings #-} module ParserTest where import Data.Hjq.Parser import Test.HUnit parserTest :: Test parserTest = TestList [ jqFilterParserTest , jqFilterParserSpacesTest , jqQueryParserTest , jqQueryParserSpacesTest ] jqFilterParserTest :: Test jqFilterParserTest = TestList [ "jqFilterParser test 1" ~: parseJqFilter "." ~?= Right JqNil , "jqFilterParser test 2" ~: parseJqFilter ".[0]" ~?= Right (JqIndex 0 JqNil) , "jqFilterParser test 3" ~: parseJqFilter ".fieldName" ~?= Right (JqField "fieldName" JqNil) , "jqFilterParser test 4" ~: parseJqFilter ".[0].fieldName" ~?= Right (JqIndex 0 (JqField "fieldName" JqNil)) , "jqFilterParser test 5" ~: parseJqFilter ".fieldName[0]" ~?= Right (JqField "fieldName" (JqIndex 0 JqNil)) ] jqFilterParserSpacesTest :: Test jqFilterParserSpacesTest = TestList [ "jqFilterParser spaces test 1" ~: parseJqFilter " . " ~?= Right JqNil , "jqFilterParser spaces test 2" ~: parseJqFilter " . [ 0 ] " ~?= Right (JqIndex 0 JqNil) , "jqFilterParser spaces test 3" ~: parseJqFilter " . fieldName " ~?= Right (JqField "fieldName" JqNil) , "jqFilterParser spaces test 4" ~: parseJqFilter " . [ 0 ] . fieldName " ~?= Right (JqIndex 0 (JqField "fieldName" JqNil)) , "jqFilterParser spaces test 5" ~: parseJqFilter " . fieldName [ 0 ] " ~?= Right (JqField "fieldName" (JqIndex 0 JqNil)) ] jqQueryParserTest :: Test jqQueryParserTest = TestList [ "jqQueryParser test 1" ~: parseJqQuery "[]" ~?= Right (JqQueryArray []) , "jqQueryParser test 2" ~: parseJqQuery "[.hoge,.piyo]" ~?= Right (JqQueryArray [JqQueryFilter (JqField "hoge" JqNil), JqQueryFilter (JqField "piyo" JqNil)]) , "jqQueryParser test 3" ~: parseJqQuery "{\"hoge\":[],\"piyo\":[]}" ~?= Right (JqQueryObject [("hoge", JqQueryArray []), ("piyo", JqQueryArray [])]) ] jqQueryParserSpacesTest :: Test jqQueryParserSpacesTest = TestList [ "jqQueryParser spaces test 1" ~: parseJqQuery " [ ] " ~?= Right (JqQueryArray []) , "jqQueryParser spaces test 2" ~: parseJqQuery " [ . hoge , . piyo ] " ~?= Right (JqQueryArray [JqQueryFilter (JqField "hoge" JqNil), JqQueryFilter (JqField "piyo" JqNil)]) , "jqQueryParser spaces test 3" ~: parseJqQuery "{ \"hoge\" : [ ] , \"piyo\" : [ ] } " ~?= Right (JqQueryObject [("hoge", JqQueryArray []), ("piyo", JqQueryArray [])]) ]
null
https://raw.githubusercontent.com/hiratara/Haskell-Nyumon-Sample/ac52b741e3b96722f6fc104cfa84078e39f7a241/chap09-samples/test/ParserTest.hs
haskell
# LANGUAGE OverloadedStrings #
module ParserTest where import Data.Hjq.Parser import Test.HUnit parserTest :: Test parserTest = TestList [ jqFilterParserTest , jqFilterParserSpacesTest , jqQueryParserTest , jqQueryParserSpacesTest ] jqFilterParserTest :: Test jqFilterParserTest = TestList [ "jqFilterParser test 1" ~: parseJqFilter "." ~?= Right JqNil , "jqFilterParser test 2" ~: parseJqFilter ".[0]" ~?= Right (JqIndex 0 JqNil) , "jqFilterParser test 3" ~: parseJqFilter ".fieldName" ~?= Right (JqField "fieldName" JqNil) , "jqFilterParser test 4" ~: parseJqFilter ".[0].fieldName" ~?= Right (JqIndex 0 (JqField "fieldName" JqNil)) , "jqFilterParser test 5" ~: parseJqFilter ".fieldName[0]" ~?= Right (JqField "fieldName" (JqIndex 0 JqNil)) ] jqFilterParserSpacesTest :: Test jqFilterParserSpacesTest = TestList [ "jqFilterParser spaces test 1" ~: parseJqFilter " . " ~?= Right JqNil , "jqFilterParser spaces test 2" ~: parseJqFilter " . [ 0 ] " ~?= Right (JqIndex 0 JqNil) , "jqFilterParser spaces test 3" ~: parseJqFilter " . fieldName " ~?= Right (JqField "fieldName" JqNil) , "jqFilterParser spaces test 4" ~: parseJqFilter " . [ 0 ] . fieldName " ~?= Right (JqIndex 0 (JqField "fieldName" JqNil)) , "jqFilterParser spaces test 5" ~: parseJqFilter " . fieldName [ 0 ] " ~?= Right (JqField "fieldName" (JqIndex 0 JqNil)) ] jqQueryParserTest :: Test jqQueryParserTest = TestList [ "jqQueryParser test 1" ~: parseJqQuery "[]" ~?= Right (JqQueryArray []) , "jqQueryParser test 2" ~: parseJqQuery "[.hoge,.piyo]" ~?= Right (JqQueryArray [JqQueryFilter (JqField "hoge" JqNil), JqQueryFilter (JqField "piyo" JqNil)]) , "jqQueryParser test 3" ~: parseJqQuery "{\"hoge\":[],\"piyo\":[]}" ~?= Right (JqQueryObject [("hoge", JqQueryArray []), ("piyo", JqQueryArray [])]) ] jqQueryParserSpacesTest :: Test jqQueryParserSpacesTest = TestList [ "jqQueryParser spaces test 1" ~: parseJqQuery " [ ] " ~?= Right (JqQueryArray []) , "jqQueryParser spaces test 2" ~: parseJqQuery " [ . hoge , . piyo ] " ~?= Right (JqQueryArray [JqQueryFilter (JqField "hoge" JqNil), JqQueryFilter (JqField "piyo" JqNil)]) , "jqQueryParser spaces test 3" ~: parseJqQuery "{ \"hoge\" : [ ] , \"piyo\" : [ ] } " ~?= Right (JqQueryObject [("hoge", JqQueryArray []), ("piyo", JqQueryArray [])]) ]
44c1635776868d821aec6d0d98c255d1aeff744b61c79632ce2f5301238490a7
yallop/ocaml-ctypes
functions.ml
(* * Copyright (c) 2014 Jeremy Yallop. * * This file is distributed under the terms of the MIT License. * See the file LICENSE for details. *) (* Foreign function bindings for the OO-style tests. *) open Ctypes open Foreign module Stubs (F: Ctypes.FOREIGN) = struct open F let cast base p = from_voidp base (to_voidp p) (* We'll build part of the hierarchy in C and part in OCaml. animal ^ ^ | | chorse camel *) (** Create the base class and its method table **) type animal and animal_methods let animal_methods : animal_methods structure typ = structure "animal methods" and animal : animal structure typ = structure "animal" (* class layout (vtable pointer, no instance variables) *) let animal_vtable = field animal "animal_vtable" (ptr animal_methods) let () = seal animal (* method table layout (two virtual methods) *) let (-:) ty label = field animal_methods label ty let say = Foreign.funptr Ctypes.(ptr animal @-> returning string) -: "say" let identify = Foreign.funptr Ctypes.(ptr animal @-> returning string) -: "identify" let () = seal animal_methods let call_say cinstance = !@((getf (!@cinstance) animal_vtable) |-> say) cinstance let call_identify cinstance = !@((getf (!@cinstance) animal_vtable) |-> identify) cinstance (* constructor *) class animalc ~cinstance = object method say : string = call_say cinstance method identify : string = call_identify cinstance method cinstance = cinstance end (** Create a sub class and its method table **) type camel and camel_methods let camel_methods : camel_methods structure typ = structure "camel methods" and camel : camel structure typ = structure "camel" (* class layout (vtable pointer, one instance variable) *) let (-:) ty label = field camel label ty let camel_vtable = ptr camel_methods -: "camel_vtable" let nhumps = int -: "nhumps" let () = seal camel (* method table layout (one additional virtual method) *) let (-:) ty label = field camel_methods label ty let _ = animal_methods -: "_" let humps = Foreign.funptr Ctypes.(ptr camel @-> returning int) -: "humps" let () = seal camel_methods let call_humps cinstance = !@((getf (!@cinstance) camel_vtable) |-> humps) cinstance (* constructor *) class camelc ~cinstance = object inherit animalc ~cinstance:(cast animal cinstance) method humps : int = call_humps cinstance end let check_name = foreign "check_name" (ptr animal @-> string @-> returning int) let new_chorse = foreign "new_chorse" (int @-> returning (ptr animal)) end
null
https://raw.githubusercontent.com/yallop/ocaml-ctypes/52ff621f47dbc1ee5a90c30af0ae0474549946b4/tests/test-oo_style/stubs/functions.ml
ocaml
We'll build part of the hierarchy in C and part in OCaml. animal ^ ^ | | chorse camel * Create the base class and its method table * class layout (vtable pointer, no instance variables) constructor * Create a sub class and its method table * constructor
* Copyright ( c ) 2014 . * * This file is distributed under the terms of the MIT License . * See the file LICENSE for details . * Copyright (c) 2014 Jeremy Yallop. * * This file is distributed under the terms of the MIT License. * See the file LICENSE for details. *) Foreign function bindings for the OO - style tests . open Ctypes open Foreign module Stubs (F: Ctypes.FOREIGN) = struct open F let cast base p = from_voidp base (to_voidp p) type animal and animal_methods let animal_methods : animal_methods structure typ = structure "animal methods" and animal : animal structure typ = structure "animal" let animal_vtable = field animal "animal_vtable" (ptr animal_methods) let () = seal animal method table layout ( two virtual methods ) let (-:) ty label = field animal_methods label ty let say = Foreign.funptr Ctypes.(ptr animal @-> returning string) -: "say" let identify = Foreign.funptr Ctypes.(ptr animal @-> returning string) -: "identify" let () = seal animal_methods let call_say cinstance = !@((getf (!@cinstance) animal_vtable) |-> say) cinstance let call_identify cinstance = !@((getf (!@cinstance) animal_vtable) |-> identify) cinstance class animalc ~cinstance = object method say : string = call_say cinstance method identify : string = call_identify cinstance method cinstance = cinstance end type camel and camel_methods let camel_methods : camel_methods structure typ = structure "camel methods" and camel : camel structure typ = structure "camel" class layout ( vtable pointer , one instance variable ) let (-:) ty label = field camel label ty let camel_vtable = ptr camel_methods -: "camel_vtable" let nhumps = int -: "nhumps" let () = seal camel method table layout ( one additional virtual method ) let (-:) ty label = field camel_methods label ty let _ = animal_methods -: "_" let humps = Foreign.funptr Ctypes.(ptr camel @-> returning int) -: "humps" let () = seal camel_methods let call_humps cinstance = !@((getf (!@cinstance) camel_vtable) |-> humps) cinstance class camelc ~cinstance = object inherit animalc ~cinstance:(cast animal cinstance) method humps : int = call_humps cinstance end let check_name = foreign "check_name" (ptr animal @-> string @-> returning int) let new_chorse = foreign "new_chorse" (int @-> returning (ptr animal)) end
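To make the dispatch in functions.ml above easier to follow, here is a plain-OCaml analogy with no C or ctypes involved: the C method table corresponds to a record of closures, and call_say corresponds to projecting a field out of it and applying it to the instance. Every name below is illustrative; in the real bindings the table lives in C memory, so the projection goes through getf/!@/|-> rather than ordinary field access.

(* The C-side "animal" carries a pointer to a method table; in plain OCaml
   that is a record of functions plus the state they are applied to. *)
type 'a methods = { say : 'a -> string; identify : 'a -> string }
type 'a instance = { vtable : 'a methods; state : 'a }

let call_say (i : 'a instance) = i.vtable.say i.state
let call_identify (i : 'a instance) = i.vtable.identify i.state

(* A toy "camel" built directly in OCaml rather than constructed in C: *)
let camel =
  { vtable =
      { say = (fun _ -> "humph");
        identify = (fun humps -> Printf.sprintf "camel with %d humps" humps) };
    state = 2 }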
867bbbed1a1ff601127cbaa0a7b6548b11c67ecc0083b4ff057ad17a43b9bce6
mstewartgallus/hs-callbypushvalue
Cps.hs
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE TypeOperators #-} module Cps (Cps, HasThunk (..), HasReturn (..), HasFn (..), HasCall (..), HasLabel (..)) where import Common import Global import HasCode import HasConstants import HasData import HasLet import HasStack import HasTerminal import HasTuple -- | -- I don't understand this but apparently the CPS transform of Call By Push Value is similar to the λμ calculus. -- type Cps t = (HasConstants t, HasTerminal t, HasCall t, HasCode t, HasStack t, HasFn t, HasReturn t, HasThunk t, HasLabel t, HasLet t, HasTuple t) class HasData t => HasCall t where call :: Global a -> Data t (U (FromType a)) class (HasData t, HasCode t, HasStack t) => HasFn t where lambda :: Stack t (a ~> b) -> (Data t a -> Stack t b -> Code t c) -> Code t c (<*>) :: Data t a -> Stack t b -> Stack t (a ~> b) infixr 4 <*> -- | Decomposition of returns type into a into callcc style class (HasData t, HasCode t, HasStack t) => HasReturn t where returns :: Data t a -> Stack t (F a) -> Code t Void letTo :: SSet a -> (Data t a -> Code t Void) -> Stack t (F a) -- | Decomposition of thunks into cps style class (HasData t, HasCode t, HasStack t) => HasThunk t where thunk :: SAlgebra a -> (Stack t a -> Code t Void) -> Data t (U a) force :: Data t (U a) -> Stack t a -> Code t Void class (HasStack t, HasCode t) => HasLabel t where label :: Stack t a -> (Stack t a -> Code t b) -> Code t b label = flip whereLabel whereLabel :: (Stack t a -> Code t b) -> Stack t a -> Code t b whereLabel = flip label
null
https://raw.githubusercontent.com/mstewartgallus/hs-callbypushvalue/d8770b7e9e444e1261901f5ee435fcefb0f7ad75/src/Cps.hs
haskell
# LANGUAGE ConstraintKinds # | | Decomposition of thunks into cps style
# LANGUAGE TypeOperators # module Cps (Cps, HasThunk (..), HasReturn (..), HasFn (..), HasCall (..), HasLabel (..)) where import Common import Global import HasCode import HasConstants import HasData import HasLet import HasStack import HasTerminal import HasTuple I do n't understand this but apparently the CPS transform of Call By Push Value is similar to the λμ calculus . type Cps t = (HasConstants t, HasTerminal t, HasCall t, HasCode t, HasStack t, HasFn t, HasReturn t, HasThunk t, HasLabel t, HasLet t, HasTuple t) class HasData t => HasCall t where call :: Global a -> Data t (U (FromType a)) class (HasData t, HasCode t, HasStack t) => HasFn t where lambda :: Stack t (a ~> b) -> (Data t a -> Stack t b -> Code t c) -> Code t c (<*>) :: Data t a -> Stack t b -> Stack t (a ~> b) infixr 4 <*> | Decomposition of returns type into a into callcc style class (HasData t, HasCode t, HasStack t) => HasReturn t where returns :: Data t a -> Stack t (F a) -> Code t Void letTo :: SSet a -> (Data t a -> Code t Void) -> Stack t (F a) class (HasData t, HasCode t, HasStack t) => HasThunk t where thunk :: SAlgebra a -> (Stack t a -> Code t Void) -> Data t (U a) force :: Data t (U a) -> Stack t a -> Code t Void class (HasStack t, HasCode t) => HasLabel t where label :: Stack t a -> (Stack t a -> Code t b) -> Code t b label = flip whereLabel whereLabel :: (Stack t a -> Code t b) -> Stack t a -> Code t b whereLabel = flip label
65fce0ff9835be8e58e72f31f183a9a6a8e49cdbc005965e70ef70897d420d81
hammerlab/biokepi
gatk.ml
open Biokepi_run_environment open Common module Remove = Workflow_utilities.Remove module Configuration = struct module Gatk_config () = struct type t = { (** The name of the configuration, specific to Biokepi. *) name: string; * options . This filter is applied automatically by all GATK tools in order to protect them from crashing on reads that are grossly malformed . There are a few issues ( such as the absence of sequence bases ) that will cause the run to fail with an error , but these cases can be preempted by setting flags that cause the problem reads to also be filtered . This filter is applied automatically by all GATK tools in order to protect them from crashing on reads that are grossly malformed. There are a few issues (such as the absence of sequence bases) that will cause the run to fail with an error, but these cases can be preempted by setting flags that cause the problem reads to also be filtered. *) filter_reads_with_n_cigar: bool; * Ignore reads with CIGAR containing the N operator , instead of failing with an error with an error *) filter_mismatching_base_and_quals: bool; (** Ignore reads with mismatching numbers of bases and base qualities, instead of failing with an error.*) filter_bases_not_stored: bool; (** Ignore reads with no stored bases (i.e. '*' where the sequence should be), instead of failing with an error *) java_heap_memory: string option; (* not rendered; needs to be explicitly passed to the `java` command *) * Heap size to be passed to ` java ` , e.g. 8 g , 256 m. (** Other parameters: *) parameters: (string * string) list; } let name t = t.name let memory_param t = Option.value_map ~default:"" ~f:(fun m -> sprintf "-Xmx%s" m) t.java_heap_memory let to_json t: Yojson.Basic.json = let {name; filter_reads_with_n_cigar; filter_mismatching_base_and_quals; filter_bases_not_stored; java_heap_memory; parameters} = t in `Assoc [ "name", `String name; "filter_reads_with_N_cigar", `Bool filter_reads_with_n_cigar; "filter_mismatching_base_and_quals", `Bool filter_mismatching_base_and_quals; "filter_bases_not_stored", `Bool filter_bases_not_stored; "java_heap_memory", `String (Option.value ~default:"" java_heap_memory); "parameters", `Assoc (List.map parameters ~f:(fun (a, b) -> a, `String b)); ] let render {name; filter_reads_with_n_cigar; filter_mismatching_base_and_quals; filter_bases_not_stored; parameters; _} = (if filter_reads_with_n_cigar then "--filter_reads_with_N_cigar" else "") :: (if filter_mismatching_base_and_quals then "--filter_mismatching_base_and_quals" else "") :: (if filter_bases_not_stored then "--filter_bases_not_stored" else "") :: List.concat_map parameters ~f:(fun (a, b) -> [a; b]) |> List.filter ~f:(fun s -> not (String.is_empty s)) let default = {name = "default"; filter_reads_with_n_cigar = false; filter_mismatching_base_and_quals = false; filter_bases_not_stored = false; java_heap_memory = None; parameters = []} end module Indel_realigner = struct include Gatk_config () end module Realigner_target_creator = struct include Gatk_config () end module Bqsr = struct include Gatk_config () end module Print_reads = struct include Gatk_config () end type indel_realigner = (Indel_realigner.t * Realigner_target_creator.t) type bqsr = (Bqsr.t * Print_reads.t) let default_indel_realigner = (Indel_realigner.default, Realigner_target_creator.default) let default_bqsr = (Bqsr.default, Print_reads.default) module Mutect2 = struct type t = { name: string; use_dbsnp: bool; use_cosmic: bool; additional_arguments: string list; } let create ?(use_dbsnp = true) 
?(use_cosmic = true) name additional_arguments = {name; use_dbsnp; use_cosmic; additional_arguments} let to_json {name; use_dbsnp; use_cosmic; additional_arguments} : Yojson.Basic.json = `Assoc [ "name", `String name; "use-cosmic", `Bool use_cosmic; "use-dbsnp", `Bool use_dbsnp; "additional-arguments", `List (List.map additional_arguments ~f:(fun s -> `String s)); ] let default = create "default" [] let default_without_cosmic = create ~use_cosmic:false ~use_dbsnp:true "default_without_cosmic" [] let compile ~reference {name; use_dbsnp; use_cosmic; additional_arguments} = let with_db use opt_name get_exn = if not use then None else let node = get_exn reference in Some ( [opt_name; node#product#path], [KEDSL.depends_on node]) in let args, edges = List.filter_opt [ with_db use_dbsnp "--dbsnp" Reference_genome.dbsnp_exn; with_db use_cosmic "--cosmic" Reference_genome.cosmic_exn; ] |> List.split in (`Arguments (List.concat args @ additional_arguments), `Edges (List.concat edges)) let name t = t.name end end For now we have the two steps in the same target but this could be split in two . c.f . We want to be able to run the indel - realigner on , so we can not use the usual ` ~result_prefix ` argument : See the documentation for the ` --nWayOut ` option : #--nWayOut See also -practice-for-multi-sample-non-human-indel-realignment Also , the documentation is incomplete ( or buggy ) , the option ` --nWayOut ` will output the Bam files in the current directory ( i.e. the one GATK is running in ) . So , unless the user uses the ` ? run_directory ` option , we extract that directory from the input - bams ; if they do not coincide we consider this an error . On top of that we use the GADT ` _ KEDSL.bam_orf_bams ` to return have 2 possible return types : bam_file workflow_node or bam_list workflow_node For now we have the two steps in the same target but this could be split in two. c.f. We want to be able to run the indel-realigner on mutliple bams, so we cannot use the usual `~result_prefix` argument: See the documentation for the `--nWayOut` option: #--nWayOut See also -practice-for-multi-sample-non-human-indel-realignment Also, the documentation is incomplete (or buggy), the option `--nWayOut` will output the Bam files in the current directory (i.e. the one GATK is running in). So, unless the user uses the `?run_directory` option, we extract that directory from the input-bams; if they do not coincide we consider this an error. On top of that we use the GADT `_ KEDSL.bam_orf_bams` to return have 2 possible return types: bam_file workflow_node or bam_list workflow_node *) open Configuration We limit this to 20 characters to attempt to keep the length of the resulting filenames below the common maximum length of 255 . filenames below the common maximum length of 255. *) let indel_realigner_output_filename_tag ~configuration:(ir_config, target_config) ?region input_bams = let digest_of_input = (List.map input_bams ~f:(fun o -> o#product#path) @ [ir_config.Configuration.Indel_realigner.name; target_config.Configuration.Realigner_target_creator.name; Option.value_map ~f:(fun r -> "-" ^ Region.to_filename r) region ~default:""]) |> String.concat ~sep:"" (* we make this file “unique” with an MD5 sum of the input paths *) |> Digest.string |> Digest.to_hex in (String.take digest_of_input ~index:11) ^ "-indelreal" let indel_realigner : type a. 
?compress:bool -> ?on_region: Region.t -> configuration:(Indel_realigner.t * Realigner_target_creator.t) -> run_with:Machine.t -> ?run_directory: string -> a KEDSL.bam_or_bams -> a = fun ?(compress=false) ?(on_region = `Full) ~configuration ~run_with ?run_directory input_bam_or_bams -> let open KEDSL in let input_bam_1, more_input_bams = (* this an at-least-length-1 list :) *) match input_bam_or_bams with | Single_bam bam -> bam, [] | Bam_workflow_list [] -> failwithf "Empty bam-list in Gatk.indel_realigner`" | Bam_workflow_list (one :: more) -> (one, more) in let run_directory = match run_directory with | None -> let dir = Filename.dirname input_bam_1#product#path in List.iter more_input_bams ~f:(fun bam -> if Filename.dirname bam#product#path <> dir then failwithf "These two BAMS are not in the same directory:\n\ \ %s\n\ \ %s\n\ GATK.indel_realigner when running on multiple bams \ requires a proper run-directory, clean-up your bams \ or provide the option ~run_directory " input_bam_1#product#path bam#product#path ); dir | Some rundir -> rundir in let indel_config, target_config = configuration in let input_sorted_bam_1 = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_bam_1 in let more_input_sorted_bams = List.map more_input_bams ~f:(Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate) in let more_input_bams = `Use_the_sorted_ones_please in let input_bam_1 = `Use_the_sorted_ones_please in ignore (more_input_bams, input_bam_1); let name = sprintf "Indel Realignment on %s (%s)" (Filename.basename input_sorted_bam_1#product#path) (Region.to_filename on_region) in let gatk = Machine.get_tool run_with Machine.Tool.Default.gatk in let reference_genome = let reference_build = input_sorted_bam_1#product#reference_build in Machine.get_reference_genome run_with reference_build in let fasta = Reference_genome.fasta reference_genome in let output_suffix = indel_realigner_output_filename_tag ~configuration ~region:on_region (input_sorted_bam_1 :: more_input_sorted_bams) in let intervals_file = Filename.chop_suffix input_sorted_bam_1#product#path ".bam" ^ output_suffix ^ ".intervals" in This function encodes how IndelRealign 's nWayOut names the output BAMs , including the directory it 'll end up placing them in . including the directory it'll end up placing them in. 
*) let output_bam_path input = run_directory // ( Filename.chop_extension input#product#path ^ output_suffix ^ ".bam" |> Filename.basename) in let processors = Machine.max_processors run_with in let make = let target_creation_args = [ "-R"; Filename.quote fasta#product#path; "-I"; Filename.quote input_sorted_bam_1#product#path; "-o"; Filename.quote intervals_file; "-nt"; Int.to_string processors; ] @ Realigner_target_creator.render target_config @ List.concat_map more_input_sorted_bams ~f:(fun bam -> ["-I"; Filename.quote bam#product#path]) in let indel_real_args = [ "-R"; fasta#product#path; "-I"; input_sorted_bam_1#product#path; "-targetIntervals"; intervals_file; ] @ Indel_realigner.render indel_config @ begin match more_input_sorted_bams with | [] -> ["-o"; output_bam_path input_sorted_bam_1] | more -> List.concat_map more ~f:(fun b -> ["-I"; Filename.quote b#product#path]) @ ["--nWayOut"; output_suffix ^ ".bam"] end in let intervals_option = Region.to_gatk_option on_region in Machine.run_big_program run_with ~name ~processors ~self_ids:["gatk"; "indel-realigner"] Program.( Machine.Tool.(init gatk) && shf "cd %s" (Filename.quote run_directory) && shf "java %s -jar $GATK_JAR -T RealignerTargetCreator %s %s" (Realigner_target_creator.memory_param target_config) intervals_option (String.concat ~sep:" " target_creation_args) && (shf "java %s -jar $GATK_JAR -T IndelRealigner %s %s %s" (Indel_realigner.memory_param indel_config) intervals_option (if compress then " " else " -compress 0 ") (String.concat ~sep:" " indel_real_args))) in let edges = let sequence_dict = (* implicit dependency *) Picard.create_dict ~run_with fasta in [ depends_on Machine.Tool.(ensure gatk); depends_on fasta; RealignerTargetCreator wants the ` .fai ` : depends_on (Samtools.faidx ~run_with fasta); depends_on sequence_dict; on_failure_activate (Remove.file ~run_with intervals_file); ] @ List.concat_map (input_sorted_bam_1 :: more_input_sorted_bams) ~f:(fun b -> [ depends_on b; depends_on (Samtools.index_to_bai ~run_with b); on_failure_activate (Remove.file ~run_with (output_bam_path b)); ]) in let node : type a. a bam_or_bams -> a = (* we need a function to force `type a.` *) function | Single_bam _ -> (* This is what we give to the `-o` option: *) workflow_node ~name ~make ~edges (transform_bam input_sorted_bam_1#product (output_bam_path input_sorted_bam_1)) | Bam_workflow_list _ -> workflow_node ~name ~make ~edges (bam_list (List.map (input_sorted_bam_1 :: more_input_sorted_bams) ~f:(fun b -> (* This is what the documentation says it will to with the `--nWayOut` option *) transform_bam b#product (output_bam_path b)))) in node input_bam_or_bams let indel_realigner_map_reduce : type a. 
?compress:bool -> configuration:(Indel_realigner.t * Realigner_target_creator.t) -> run_with:Machine.t -> ?run_directory: string -> a KEDSL.bam_or_bams -> a = fun ?compress ~configuration ~run_with ?run_directory input_bam_or_bams -> let open KEDSL in begin match input_bam_or_bams with | Single_bam bam_node -> let all_nodes = let f on_region = indel_realigner ?compress ~on_region ~configuration ~run_with ?run_directory input_bam_or_bams in let reference = Machine.get_reference_genome run_with bam_node#product#reference_build in List.map ~f (Reference_genome.major_contigs reference) in let result_path = Filename.chop_extension bam_node#product#path ^ indel_realigner_output_filename_tag ~configuration [bam_node] ^ "-merged.bam" in Samtools.merge_bams ~run_with all_nodes result_path | Bam_workflow_list bams -> let all_nodes = A list of lists that looks like : [ [ bam1_reg1 ; bam2_reg1 ; bam3_reg1 ] ; [ bam1_reg2 ; bam2_reg2 ; bam3_reg2 ] ; [ bam1_reg3 ; bam2_reg3 ; bam3_reg3 ] ; [ bam1_reg4 ; bam2_reg4 ; bam3_reg4 ] ; ] [ [bam1_reg1; bam2_reg1; bam3_reg1]; [bam1_reg2; bam2_reg2; bam3_reg2]; [bam1_reg3; bam2_reg3; bam3_reg3]; [bam1_reg4; bam2_reg4; bam3_reg4]; ] *) let f on_region = let bam_list_node = indel_realigner ?compress ~on_region ~configuration ~run_with ?run_directory input_bam_or_bams in let exploded = KEDSL.explode_bam_list_node bam_list_node in exploded in let reference = Machine.get_reference_genome run_with (List.hd_exn bams)#product#reference_build in List.map ~f (Reference_genome.major_contigs reference) in let merged_bams = List.mapi bams ~f:(fun index bam -> let all_regions_for_this_bam = List.map all_nodes ~f:(fun region_n -> List.nth region_n index |> Option.value_exn ~msg:"bug in Gatk.indel_realigner_map_reduce") in let result_path = Filename.chop_extension bam#product#path ^ sprintf "-%d-" index (* the index is there as debug/witness *) ^ indel_realigner_output_filename_tag ~configuration bams ^ "-merged.bam" in Samtools.merge_bams ~run_with all_regions_for_this_bam result_path ) in workflow_node ~name:"Indel-realigner-map-reduce" ~edges:(List.map merged_bams ~f:depends_on) (bam_list (List.map merged_bams ~f:(fun n -> n#product))) end Again doing two steps in one target for now : -quality-score-recalibrator -quality-score-recalibrator *) let call_gatk ~analysis ?(region=`Full) ?java_heap_memory args = let open KEDSL.Program in let escaped_args = List.map ~f:Filename.quote args in let intervals_option = Region.to_gatk_option region in let hm = match java_heap_memory with | None -> "" | Some m -> sprintf "-Xmx%s" m in sh (String.concat ~sep:" " ((sprintf "java %s -jar $GATK_JAR -T " hm) :: analysis :: intervals_option :: escaped_args)) let base_quality_score_recalibrator ~configuration:(bqsr_configuration, print_reads_configuration) ~run_with ~input_bam ~output_bam = let open KEDSL in let name = sprintf "gatk-%s" (Filename.basename output_bam) in let gatk = Machine.get_tool run_with Machine.Tool.Default.gatk in let reference_genome = Machine.get_reference_genome run_with input_bam#product#reference_build in let fasta = Reference_genome.fasta reference_genome in let db_snp = Reference_genome.dbsnp_exn reference_genome in let sorted_bam = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_bam in let input_bam = `Please_use_the_sorted_one in ignore input_bam; let recal_data_table = Name_file.from_path ~readable_suffix:"bqsr_recal.table" sorted_bam#product#path [] in let processors = Machine.max_processors run_with in let make = Machine.run_big_program run_with 
~name ~processors ~self_ids:["gatk"; "bqsr"] Program.( Machine.Tool.(init gatk) && call_gatk ~analysis:"BaseRecalibrator" ([ "-nct"; Int.to_string processors; "-I"; sorted_bam#product#path; "-R"; fasta#product#path; "-knownSites"; db_snp#product#path; "-o"; recal_data_table; ] @ Configuration.Bqsr.render bqsr_configuration) && call_gatk ~analysis:"PrintReads" ([ "-nct"; Int.to_string processors; "-R"; fasta#product#path; "-I"; sorted_bam#product#path; "-BQSR"; recal_data_table; "-o"; output_bam; ] @ Configuration.Print_reads.render print_reads_configuration) ) in workflow_node ~name (transform_bam sorted_bam#product ~path:output_bam) ~make ~edges:[ depends_on Machine.Tool.(ensure gatk); depends_on fasta; depends_on db_snp; depends_on sorted_bam; depends_on (Samtools.faidx ~run_with fasta); depends_on (Samtools.index_to_bai ~run_with sorted_bam); on_failure_activate (Remove.file ~run_with output_bam); on_failure_activate (Remove.file ~run_with recal_data_table); ] let haplotype_caller ?(more_edges = []) ~run_with ~input_bam ~result_prefix how = let open KEDSL in let reference = Machine.get_reference_genome run_with input_bam#product#reference_build in let run_on_region ~add_edges region = let result_file suffix = let region_name = Region.to_filename region in sprintf "%s-%s%s" result_prefix region_name suffix in let output_vcf = result_file "-germline.vcf" in let gatk = Machine.get_tool run_with Machine.Tool.Default.gatk in let run_path = Filename.dirname output_vcf in let reference_fasta = Reference_genome.fasta reference in let reference_dot_fai = Samtools.faidx ~run_with reference_fasta in let sequence_dict = Picard.create_dict ~run_with reference_fasta in let dbsnp = Reference_genome.dbsnp_exn reference in let sorted_bam = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_bam in let run_gatk_haplotype_caller = let name = sprintf "%s" (Filename.basename output_vcf) in let make = Machine.run_big_program run_with ~name ~self_ids:["gatk"; "haplotype-caller"] Program.( Machine.Tool.(init gatk) && shf "mkdir -p %s" run_path && shf "cd %s" run_path && call_gatk ~region ~analysis:"HaplotypeCaller" [ "-I"; sorted_bam#product#path; "-R"; reference_fasta#product#path; "--dbsnp"; dbsnp#product#path; "-o"; output_vcf; "--filter_reads_with_N_cigar"; ] ) in workflow_node ~name ~make (vcf_file output_vcf ~reference_build:input_bam#product#reference_build ~host:Machine.(as_host run_with)) ~tags:[Target_tags.variant_caller] ~edges:(add_edges @ [ depends_on Machine.Tool.(ensure gatk); depends_on sorted_bam; depends_on reference_fasta; depends_on dbsnp; depends_on reference_dot_fai; depends_on sequence_dict; depends_on (Samtools.index_to_bai ~run_with sorted_bam); on_failure_activate (Remove.file ~run_with output_vcf); ]) in run_gatk_haplotype_caller in match how with | `Region region -> run_on_region ~add_edges:more_edges region | `Map_reduce -> let targets = List.map (Reference_genome.major_contigs reference) ~f:(run_on_region ~add_edges:[]) (* we add edges only to the last step *) in let final_vcf = result_prefix ^ "-merged.vcf" in Vcftools.vcf_concat ~run_with targets ~final_vcf ~more_edges * Call somatic variants with Mutect2 . Mutect2 comes within the GATK ( as opposed to { ! } ) . Cf . also Mutect2 comes within the GATK (as opposed to {!Mutect}). Cf. 
also *) let mutect2 ?(more_edges = []) ~configuration ~run_with The doc says only one of each ~result_prefix how = let open KEDSL in let reference = Machine.get_reference_genome run_with input_normal_bam#product#reference_build in let run_on_region ~add_edges region = let result_file suffix = let region_name = Region.to_filename region in sprintf "%s-%s%s" result_prefix region_name suffix in let output_vcf = result_file "-mutect2.vcf" in let gatk = Machine.get_tool run_with Machine.Tool.Default.gatk in let run_path = Filename.dirname output_vcf in let reference_fasta = Reference_genome.fasta reference in let reference_dot_fai = Samtools.faidx ~run_with reference_fasta in let sequence_dict = Picard.create_dict ~run_with reference_fasta in let sorted_normal_bam = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_normal_bam in let sorted_tumor_bam = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_tumor_bam in let `Arguments config_arguments, `Edges confg_edges = Configuration.Mutect2.compile ~reference configuration in let run_caller = let name = sprintf "%s" (Filename.basename output_vcf) in let make = Machine.run_big_program run_with ~name ~self_ids:["gatk"; "mutect2"] Program.( Machine.Tool.(init gatk) && shf "mkdir -p %s" run_path && shf "cd %s" run_path && call_gatk ~region ~analysis:"MuTect2" ([ "-I:normal"; sorted_normal_bam#product#path; "-I:tumor"; sorted_tumor_bam#product#path; "-R"; reference_fasta#product#path; "-o"; output_vcf; ] @ config_arguments) ) in workflow_node ~name ~make (vcf_file output_vcf ~reference_build:input_normal_bam#product#reference_build ~host:Machine.(as_host run_with)) ~tags:[Target_tags.variant_caller] ~edges:(add_edges @ confg_edges @ [ depends_on Machine.Tool.(ensure gatk); depends_on sorted_normal_bam; depends_on sorted_tumor_bam; depends_on reference_fasta; depends_on reference_dot_fai; depends_on sequence_dict; depends_on (Samtools.index_to_bai ~run_with sorted_normal_bam); depends_on (Samtools.index_to_bai ~run_with sorted_tumor_bam); on_failure_activate (Remove.file ~run_with output_vcf); ]) in run_caller in match how with | `Region region -> run_on_region ~add_edges:more_edges region | `Map_reduce -> let targets = List.map (Reference_genome.major_contigs reference) ~f:(run_on_region ~add_edges:[]) (* we add edges only to the last step *) in let final_vcf = result_prefix ^ "-merged.vcf" in Vcftools.vcf_concat ~run_with targets ~final_vcf ~more_edges
null
https://raw.githubusercontent.com/hammerlab/biokepi/d64eb2c891b41bda3444445cd2adf4e3251725d4/src/bfx_tools/gatk.ml
ocaml
* The name of the configuration, specific to Biokepi. * Ignore reads with mismatching numbers of bases and base qualities, instead of failing with an error. * Ignore reads with no stored bases (i.e. '*' where the sequence should be), instead of failing with an error not rendered; needs to be explicitly passed to the `java` command * Other parameters: we make this file “unique” with an MD5 sum of the input paths this an at-least-length-1 list :) implicit dependency we need a function to force `type a.` This is what we give to the `-o` option: This is what the documentation says it will to with the `--nWayOut` option the index is there as debug/witness we add edges only to the last step we add edges only to the last step
open Biokepi_run_environment open Common module Remove = Workflow_utilities.Remove module Configuration = struct module Gatk_config () = struct type t = { name: string; * options . This filter is applied automatically by all GATK tools in order to protect them from crashing on reads that are grossly malformed . There are a few issues ( such as the absence of sequence bases ) that will cause the run to fail with an error , but these cases can be preempted by setting flags that cause the problem reads to also be filtered . This filter is applied automatically by all GATK tools in order to protect them from crashing on reads that are grossly malformed. There are a few issues (such as the absence of sequence bases) that will cause the run to fail with an error, but these cases can be preempted by setting flags that cause the problem reads to also be filtered. *) filter_reads_with_n_cigar: bool; * Ignore reads with CIGAR containing the N operator , instead of failing with an error with an error *) filter_mismatching_base_and_quals: bool; filter_bases_not_stored: bool; java_heap_memory: string option; * Heap size to be passed to ` java ` , e.g. 8 g , 256 m. parameters: (string * string) list; } let name t = t.name let memory_param t = Option.value_map ~default:"" ~f:(fun m -> sprintf "-Xmx%s" m) t.java_heap_memory let to_json t: Yojson.Basic.json = let {name; filter_reads_with_n_cigar; filter_mismatching_base_and_quals; filter_bases_not_stored; java_heap_memory; parameters} = t in `Assoc [ "name", `String name; "filter_reads_with_N_cigar", `Bool filter_reads_with_n_cigar; "filter_mismatching_base_and_quals", `Bool filter_mismatching_base_and_quals; "filter_bases_not_stored", `Bool filter_bases_not_stored; "java_heap_memory", `String (Option.value ~default:"" java_heap_memory); "parameters", `Assoc (List.map parameters ~f:(fun (a, b) -> a, `String b)); ] let render {name; filter_reads_with_n_cigar; filter_mismatching_base_and_quals; filter_bases_not_stored; parameters; _} = (if filter_reads_with_n_cigar then "--filter_reads_with_N_cigar" else "") :: (if filter_mismatching_base_and_quals then "--filter_mismatching_base_and_quals" else "") :: (if filter_bases_not_stored then "--filter_bases_not_stored" else "") :: List.concat_map parameters ~f:(fun (a, b) -> [a; b]) |> List.filter ~f:(fun s -> not (String.is_empty s)) let default = {name = "default"; filter_reads_with_n_cigar = false; filter_mismatching_base_and_quals = false; filter_bases_not_stored = false; java_heap_memory = None; parameters = []} end module Indel_realigner = struct include Gatk_config () end module Realigner_target_creator = struct include Gatk_config () end module Bqsr = struct include Gatk_config () end module Print_reads = struct include Gatk_config () end type indel_realigner = (Indel_realigner.t * Realigner_target_creator.t) type bqsr = (Bqsr.t * Print_reads.t) let default_indel_realigner = (Indel_realigner.default, Realigner_target_creator.default) let default_bqsr = (Bqsr.default, Print_reads.default) module Mutect2 = struct type t = { name: string; use_dbsnp: bool; use_cosmic: bool; additional_arguments: string list; } let create ?(use_dbsnp = true) ?(use_cosmic = true) name additional_arguments = {name; use_dbsnp; use_cosmic; additional_arguments} let to_json {name; use_dbsnp; use_cosmic; additional_arguments} : Yojson.Basic.json = `Assoc [ "name", `String name; "use-cosmic", `Bool use_cosmic; "use-dbsnp", `Bool use_dbsnp; "additional-arguments", `List (List.map additional_arguments ~f:(fun s -> `String s)); ] let 
default = create "default" [] let default_without_cosmic = create ~use_cosmic:false ~use_dbsnp:true "default_without_cosmic" [] let compile ~reference {name; use_dbsnp; use_cosmic; additional_arguments} = let with_db use opt_name get_exn = if not use then None else let node = get_exn reference in Some ( [opt_name; node#product#path], [KEDSL.depends_on node]) in let args, edges = List.filter_opt [ with_db use_dbsnp "--dbsnp" Reference_genome.dbsnp_exn; with_db use_cosmic "--cosmic" Reference_genome.cosmic_exn; ] |> List.split in (`Arguments (List.concat args @ additional_arguments), `Edges (List.concat edges)) let name t = t.name end end For now we have the two steps in the same target but this could be split in two . c.f . We want to be able to run the indel - realigner on , so we can not use the usual ` ~result_prefix ` argument : See the documentation for the ` --nWayOut ` option : #--nWayOut See also -practice-for-multi-sample-non-human-indel-realignment Also , the documentation is incomplete ( or buggy ) , the option ` --nWayOut ` will output the Bam files in the current directory ( i.e. the one GATK is running in ) . So , unless the user uses the ` ? run_directory ` option , we extract that directory from the input - bams ; if they do not coincide we consider this an error . On top of that we use the GADT ` _ KEDSL.bam_orf_bams ` to return have 2 possible return types : bam_file workflow_node or bam_list workflow_node For now we have the two steps in the same target but this could be split in two. c.f. We want to be able to run the indel-realigner on mutliple bams, so we cannot use the usual `~result_prefix` argument: See the documentation for the `--nWayOut` option: #--nWayOut See also -practice-for-multi-sample-non-human-indel-realignment Also, the documentation is incomplete (or buggy), the option `--nWayOut` will output the Bam files in the current directory (i.e. the one GATK is running in). So, unless the user uses the `?run_directory` option, we extract that directory from the input-bams; if they do not coincide we consider this an error. On top of that we use the GADT `_ KEDSL.bam_orf_bams` to return have 2 possible return types: bam_file workflow_node or bam_list workflow_node *) open Configuration We limit this to 20 characters to attempt to keep the length of the resulting filenames below the common maximum length of 255 . filenames below the common maximum length of 255. *) let indel_realigner_output_filename_tag ~configuration:(ir_config, target_config) ?region input_bams = let digest_of_input = (List.map input_bams ~f:(fun o -> o#product#path) @ [ir_config.Configuration.Indel_realigner.name; target_config.Configuration.Realigner_target_creator.name; Option.value_map ~f:(fun r -> "-" ^ Region.to_filename r) region ~default:""]) |> String.concat ~sep:"" |> Digest.string |> Digest.to_hex in (String.take digest_of_input ~index:11) ^ "-indelreal" let indel_realigner : type a. 
?compress:bool -> ?on_region: Region.t -> configuration:(Indel_realigner.t * Realigner_target_creator.t) -> run_with:Machine.t -> ?run_directory: string -> a KEDSL.bam_or_bams -> a = fun ?(compress=false) ?(on_region = `Full) ~configuration ~run_with ?run_directory input_bam_or_bams -> let open KEDSL in match input_bam_or_bams with | Single_bam bam -> bam, [] | Bam_workflow_list [] -> failwithf "Empty bam-list in Gatk.indel_realigner`" | Bam_workflow_list (one :: more) -> (one, more) in let run_directory = match run_directory with | None -> let dir = Filename.dirname input_bam_1#product#path in List.iter more_input_bams ~f:(fun bam -> if Filename.dirname bam#product#path <> dir then failwithf "These two BAMS are not in the same directory:\n\ \ %s\n\ \ %s\n\ GATK.indel_realigner when running on multiple bams \ requires a proper run-directory, clean-up your bams \ or provide the option ~run_directory " input_bam_1#product#path bam#product#path ); dir | Some rundir -> rundir in let indel_config, target_config = configuration in let input_sorted_bam_1 = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_bam_1 in let more_input_sorted_bams = List.map more_input_bams ~f:(Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate) in let more_input_bams = `Use_the_sorted_ones_please in let input_bam_1 = `Use_the_sorted_ones_please in ignore (more_input_bams, input_bam_1); let name = sprintf "Indel Realignment on %s (%s)" (Filename.basename input_sorted_bam_1#product#path) (Region.to_filename on_region) in let gatk = Machine.get_tool run_with Machine.Tool.Default.gatk in let reference_genome = let reference_build = input_sorted_bam_1#product#reference_build in Machine.get_reference_genome run_with reference_build in let fasta = Reference_genome.fasta reference_genome in let output_suffix = indel_realigner_output_filename_tag ~configuration ~region:on_region (input_sorted_bam_1 :: more_input_sorted_bams) in let intervals_file = Filename.chop_suffix input_sorted_bam_1#product#path ".bam" ^ output_suffix ^ ".intervals" in This function encodes how IndelRealign 's nWayOut names the output BAMs , including the directory it 'll end up placing them in . including the directory it'll end up placing them in. 
*) let output_bam_path input = run_directory // ( Filename.chop_extension input#product#path ^ output_suffix ^ ".bam" |> Filename.basename) in let processors = Machine.max_processors run_with in let make = let target_creation_args = [ "-R"; Filename.quote fasta#product#path; "-I"; Filename.quote input_sorted_bam_1#product#path; "-o"; Filename.quote intervals_file; "-nt"; Int.to_string processors; ] @ Realigner_target_creator.render target_config @ List.concat_map more_input_sorted_bams ~f:(fun bam -> ["-I"; Filename.quote bam#product#path]) in let indel_real_args = [ "-R"; fasta#product#path; "-I"; input_sorted_bam_1#product#path; "-targetIntervals"; intervals_file; ] @ Indel_realigner.render indel_config @ begin match more_input_sorted_bams with | [] -> ["-o"; output_bam_path input_sorted_bam_1] | more -> List.concat_map more ~f:(fun b -> ["-I"; Filename.quote b#product#path]) @ ["--nWayOut"; output_suffix ^ ".bam"] end in let intervals_option = Region.to_gatk_option on_region in Machine.run_big_program run_with ~name ~processors ~self_ids:["gatk"; "indel-realigner"] Program.( Machine.Tool.(init gatk) && shf "cd %s" (Filename.quote run_directory) && shf "java %s -jar $GATK_JAR -T RealignerTargetCreator %s %s" (Realigner_target_creator.memory_param target_config) intervals_option (String.concat ~sep:" " target_creation_args) && (shf "java %s -jar $GATK_JAR -T IndelRealigner %s %s %s" (Indel_realigner.memory_param indel_config) intervals_option (if compress then " " else " -compress 0 ") (String.concat ~sep:" " indel_real_args))) in let edges = Picard.create_dict ~run_with fasta in [ depends_on Machine.Tool.(ensure gatk); depends_on fasta; RealignerTargetCreator wants the ` .fai ` : depends_on (Samtools.faidx ~run_with fasta); depends_on sequence_dict; on_failure_activate (Remove.file ~run_with intervals_file); ] @ List.concat_map (input_sorted_bam_1 :: more_input_sorted_bams) ~f:(fun b -> [ depends_on b; depends_on (Samtools.index_to_bai ~run_with b); on_failure_activate (Remove.file ~run_with (output_bam_path b)); ]) in let node : type a. a bam_or_bams -> a = function | Single_bam _ -> workflow_node ~name ~make ~edges (transform_bam input_sorted_bam_1#product (output_bam_path input_sorted_bam_1)) | Bam_workflow_list _ -> workflow_node ~name ~make ~edges (bam_list (List.map (input_sorted_bam_1 :: more_input_sorted_bams) ~f:(fun b -> transform_bam b#product (output_bam_path b)))) in node input_bam_or_bams let indel_realigner_map_reduce : type a. 
?compress:bool -> configuration:(Indel_realigner.t * Realigner_target_creator.t) -> run_with:Machine.t -> ?run_directory: string -> a KEDSL.bam_or_bams -> a = fun ?compress ~configuration ~run_with ?run_directory input_bam_or_bams -> let open KEDSL in begin match input_bam_or_bams with | Single_bam bam_node -> let all_nodes = let f on_region = indel_realigner ?compress ~on_region ~configuration ~run_with ?run_directory input_bam_or_bams in let reference = Machine.get_reference_genome run_with bam_node#product#reference_build in List.map ~f (Reference_genome.major_contigs reference) in let result_path = Filename.chop_extension bam_node#product#path ^ indel_realigner_output_filename_tag ~configuration [bam_node] ^ "-merged.bam" in Samtools.merge_bams ~run_with all_nodes result_path | Bam_workflow_list bams -> let all_nodes = A list of lists that looks like : [ [ bam1_reg1 ; bam2_reg1 ; bam3_reg1 ] ; [ bam1_reg2 ; bam2_reg2 ; bam3_reg2 ] ; [ bam1_reg3 ; bam2_reg3 ; bam3_reg3 ] ; [ bam1_reg4 ; bam2_reg4 ; bam3_reg4 ] ; ] [ [bam1_reg1; bam2_reg1; bam3_reg1]; [bam1_reg2; bam2_reg2; bam3_reg2]; [bam1_reg3; bam2_reg3; bam3_reg3]; [bam1_reg4; bam2_reg4; bam3_reg4]; ] *) let f on_region = let bam_list_node = indel_realigner ?compress ~on_region ~configuration ~run_with ?run_directory input_bam_or_bams in let exploded = KEDSL.explode_bam_list_node bam_list_node in exploded in let reference = Machine.get_reference_genome run_with (List.hd_exn bams)#product#reference_build in List.map ~f (Reference_genome.major_contigs reference) in let merged_bams = List.mapi bams ~f:(fun index bam -> let all_regions_for_this_bam = List.map all_nodes ~f:(fun region_n -> List.nth region_n index |> Option.value_exn ~msg:"bug in Gatk.indel_realigner_map_reduce") in let result_path = Filename.chop_extension bam#product#path ^ indel_realigner_output_filename_tag ~configuration bams ^ "-merged.bam" in Samtools.merge_bams ~run_with all_regions_for_this_bam result_path ) in workflow_node ~name:"Indel-realigner-map-reduce" ~edges:(List.map merged_bams ~f:depends_on) (bam_list (List.map merged_bams ~f:(fun n -> n#product))) end Again doing two steps in one target for now : -quality-score-recalibrator -quality-score-recalibrator *) let call_gatk ~analysis ?(region=`Full) ?java_heap_memory args = let open KEDSL.Program in let escaped_args = List.map ~f:Filename.quote args in let intervals_option = Region.to_gatk_option region in let hm = match java_heap_memory with | None -> "" | Some m -> sprintf "-Xmx%s" m in sh (String.concat ~sep:" " ((sprintf "java %s -jar $GATK_JAR -T " hm) :: analysis :: intervals_option :: escaped_args)) let base_quality_score_recalibrator ~configuration:(bqsr_configuration, print_reads_configuration) ~run_with ~input_bam ~output_bam = let open KEDSL in let name = sprintf "gatk-%s" (Filename.basename output_bam) in let gatk = Machine.get_tool run_with Machine.Tool.Default.gatk in let reference_genome = Machine.get_reference_genome run_with input_bam#product#reference_build in let fasta = Reference_genome.fasta reference_genome in let db_snp = Reference_genome.dbsnp_exn reference_genome in let sorted_bam = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_bam in let input_bam = `Please_use_the_sorted_one in ignore input_bam; let recal_data_table = Name_file.from_path ~readable_suffix:"bqsr_recal.table" sorted_bam#product#path [] in let processors = Machine.max_processors run_with in let make = Machine.run_big_program run_with ~name ~processors ~self_ids:["gatk"; "bqsr"] Program.( 
Machine.Tool.(init gatk) && call_gatk ~analysis:"BaseRecalibrator" ([ "-nct"; Int.to_string processors; "-I"; sorted_bam#product#path; "-R"; fasta#product#path; "-knownSites"; db_snp#product#path; "-o"; recal_data_table; ] @ Configuration.Bqsr.render bqsr_configuration) && call_gatk ~analysis:"PrintReads" ([ "-nct"; Int.to_string processors; "-R"; fasta#product#path; "-I"; sorted_bam#product#path; "-BQSR"; recal_data_table; "-o"; output_bam; ] @ Configuration.Print_reads.render print_reads_configuration) ) in workflow_node ~name (transform_bam sorted_bam#product ~path:output_bam) ~make ~edges:[ depends_on Machine.Tool.(ensure gatk); depends_on fasta; depends_on db_snp; depends_on sorted_bam; depends_on (Samtools.faidx ~run_with fasta); depends_on (Samtools.index_to_bai ~run_with sorted_bam); on_failure_activate (Remove.file ~run_with output_bam); on_failure_activate (Remove.file ~run_with recal_data_table); ] let haplotype_caller ?(more_edges = []) ~run_with ~input_bam ~result_prefix how = let open KEDSL in let reference = Machine.get_reference_genome run_with input_bam#product#reference_build in let run_on_region ~add_edges region = let result_file suffix = let region_name = Region.to_filename region in sprintf "%s-%s%s" result_prefix region_name suffix in let output_vcf = result_file "-germline.vcf" in let gatk = Machine.get_tool run_with Machine.Tool.Default.gatk in let run_path = Filename.dirname output_vcf in let reference_fasta = Reference_genome.fasta reference in let reference_dot_fai = Samtools.faidx ~run_with reference_fasta in let sequence_dict = Picard.create_dict ~run_with reference_fasta in let dbsnp = Reference_genome.dbsnp_exn reference in let sorted_bam = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_bam in let run_gatk_haplotype_caller = let name = sprintf "%s" (Filename.basename output_vcf) in let make = Machine.run_big_program run_with ~name ~self_ids:["gatk"; "haplotype-caller"] Program.( Machine.Tool.(init gatk) && shf "mkdir -p %s" run_path && shf "cd %s" run_path && call_gatk ~region ~analysis:"HaplotypeCaller" [ "-I"; sorted_bam#product#path; "-R"; reference_fasta#product#path; "--dbsnp"; dbsnp#product#path; "-o"; output_vcf; "--filter_reads_with_N_cigar"; ] ) in workflow_node ~name ~make (vcf_file output_vcf ~reference_build:input_bam#product#reference_build ~host:Machine.(as_host run_with)) ~tags:[Target_tags.variant_caller] ~edges:(add_edges @ [ depends_on Machine.Tool.(ensure gatk); depends_on sorted_bam; depends_on reference_fasta; depends_on dbsnp; depends_on reference_dot_fai; depends_on sequence_dict; depends_on (Samtools.index_to_bai ~run_with sorted_bam); on_failure_activate (Remove.file ~run_with output_vcf); ]) in run_gatk_haplotype_caller in match how with | `Region region -> run_on_region ~add_edges:more_edges region | `Map_reduce -> let targets = List.map (Reference_genome.major_contigs reference) in let final_vcf = result_prefix ^ "-merged.vcf" in Vcftools.vcf_concat ~run_with targets ~final_vcf ~more_edges * Call somatic variants with Mutect2 . Mutect2 comes within the GATK ( as opposed to { ! } ) . Cf . also Mutect2 comes within the GATK (as opposed to {!Mutect}). Cf. 
also *) let mutect2 ?(more_edges = []) ~configuration ~run_with The doc says only one of each ~result_prefix how = let open KEDSL in let reference = Machine.get_reference_genome run_with input_normal_bam#product#reference_build in let run_on_region ~add_edges region = let result_file suffix = let region_name = Region.to_filename region in sprintf "%s-%s%s" result_prefix region_name suffix in let output_vcf = result_file "-mutect2.vcf" in let gatk = Machine.get_tool run_with Machine.Tool.Default.gatk in let run_path = Filename.dirname output_vcf in let reference_fasta = Reference_genome.fasta reference in let reference_dot_fai = Samtools.faidx ~run_with reference_fasta in let sequence_dict = Picard.create_dict ~run_with reference_fasta in let sorted_normal_bam = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_normal_bam in let sorted_tumor_bam = Samtools.sort_bam_if_necessary ~run_with ~by:`Coordinate input_tumor_bam in let `Arguments config_arguments, `Edges confg_edges = Configuration.Mutect2.compile ~reference configuration in let run_caller = let name = sprintf "%s" (Filename.basename output_vcf) in let make = Machine.run_big_program run_with ~name ~self_ids:["gatk"; "mutect2"] Program.( Machine.Tool.(init gatk) && shf "mkdir -p %s" run_path && shf "cd %s" run_path && call_gatk ~region ~analysis:"MuTect2" ([ "-I:normal"; sorted_normal_bam#product#path; "-I:tumor"; sorted_tumor_bam#product#path; "-R"; reference_fasta#product#path; "-o"; output_vcf; ] @ config_arguments) ) in workflow_node ~name ~make (vcf_file output_vcf ~reference_build:input_normal_bam#product#reference_build ~host:Machine.(as_host run_with)) ~tags:[Target_tags.variant_caller] ~edges:(add_edges @ confg_edges @ [ depends_on Machine.Tool.(ensure gatk); depends_on sorted_normal_bam; depends_on sorted_tumor_bam; depends_on reference_fasta; depends_on reference_dot_fai; depends_on sequence_dict; depends_on (Samtools.index_to_bai ~run_with sorted_normal_bam); depends_on (Samtools.index_to_bai ~run_with sorted_tumor_bam); on_failure_activate (Remove.file ~run_with output_vcf); ]) in run_caller in match how with | `Region region -> run_on_region ~add_edges:more_edges region | `Map_reduce -> let targets = List.map (Reference_genome.major_contigs reference) in let final_vcf = result_prefix ^ "-merged.vcf" in Vcftools.vcf_concat ~run_with targets ~final_vcf ~more_edges
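The gatk.ml record above runs its `Map_reduce variants (indel_realigner_map_reduce, and the `Map_reduce mode of haplotype_caller and mutect2) as a scatter-gather: one workflow node per major contig of the reference, then a merge of the per-region outputs (Samtools.merge_bams for BAMs, Vcftools.vcf_concat for VCFs). Below is a minimal, self-contained Haskell sketch of that shape only; it is not Biokepi code, and Region, runOnRegion and merge are hypothetical stand-ins for Region.t, the per-region workflow node, and the merge step.

-- Minimal scatter-gather sketch; names are stand-ins, not Biokepi APIs.
module Main where

import Data.List (intercalate)

type Region = String          -- e.g. "chr1", "chr2", ...
type Output = FilePath        -- path of one per-region result

-- Scatter: produce one output name per region (the real code produces a
-- workflow node per region instead of a plain path).
runOnRegion :: FilePath -> Region -> Output
runOnRegion prefix region = prefix ++ "-" ++ region ++ ".vcf"

-- Gather: merge every per-region output into a single final artifact.
merge :: [Output] -> FilePath -> String
merge parts final = final ++ " <- concat of [" ++ intercalate ", " parts ++ "]"

-- The map-reduce shape: fan out over the major contigs, then merge.
mapReduce :: FilePath -> [Region] -> String
mapReduce prefix contigs =
  let parts = map (runOnRegion prefix) contigs
  in  merge parts (prefix ++ "-merged.vcf")

main :: IO ()
main = putStrLn (mapReduce "sample1" ["chr1", "chr2", "chrX"])

The real code additionally threads Ketrew edges, digest-based output naming, and failure clean-up through each step; the sketch keeps only the region fan-out and the final merge.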
ab742a2f09700bf8e033a2e9e1634c224a5007083d45749c1a66bac5d5e50e1a
graninas/Functional-Design-and-Architecture
Device.hs
module Andromeda.Hardware.Device ( ) where
null
https://raw.githubusercontent.com/graninas/Functional-Design-and-Architecture/1736abc16d3e4917fc466010dcc182746af2fd0e/Second-Edition-Manning-Publications/BookSamples/CH03/Section3p2/Andromeda/Hardware/Device.hs
haskell
module Andromeda.Hardware.Device ( ) where
ef496c6d9bbdb92a0b5d38fd90ff67691709e7e340de182ba5adcc44126c949a
auser/hermes
nag.erl
%%%------------------------------------------------------------------- %%% File : nag.erl Author : %%% Description : %%% Created : Mon Aug 10 00:19:48 PDT 2009 %%%------------------------------------------------------------------- -module (nag). -include ("hermes.hrl"). -behaviour(gen_server). %% API -export([start_link/0]). %% gen_server callbacks -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). -record(state, { sleep_delay % delay between lags }). -define(SERVER, ?MODULE). %%==================================================================== %% API %%==================================================================== %%-------------------------------------------------------------------- Function : start_link ( ) - > { ok , Pid } | ignore | { error , Error } %% Description: Starts the server %%-------------------------------------------------------------------- start_link() -> gen_server:start_link({local, ?SERVER}, ?MODULE, [], []). %%==================================================================== %% gen_server callbacks %%==================================================================== %%-------------------------------------------------------------------- %% Function: init(Args) -> {ok, State} | { ok , State , Timeout } | %% ignore | %% {stop, Reason} %% Description: Initiates the server %%-------------------------------------------------------------------- init([]) -> SleepDelay = case application:get_env(hermes, nag_delay) of { ok, D } -> D; undefined -> ?DEFAULT_NAG_DELAY end, start_nag_timer(SleepDelay), {ok, #state{ sleep_delay = SleepDelay }}. %%-------------------------------------------------------------------- Function : % % handle_call(Request , From , State ) - > { reply , Reply , State } | { reply , Reply , State , Timeout } | { noreply , State } | { noreply , State , Timeout } | %% {stop, Reason, Reply, State} | %% {stop, Reason, State} %% Description: Handling call messages %%-------------------------------------------------------------------- handle_call(_Request, _From, State) -> Reply = ok, {reply, Reply, State}. %%-------------------------------------------------------------------- Function : handle_cast(Msg , State ) - > { noreply , State } | { noreply , State , Timeout } | %% {stop, Reason, State} %% Description: Handling cast messages %%-------------------------------------------------------------------- handle_cast(_Msg, State) -> {noreply, State}. 
%%-------------------------------------------------------------------- Function : handle_info(Info , State ) - > { noreply , State } | { noreply , State , Timeout } | %% {stop, Reason, State} %% Description: Handling all non call/cast messages %%-------------------------------------------------------------------- handle_info({nag, Interval}, #state{sleep_delay = SleepDelay} = State) -> {ok, MonReturn} = ambassador:ask("monitors", []), Monitors = lists:map(fun(MonString) -> LocalMon = case string:tokens(MonString, ":") of [M, "null"] -> M; E -> E end, utils:turn_to_atom(LocalMon) end, utils:turn_to_list(MonReturn)), lists:map(fun(Mon) -> Float = mon_server:get_latest_average_for(Mon, Interval), ?INFO("format_args_for_thrift(utils:turn_binary(Float)): ~p~n", [format_args_for_thrift(utils:turn_binary(Float))]), Out = ambassador:ask("run_monitor", [ erlang:atom_to_list(Mon), format_args_for_thrift(utils:turn_binary(Float)) ]), ?INFO("Got back from ambassador: ~p~n", [Out]), case catch Out of {'EXIT', _} -> ok; {ok, [Resp]} -> ?INFO("Resp", [Resp]), case string:tokens(Resp, ":") of ["vote_for", Action] -> ElectionName = erlang:list_to_atom(lists:append(["hold_election_", Action])), case stoplight_client:lock(ElectionName, ?LOCK_TIMOUT) of {no, _} -> ok; {crit, _} -> ?INFO("Calling action ~p for ~p (~p)~n", [Action, erlang:atom_to_list(Mon), ElectionName]), ElectionValue = athens:call_ambassador_election(Mon, Action), case ElectionValue > 0.5 of true -> get_lock_and_call_action(Action); _ -> ok end; Else -> ?INFO("Got other response for the election: ~p => ~p~n", [Action, Else]) end; [Action] -> ambassador:run(Action, []); _Else -> ok % ?INFO("Unhandled Event: ~p~n", [Else]) end, % ?INFO("VOTE ACTION!: ~p (Load: ~p)~n", [Resp, Float]), timer:sleep(1000), Resp; {ok, []} -> ok; {error, _} -> ok end end, Monitors), start_nag_timer(SleepDelay), {noreply, State}; handle_info(_Info, State) -> {noreply, State}. %%-------------------------------------------------------------------- %% Function: terminate(Reason, State) -> void() %% Description: This function is called by a gen_server when it is about to %% terminate. It should be the opposite of Module:init/1 and do any necessary %% cleaning up. When it returns, the gen_server terminates with Reason. %% The return value is ignored. %%-------------------------------------------------------------------- terminate(_Reason, _State) -> ok. %%-------------------------------------------------------------------- Func : code_change(OldVsn , State , Extra ) - > { ok , NewState } %% Description: Convert process state when code is changed %%-------------------------------------------------------------------- code_change(_OldVsn, State, _Extra) -> {ok, State}. %%-------------------------------------------------------------------- Internal functions %%-------------------------------------------------------------------- start_nag_timer(SleepDelay) -> timer:send_after(SleepDelay, {nag, 600}). get_lock_and_call_action(Action) -> ?INFO("Won the election for ~p. Get the lock on the system and call the action!~n", [Action]), ElectionName2 = erlang:list_to_atom(lists:append(["run_action_", Action])), case stoplight_client:lock(ElectionName2, ?LOCK_TIMOUT) of {no, _} -> ok; {crit, _} -> F = fun() -> ?INFO("Got the lock on the system for ~p (~p)~n", [Action, ElectionName2]), O = ambassador:run(Action, []), ?INFO("Ambassador response from ~p: ~p~n", [Action, O]), O end, spawn(F); _ -> ok end. 
format_args_for_thrift(Args) when is_list(Args) -> [FirstElement|_] = Args, Out = case FirstElement of O when is_binary(O) -> StringElements = lists:map(fun(Bin) -> erlang:binary_to_list(Bin) end, Args), string:join(StringElements, ", "); O -> O end; format_args_for_thrift(Args) -> Args.
null
https://raw.githubusercontent.com/auser/hermes/32741eb75398ebbcbf640e2c73dfd2a54f0d1241/src/nag/nag.erl
erlang
------------------------------------------------------------------- File : nag.erl Description : ------------------------------------------------------------------- API gen_server callbacks delay between lags ==================================================================== API ==================================================================== -------------------------------------------------------------------- Description: Starts the server -------------------------------------------------------------------- ==================================================================== gen_server callbacks ==================================================================== -------------------------------------------------------------------- Function: init(Args) -> {ok, State} | ignore | {stop, Reason} Description: Initiates the server -------------------------------------------------------------------- -------------------------------------------------------------------- % handle_call(Request , From , State ) - > { reply , Reply , State } | {stop, Reason, Reply, State} | {stop, Reason, State} Description: Handling call messages -------------------------------------------------------------------- -------------------------------------------------------------------- {stop, Reason, State} Description: Handling cast messages -------------------------------------------------------------------- -------------------------------------------------------------------- {stop, Reason, State} Description: Handling all non call/cast messages -------------------------------------------------------------------- ?INFO("Unhandled Event: ~p~n", [Else]) ?INFO("VOTE ACTION!: ~p (Load: ~p)~n", [Resp, Float]), -------------------------------------------------------------------- Function: terminate(Reason, State) -> void() Description: This function is called by a gen_server when it is about to terminate. It should be the opposite of Module:init/1 and do any necessary cleaning up. When it returns, the gen_server terminates with Reason. The return value is ignored. -------------------------------------------------------------------- -------------------------------------------------------------------- Description: Convert process state when code is changed -------------------------------------------------------------------- -------------------------------------------------------------------- --------------------------------------------------------------------
Author : Created : Mon Aug 10 00:19:48 PDT 2009 -module (nag). -include ("hermes.hrl"). -behaviour(gen_server). -export([start_link/0]). -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). -record(state, { }). -define(SERVER, ?MODULE). Function : start_link ( ) - > { ok , Pid } | ignore | { error , Error } start_link() -> gen_server:start_link({local, ?SERVER}, ?MODULE, [], []). { ok , State , Timeout } | init([]) -> SleepDelay = case application:get_env(hermes, nag_delay) of { ok, D } -> D; undefined -> ?DEFAULT_NAG_DELAY end, start_nag_timer(SleepDelay), {ok, #state{ sleep_delay = SleepDelay }}. { reply , Reply , State , Timeout } | { noreply , State } | { noreply , State , Timeout } | handle_call(_Request, _From, State) -> Reply = ok, {reply, Reply, State}. Function : handle_cast(Msg , State ) - > { noreply , State } | { noreply , State , Timeout } | handle_cast(_Msg, State) -> {noreply, State}. Function : handle_info(Info , State ) - > { noreply , State } | { noreply , State , Timeout } | handle_info({nag, Interval}, #state{sleep_delay = SleepDelay} = State) -> {ok, MonReturn} = ambassador:ask("monitors", []), Monitors = lists:map(fun(MonString) -> LocalMon = case string:tokens(MonString, ":") of [M, "null"] -> M; E -> E end, utils:turn_to_atom(LocalMon) end, utils:turn_to_list(MonReturn)), lists:map(fun(Mon) -> Float = mon_server:get_latest_average_for(Mon, Interval), ?INFO("format_args_for_thrift(utils:turn_binary(Float)): ~p~n", [format_args_for_thrift(utils:turn_binary(Float))]), Out = ambassador:ask("run_monitor", [ erlang:atom_to_list(Mon), format_args_for_thrift(utils:turn_binary(Float)) ]), ?INFO("Got back from ambassador: ~p~n", [Out]), case catch Out of {'EXIT', _} -> ok; {ok, [Resp]} -> ?INFO("Resp", [Resp]), case string:tokens(Resp, ":") of ["vote_for", Action] -> ElectionName = erlang:list_to_atom(lists:append(["hold_election_", Action])), case stoplight_client:lock(ElectionName, ?LOCK_TIMOUT) of {no, _} -> ok; {crit, _} -> ?INFO("Calling action ~p for ~p (~p)~n", [Action, erlang:atom_to_list(Mon), ElectionName]), ElectionValue = athens:call_ambassador_election(Mon, Action), case ElectionValue > 0.5 of true -> get_lock_and_call_action(Action); _ -> ok end; Else -> ?INFO("Got other response for the election: ~p => ~p~n", [Action, Else]) end; [Action] -> ambassador:run(Action, []); end, timer:sleep(1000), Resp; {ok, []} -> ok; {error, _} -> ok end end, Monitors), start_nag_timer(SleepDelay), {noreply, State}; handle_info(_Info, State) -> {noreply, State}. terminate(_Reason, _State) -> ok. Func : code_change(OldVsn , State , Extra ) - > { ok , NewState } code_change(_OldVsn, State, _Extra) -> {ok, State}. Internal functions start_nag_timer(SleepDelay) -> timer:send_after(SleepDelay, {nag, 600}). get_lock_and_call_action(Action) -> ?INFO("Won the election for ~p. Get the lock on the system and call the action!~n", [Action]), ElectionName2 = erlang:list_to_atom(lists:append(["run_action_", Action])), case stoplight_client:lock(ElectionName2, ?LOCK_TIMOUT) of {no, _} -> ok; {crit, _} -> F = fun() -> ?INFO("Got the lock on the system for ~p (~p)~n", [Action, ElectionName2]), O = ambassador:run(Action, []), ?INFO("Ambassador response from ~p: ~p~n", [Action, O]), O end, spawn(F); _ -> ok end. 
format_args_for_thrift(Args) when is_list(Args) -> [FirstElement|_] = Args, Out = case FirstElement of O when is_binary(O) -> StringElements = lists:map(fun(Bin) -> erlang:binary_to_list(Bin) end, Args), string:join(StringElements, ", "); O -> O end; format_args_for_thrift(Args) -> Args.
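nag.erl above is a gen_server that re-arms its own timer: init/1 reads nag_delay, schedules a {nag, Interval} message with timer:send_after, and the handle_info clause polls the monitors through ambassador:ask before scheduling the next round. The following is a minimal Haskell sketch of that self-rescheduling polling loop, not hermes code; the 10-second delay and the pollMonitors action are assumed placeholders for the nag_delay setting and the ambassador round-trip.

module Main where

import Control.Concurrent (forkIO, threadDelay)
import Control.Monad (forever)

-- Placeholder for hermes' nag_delay setting (microseconds here).
nagDelayMicros :: Int
nagDelayMicros = 10 * 1000 * 1000

-- Placeholder for one round of work: ask for the monitors, run them,
-- and possibly vote on an action with the result.
pollMonitors :: IO ()
pollMonitors = putStrLn "polling monitors"

-- Counterpart of start_nag_timer plus the {nag, Interval} handle_info
-- clause: do one round, wait for the configured delay, repeat.
nagLoop :: IO ()
nagLoop = forever (pollMonitors >> threadDelay nagDelayMicros)

main :: IO ()
main = do
  _ <- forkIO nagLoop
  threadDelay (3 * nagDelayMicros)   -- let the loop run a few rounds, then exit

The gen_server version keeps the delay in its state record and re-arms a one-shot timer so the work runs inside the server process; the sketch simply alternates work and sleep on a forked thread.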
5beb07535f0e3fdc67cf54d35c8af8d499d58f688f61084e0289986cc7669c70
evansb/cis194-hw
StackVM.hs
module StackVM (StackVal(..), StackExp(..), Stack, Program, stackVM) where -- Values that may appear in the stack. Such a value will also be -- returned by the stackVM program execution function. data StackVal = IVal Integer | BVal Bool | Void deriving Show -- The various expressions our VM understands. data StackExp = PushI Integer | PushB Bool | Add | Mul | And | Or deriving Show type Stack = [StackVal] type Program = [StackExp] -- Execute the given program. Returns either an error message or the -- value on top of the stack after execution. stackVM :: Program -> Either String StackVal stackVM = execute [] errType :: String -> Either String a errType op = Left $ "Encountered '" ++ op ++ "' opcode with ill-typed stack." errUnderflow :: String -> Either String a errUnderflow op = Left $ "Stack underflow with '" ++ op ++ "' opcode." -- Execute a program against a given stack. execute :: Stack -> Program -> Either String StackVal execute [] [] = Right Void execute (s:_) [] = Right s execute s (PushI x : xs) = execute (IVal x : s) xs execute s (PushB x : xs) = execute (BVal x : s) xs execute (IVal s1 : IVal s2 : ss) (Add : xs) = execute (s':ss) xs where s' = IVal (s1 + s2) execute (_:_:_) (Add:_) = errType "Add" execute _ (Add:_) = errUnderflow "Add" execute (IVal s1:IVal s2:ss) (Mul : xs) = execute (s':ss) xs where s' = IVal (s1 * s2) execute (_:_:_) (Mul:_) = errType "Mul" execute _ (Mul:_) = errUnderflow "Mul" execute (BVal s1:BVal s2:ss) (And : xs) = execute (s':ss) xs where s' = BVal (s1 && s2) execute (_:_:_) (And:_) = errType "And" execute _ (And:_) = errUnderflow "And" execute (BVal s1 : BVal s2 : ss) (Or : xs) = execute (s':ss) xs where s' = BVal (s1 || s2) execute (_:_:_) (Or:_) = errType "Or" execute _ (Or:_) = errUnderflow "Or"
null
https://raw.githubusercontent.com/evansb/cis194-hw/7ee2bc5e45a6c61d021b43e19ded66a0899f9c33/spring_2013/hw5/StackVM.hs
haskell
Values that may appear in the stack. Such a value will also be returned by the stackVM program execution function. The various expressions our VM understands. Execute the given program. Returns either an error message or the value on top of the stack after execution. Execute a program against a given stack.
module StackVM (StackVal(..), StackExp(..), Stack, Program, stackVM) where data StackVal = IVal Integer | BVal Bool | Void deriving Show data StackExp = PushI Integer | PushB Bool | Add | Mul | And | Or deriving Show type Stack = [StackVal] type Program = [StackExp] stackVM :: Program -> Either String StackVal stackVM = execute [] errType :: String -> Either String a errType op = Left $ "Encountered '" ++ op ++ "' opcode with ill-typed stack." errUnderflow :: String -> Either String a errUnderflow op = Left $ "Stack underflow with '" ++ op ++ "' opcode." execute :: Stack -> Program -> Either String StackVal execute [] [] = Right Void execute (s:_) [] = Right s execute s (PushI x : xs) = execute (IVal x : s) xs execute s (PushB x : xs) = execute (BVal x : s) xs execute (IVal s1 : IVal s2 : ss) (Add : xs) = execute (s':ss) xs where s' = IVal (s1 + s2) execute (_:_:_) (Add:_) = errType "Add" execute _ (Add:_) = errUnderflow "Add" execute (IVal s1:IVal s2:ss) (Mul : xs) = execute (s':ss) xs where s' = IVal (s1 * s2) execute (_:_:_) (Mul:_) = errType "Mul" execute _ (Mul:_) = errUnderflow "Mul" execute (BVal s1:BVal s2:ss) (And : xs) = execute (s':ss) xs where s' = BVal (s1 && s2) execute (_:_:_) (And:_) = errType "And" execute _ (And:_) = errUnderflow "And" execute (BVal s1 : BVal s2 : ss) (Or : xs) = execute (s':ss) xs where s' = BVal (s1 || s2) execute (_:_:_) (Or:_) = errType "Or" execute _ (Or:_) = errUnderflow "Or"
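StackVM.hs above evaluates a Program (a list of StackExp) against an empty stack and reports ill-typed operands and stack underflows as Left messages. A small usage sketch follows, assuming it lives in a separate Main module next to StackVM.hs so the import resolves; expected results are noted in comments.

module Main where

import StackVM (StackExp(..), stackVM)

main :: IO ()
main = do
  -- (3 + 5) * 2; operands are pushed before the opcodes that consume them.
  print (stackVM [PushI 2, PushI 5, PushI 3, Add, Mul])  -- Right (IVal 16)
  print (stackVM [PushB True, PushB False, Or])          -- Right (BVal True)
  print (stackVM [PushI 1, PushB True, Add])             -- Left ".. ill-typed stack."
  print (stackVM [Add])                                  -- Left "Stack underflow .."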
3aa51a1e3c2e5f3d9959afa93f6de29b2d8318da5914733c25b36c5404ff6de9
liqd/aula
Prelude.hs
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DeriveDataTypeable #-} # LANGUAGE DeriveGeneric # {-# LANGUAGE FlexibleContexts #-} # LANGUAGE GeneralizedNewtypeDeriving # {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE Rank2Types #-} # LANGUAGE ScopedTypeVariables # {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE ViewPatterns #-} {-# OPTIONS_GHC -Wall -Werror #-} module Types.Prelude where import Control.Lens hiding ((<.>)) import Control.Monad.Trans.Except (ExceptT, runExceptT) import Data.Char import Data.Data (Data) import Data.Function (on) import Data.List as List (sortBy) import Data.Monoid import Data.Proxy (Proxy(Proxy)) import Data.SafeCopy (base, deriveSafeCopy) import Data.String.Conversions import Data.Time import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds) import Data.Typeable (Typeable) import GHC.Generics (Generic) import Servant ((:~>)(Nat)) import System.Directory (getDirectoryContents) import Text.Read (readEither) import qualified Data.Aeson as Aeson import qualified Data.Text as ST import qualified Data.Ord (Down(Down)) import qualified Generics.Generic.Aeson as Aeson import qualified Generics.SOP as SOP | A shorter alias for ' ' . nil :: Monoid a => a nil = mempty isNil :: (Monoid a, Eq a) => a -> Bool isNil = (== nil) readWith :: Read a => Proxy a -> String -> a readWith Proxy = read justIf :: a -> Bool -> Maybe a justIf x b = if b then Just x else Nothing justIfP :: a -> (a -> Bool) -> Maybe a justIfP x f = justIf x (f x) lowerFirst :: String -> String lowerFirst [] = [] lowerFirst (x:xs) = toLower x : xs toEnumMay :: forall a. (Enum a, Bounded a) => Int -> Maybe a toEnumMay i = if i >= 0 && i <= fromEnum (maxBound :: a) then Just $ toEnum i else Nothing readEitherCS :: (ConvertibleStrings String c, Read a) => String -> Either c a readEitherCS = either (Left . cs) Right . readEither type CSI s t a b = (ConvertibleStrings s a, ConvertibleStrings b t) type CSI' s a = CSI s s a a -- An optic for string conversion -- let p = ("a" :: ST, Just ("b" :: SBS)) p ^. _ 1 . : : SBS -- > "a" -- p & _1 . csi %~ ('x':) > ( " xa " , " b " ) csi :: CSI s t a b => Iso s t a b csi = iso cs cs cshow :: (Show a, ConvertibleStrings String c) => a -> c cshow = cs . show showed :: Show a => Getter a String showed = to show _utctDay :: Lens' UTCTime Day _utctDay f t = (\d -> t { utctDay = d }) <$> f (utctDay t) -- As in the lens-datetime package julianDay :: Iso' Day Integer julianDay = iso toModifiedJulianDay ModifiedJulianDay exceptToFail :: (Monad m, Show e) => ExceptT e m :~> m exceptToFail = Nat ((either (fail . show) pure =<<) . runExceptT) data Either3 a b c = Left3 a | Middle3 b | Right3 c deriving (Eq, Ord, Show, Read, Generic) instance (SOP.Generic a, SOP.Generic b, SOP.Generic c) => SOP.Generic (Either3 a b c) instance (Aeson.ToJSON a, Aeson.ToJSON b, Aeson.ToJSON c) => Aeson.ToJSON (Either3 a b c) where toJSON = Aeson.gtoJson instance (Aeson.FromJSON a, Aeson.FromJSON b, Aeson.FromJSON c) => Aeson.FromJSON (Either3 a b c) where parseJSON = Aeson.gparseJson infixr 9 <..> (<..>) :: (c -> d) -> (a -> b -> c) -> a -> b -> d (<..>) f g x y = f $ g x y infixr 9 <...> (<...>) :: (d -> e) -> (a -> b -> c -> d) -> a -> b -> c -> e (<...>) f g x y z = f $ g x y z sortOn :: Ord b => Getter a b -> [a] -> [a] sortOn l = sortBy (compare `on` view l) downSortOn :: Ord b => Getter a b -> [a] -> [a] downSortOn l = sortOn (l . 
to Data.Ord.Down) countEq :: (Foldable f, Eq value) => value -> Lens' vote value -> f vote -> Int countEq v l = lengthOf $ folded . filtered ((== v) . view l) -- | Use this for storing URLs in the aula state. Unlike 'UriPath' is serializable, has equality, and unlike " Frontend . Path " , it is flexible enough to contain internal and external uris . ( : the ` uri - bytestring ` package could be nice here , but it may require a few orphans or a newtype to prevent them ; see also : # 31 . ) type URL = ST -- * time newtype Timestamp = Timestamp { unTimestamp :: UTCTime } deriving (Eq, Ord, Generic, Typeable, Data) timestampToEpoch :: Timestamp -> Integer timestampToEpoch = round . utcTimeToPOSIXSeconds . unTimestamp FIXME : import this from thentos ? create a package thentos - base ? TimespanUs Integer | TimespanMs Integer | TimespanSecs Integer | TimespanMins Integer | TimespanHours Integer | TimespanDays Integer deriving (Eq, Ord, Show, Read, Generic, Typeable, Data) instance SOP.Generic Timestamp instance SOP.Generic Timespan deriveSafeCopy 0 'base ''Timestamp deriveSafeCopy 0 'base ''Timespan instance Aeson.ToJSON Timestamp where toJSON = Aeson.gtoJson instance Aeson.FromJSON Timestamp where parseJSON = Aeson.gparseJson instance Show Timestamp where show = showTimestamp instance Read Timestamp where readsPrec _ s = case splitAt timestampFormatLength $ dropWhile isSpace s of (parseTimestamp -> Just t, r) -> [(t, r)] _ -> error $ "Read Timestamp: " <> show s parseTimestamp :: String -> Maybe Timestamp parseTimestamp = fmap Timestamp . parseTimeM True defaultTimeLocale timestampFormat showTimestamp :: Timestamp -> String showTimestamp = formatTime defaultTimeLocale timestampFormat . unTimestamp timestampFormat :: String timestampFormat = "%F_%T_%q" timestampFormatLength :: Int timestampFormatLength = length ("1864-04-13_13:01:33_846177415049" :: String) simpleTimestampToHtmlDate :: Timestamp -> String simpleTimestampToHtmlDate = formatTime defaultTimeLocale "%d.%m.%Y" . unTimestamp showTimespan :: Timespan -> String showTimespan (TimespanUs i) = show i <> "us" showTimespan (TimespanMs i) = show i <> "ms" showTimespan (TimespanSecs i) = show i <> "s" showTimespan (TimespanMins i) = show i <> "m" showTimespan (TimespanHours i) = show i <> "h" showTimespan (TimespanDays i) = show i <> "d" timespanUs :: Timespan -> Int timespanUs (TimespanUs i) = fromIntegral i timespanUs (TimespanMs i) = fromIntegral $ i * 1000 timespanUs (TimespanSecs i) = fromIntegral $ i * (1000 * 1000) timespanUs (TimespanMins i) = fromIntegral $ i * (1000 * 1000 * 60) timespanUs (TimespanHours i) = fromIntegral $ i * (1000 * 1000 * 3600) timespanUs (TimespanDays i) = fromIntegral $ i * (1000 * 1000 * 3600 * 24) timespanDays :: Timespan -> Int timespanDays = (`div` (1000 * 1000 * 3600 * 24)) . timespanUs instance Aeson.FromJSON Timespan where parseJSON = Aeson.withText "Timespan value" $ \raw -> do let (digits, units) = ST.break (`notElem` ("-0123456789" :: String)) raw bad = fail $ "bad Timespan value: " <> cs (show raw) construct :: Monad m => ST -> (Integer -> Timespan) -> m Timespan construct i cns = pure . cns . read . 
cs $ i case (digits, units) of ("", _) -> bad (i, "us") -> construct i TimespanUs (i, "ms") -> construct i TimespanMs (i, "s") -> construct i TimespanSecs (i, "m") -> construct i TimespanMins (i, "h") -> construct i TimespanHours (i, "d") -> construct i TimespanDays _ -> bad instance Aeson.ToJSON Timespan where toJSON = \case (TimespanUs i) -> render i "us" (TimespanMs i) -> render i "ms" (TimespanSecs i) -> render i "s" (TimespanMins i) -> render i "m" (TimespanHours i) -> render i "h" (TimespanDays i) -> render i "d" where render :: Integer -> String -> Aeson.Value render i unit = Aeson.String . cs $ show i <> unit diffTimestamps :: Timestamp -> Timestamp -> Timespan diffTimestamps (Timestamp tfrom) (Timestamp ttill) = TimespanUs . round $ (tfrom `diffUTCTime` ttill) * (1000 * 1000) addTimespan :: Timespan -> Timestamp -> Timestamp addTimespan tdiff (Timestamp tfrom) = Timestamp $ fromRational (fromIntegral (timespanUs tdiff) / (1000 * 1000) :: Rational) `addUTCTime` tfrom fromNow :: Timestamp -> Iso' Timestamp Timespan fromNow now = iso (`diffTimestamps` now) (`addTimespan` now) getDirectoryContentsNoDots :: FilePath -> IO [FilePath] getDirectoryContentsNoDots path = filter (not . (`elem` [".", ".."])) <$> getDirectoryContents path
null
https://raw.githubusercontent.com/liqd/aula/f96dbf85cd80d0b445e7d198c9b2866bed9c4e3d/src/Types/Prelude.hs
haskell
# LANGUAGE ConstraintKinds # # LANGUAGE DeriveDataTypeable # # LANGUAGE FlexibleContexts # # LANGUAGE LambdaCase # # LANGUAGE OverloadedStrings # # LANGUAGE Rank2Types # # LANGUAGE TemplateHaskell # # LANGUAGE TypeOperators # # LANGUAGE ViewPatterns # # OPTIONS_GHC -Wall -Werror # An optic for string conversion let p = ("a" :: ST, Just ("b" :: SBS)) > "a" p & _1 . csi %~ ('x':) As in the lens-datetime package | Use this for storing URLs in the aula state. Unlike 'UriPath' is serializable, has equality, * time
# LANGUAGE DeriveGeneric # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE ScopedTypeVariables # module Types.Prelude where import Control.Lens hiding ((<.>)) import Control.Monad.Trans.Except (ExceptT, runExceptT) import Data.Char import Data.Data (Data) import Data.Function (on) import Data.List as List (sortBy) import Data.Monoid import Data.Proxy (Proxy(Proxy)) import Data.SafeCopy (base, deriveSafeCopy) import Data.String.Conversions import Data.Time import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds) import Data.Typeable (Typeable) import GHC.Generics (Generic) import Servant ((:~>)(Nat)) import System.Directory (getDirectoryContents) import Text.Read (readEither) import qualified Data.Aeson as Aeson import qualified Data.Text as ST import qualified Data.Ord (Down(Down)) import qualified Generics.Generic.Aeson as Aeson import qualified Generics.SOP as SOP | A shorter alias for ' ' . nil :: Monoid a => a nil = mempty isNil :: (Monoid a, Eq a) => a -> Bool isNil = (== nil) readWith :: Read a => Proxy a -> String -> a readWith Proxy = read justIf :: a -> Bool -> Maybe a justIf x b = if b then Just x else Nothing justIfP :: a -> (a -> Bool) -> Maybe a justIfP x f = justIf x (f x) lowerFirst :: String -> String lowerFirst [] = [] lowerFirst (x:xs) = toLower x : xs toEnumMay :: forall a. (Enum a, Bounded a) => Int -> Maybe a toEnumMay i = if i >= 0 && i <= fromEnum (maxBound :: a) then Just $ toEnum i else Nothing readEitherCS :: (ConvertibleStrings String c, Read a) => String -> Either c a readEitherCS = either (Left . cs) Right . readEither type CSI s t a b = (ConvertibleStrings s a, ConvertibleStrings b t) type CSI' s a = CSI s s a a p ^. _ 1 . : : SBS > ( " xa " , " b " ) csi :: CSI s t a b => Iso s t a b csi = iso cs cs cshow :: (Show a, ConvertibleStrings String c) => a -> c cshow = cs . show showed :: Show a => Getter a String showed = to show _utctDay :: Lens' UTCTime Day _utctDay f t = (\d -> t { utctDay = d }) <$> f (utctDay t) julianDay :: Iso' Day Integer julianDay = iso toModifiedJulianDay ModifiedJulianDay exceptToFail :: (Monad m, Show e) => ExceptT e m :~> m exceptToFail = Nat ((either (fail . show) pure =<<) . runExceptT) data Either3 a b c = Left3 a | Middle3 b | Right3 c deriving (Eq, Ord, Show, Read, Generic) instance (SOP.Generic a, SOP.Generic b, SOP.Generic c) => SOP.Generic (Either3 a b c) instance (Aeson.ToJSON a, Aeson.ToJSON b, Aeson.ToJSON c) => Aeson.ToJSON (Either3 a b c) where toJSON = Aeson.gtoJson instance (Aeson.FromJSON a, Aeson.FromJSON b, Aeson.FromJSON c) => Aeson.FromJSON (Either3 a b c) where parseJSON = Aeson.gparseJson infixr 9 <..> (<..>) :: (c -> d) -> (a -> b -> c) -> a -> b -> d (<..>) f g x y = f $ g x y infixr 9 <...> (<...>) :: (d -> e) -> (a -> b -> c -> d) -> a -> b -> c -> e (<...>) f g x y z = f $ g x y z sortOn :: Ord b => Getter a b -> [a] -> [a] sortOn l = sortBy (compare `on` view l) downSortOn :: Ord b => Getter a b -> [a] -> [a] downSortOn l = sortOn (l . to Data.Ord.Down) countEq :: (Foldable f, Eq value) => value -> Lens' vote value -> f vote -> Int countEq v l = lengthOf $ folded . filtered ((== v) . view l) and unlike " Frontend . Path " , it is flexible enough to contain internal and external uris . ( : the ` uri - bytestring ` package could be nice here , but it may require a few orphans or a newtype to prevent them ; see also : # 31 . ) type URL = ST newtype Timestamp = Timestamp { unTimestamp :: UTCTime } deriving (Eq, Ord, Generic, Typeable, Data) timestampToEpoch :: Timestamp -> Integer timestampToEpoch = round . 
utcTimeToPOSIXSeconds . unTimestamp FIXME : import this from thentos ? create a package thentos - base ? TimespanUs Integer | TimespanMs Integer | TimespanSecs Integer | TimespanMins Integer | TimespanHours Integer | TimespanDays Integer deriving (Eq, Ord, Show, Read, Generic, Typeable, Data) instance SOP.Generic Timestamp instance SOP.Generic Timespan deriveSafeCopy 0 'base ''Timestamp deriveSafeCopy 0 'base ''Timespan instance Aeson.ToJSON Timestamp where toJSON = Aeson.gtoJson instance Aeson.FromJSON Timestamp where parseJSON = Aeson.gparseJson instance Show Timestamp where show = showTimestamp instance Read Timestamp where readsPrec _ s = case splitAt timestampFormatLength $ dropWhile isSpace s of (parseTimestamp -> Just t, r) -> [(t, r)] _ -> error $ "Read Timestamp: " <> show s parseTimestamp :: String -> Maybe Timestamp parseTimestamp = fmap Timestamp . parseTimeM True defaultTimeLocale timestampFormat showTimestamp :: Timestamp -> String showTimestamp = formatTime defaultTimeLocale timestampFormat . unTimestamp timestampFormat :: String timestampFormat = "%F_%T_%q" timestampFormatLength :: Int timestampFormatLength = length ("1864-04-13_13:01:33_846177415049" :: String) simpleTimestampToHtmlDate :: Timestamp -> String simpleTimestampToHtmlDate = formatTime defaultTimeLocale "%d.%m.%Y" . unTimestamp showTimespan :: Timespan -> String showTimespan (TimespanUs i) = show i <> "us" showTimespan (TimespanMs i) = show i <> "ms" showTimespan (TimespanSecs i) = show i <> "s" showTimespan (TimespanMins i) = show i <> "m" showTimespan (TimespanHours i) = show i <> "h" showTimespan (TimespanDays i) = show i <> "d" timespanUs :: Timespan -> Int timespanUs (TimespanUs i) = fromIntegral i timespanUs (TimespanMs i) = fromIntegral $ i * 1000 timespanUs (TimespanSecs i) = fromIntegral $ i * (1000 * 1000) timespanUs (TimespanMins i) = fromIntegral $ i * (1000 * 1000 * 60) timespanUs (TimespanHours i) = fromIntegral $ i * (1000 * 1000 * 3600) timespanUs (TimespanDays i) = fromIntegral $ i * (1000 * 1000 * 3600 * 24) timespanDays :: Timespan -> Int timespanDays = (`div` (1000 * 1000 * 3600 * 24)) . timespanUs instance Aeson.FromJSON Timespan where parseJSON = Aeson.withText "Timespan value" $ \raw -> do let (digits, units) = ST.break (`notElem` ("-0123456789" :: String)) raw bad = fail $ "bad Timespan value: " <> cs (show raw) construct :: Monad m => ST -> (Integer -> Timespan) -> m Timespan construct i cns = pure . cns . read . cs $ i case (digits, units) of ("", _) -> bad (i, "us") -> construct i TimespanUs (i, "ms") -> construct i TimespanMs (i, "s") -> construct i TimespanSecs (i, "m") -> construct i TimespanMins (i, "h") -> construct i TimespanHours (i, "d") -> construct i TimespanDays _ -> bad instance Aeson.ToJSON Timespan where toJSON = \case (TimespanUs i) -> render i "us" (TimespanMs i) -> render i "ms" (TimespanSecs i) -> render i "s" (TimespanMins i) -> render i "m" (TimespanHours i) -> render i "h" (TimespanDays i) -> render i "d" where render :: Integer -> String -> Aeson.Value render i unit = Aeson.String . cs $ show i <> unit diffTimestamps :: Timestamp -> Timestamp -> Timespan diffTimestamps (Timestamp tfrom) (Timestamp ttill) = TimespanUs . 
round $ (tfrom `diffUTCTime` ttill) * (1000 * 1000) addTimespan :: Timespan -> Timestamp -> Timestamp addTimespan tdiff (Timestamp tfrom) = Timestamp $ fromRational (fromIntegral (timespanUs tdiff) / (1000 * 1000) :: Rational) `addUTCTime` tfrom fromNow :: Timestamp -> Iso' Timestamp Timespan fromNow now = iso (`diffTimestamps` now) (`addTimespan` now) getDirectoryContentsNoDots :: FilePath -> IO [FilePath] getDirectoryContentsNoDots path = filter (not . (`elem` [".", ".."])) <$> getDirectoryContents path
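The Timespan JSON instances in the Types/Prelude.hs row above encode a value as its magnitude followed by a unit suffix and parse it back by splitting the digits from the suffix. A minimal round-trip sketch, assuming the file above compiles as Types.Prelude and that aeson is on the package path; the module name TimespanDemo is made up for illustration:

{-# LANGUAGE OverloadedStrings #-}
-- Hypothetical demo module; Types.Prelude refers to the file above.
module TimespanDemo where

import qualified Data.Aeson as Aeson
import Types.Prelude (Timespan (..), timespanUs)

main :: IO ()
main = do
  -- ToJSON renders the magnitude followed by the unit suffix.
  print (Aeson.encode (TimespanMins 30))            -- "\"30m\""
  -- FromJSON splits the digits from the suffix and picks the constructor.
  print (Aeson.decode "\"30m\"" :: Maybe Timespan)  -- Just (TimespanMins 30)
  -- timespanUs normalises any unit down to microseconds.
  print (timespanUs (TimespanMins 30))              -- 1800000000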
c87a4a1eeae34595959e84ab5e93a8079f3f8b245e414a5e46d79c8a35fca44a
hyperfiddle/electric
nav_scratch.clj
(ns dustin.y2022.nav-scratch (:require [hyperfiddle.rcf :as rcf :refer [tests tap % with]] [hyperfiddle.photon :as p] [hyperfiddle.photon-dom :as dom] [missionary.core :as m] [clojure.datafy :refer [datafy]] clojure.core.protocols clojure.repl)) (hyperfiddle.rcf/enable!) (comment (as-> (datafy *ns*) % (nav % :publics (:publics %))) (as-> (datafy (type 1)) % (nav % :members (:members %))) (as-> java.lang.Long % (datafy %) (nav % :members (:members %)) (datafy %) (get % 'BYTES) (datafy %) (get % 0) (datafy %) #_(:declaring-class %) (nav % :declaring-class (:declaring-class %)) (datafy %)) ) (defn sortmap [m] (into (sorted-map) m)) (defn- with-var-nav [v] (with-meta v {'clojure.core.protocols/nav (fn [_ k v] (if (var? v) (let [sym (.toSymbol v) ns (namespace sym) n (name sym)] {:source (clojure.repl/source-fn sym) :doc (with-out-str (@#'clojure.repl/print-doc (meta v)))}) v))})) (extend-protocol clojure.core.protocols/Datafiable clojure.lang.Namespace (datafy [n] (let [m {:name (.getName n) :publics (-> n ns-publics sortmap) :imports (-> n ns-imports sortmap) :interns (-> n ns-interns sortmap)}] (with-meta m (merge (meta n) {'clojure.core.protocols/nav (fn [_ k v] (case k :publics (with-var-nav v) ; datafy :interns (with-var-nav v) v))}))))) (tests (datafy #'sortmap) (meta (datafy #'clojure.core/assoc)) (datafy (find-ns 'clojure.core)) (def m (reflect (find-ns 'clojure.core))) (keys m) := (:name :publics :imports :interns) (:name m) := 'clojure.core (count (:publics m)) (count (:imports m)) (count (:interns m)) (meta m) ; nav just adds metadata (clojure.core.protocols/nav m :name (:name m)) := 'clojure.core (clojure.core.protocols/nav m :publics (:publics m)) := 'clojure.core )
null
https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/scratch/dustin/y2022/nav_scratch.clj
clojure
datafy nav just adds metadata
(ns dustin.y2022.nav-scratch (:require [hyperfiddle.rcf :as rcf :refer [tests tap % with]] [hyperfiddle.photon :as p] [hyperfiddle.photon-dom :as dom] [missionary.core :as m] [clojure.datafy :refer [datafy]] clojure.core.protocols clojure.repl)) (hyperfiddle.rcf/enable!) (comment (as-> (datafy *ns*) % (nav % :publics (:publics %))) (as-> (datafy (type 1)) % (nav % :members (:members %))) (as-> java.lang.Long % (datafy %) (nav % :members (:members %)) (datafy %) (get % 'BYTES) (datafy %) (get % 0) (datafy %) #_(:declaring-class %) (nav % :declaring-class (:declaring-class %)) (datafy %)) ) (defn sortmap [m] (into (sorted-map) m)) (defn- with-var-nav [v] (with-meta v {'clojure.core.protocols/nav (fn [_ k v] (if (var? v) (let [sym (.toSymbol v) ns (namespace sym) n (name sym)] {:source (clojure.repl/source-fn sym) :doc (with-out-str (@#'clojure.repl/print-doc (meta v)))}) v))})) (extend-protocol clojure.core.protocols/Datafiable clojure.lang.Namespace (datafy [n] (let [m {:name (.getName n) :publics (-> n ns-publics sortmap) :imports (-> n ns-imports sortmap) :interns (-> n ns-interns sortmap)}] (with-meta m (merge (meta n) {'clojure.core.protocols/nav (fn [_ k v] (case k :interns (with-var-nav v) v))}))))) (tests (datafy #'sortmap) (meta (datafy #'clojure.core/assoc)) (datafy (find-ns 'clojure.core)) (def m (reflect (find-ns 'clojure.core))) (keys m) := (:name :publics :imports :interns) (:name m) := 'clojure.core (count (:publics m)) (count (:imports m)) (count (:interns m)) (meta m) (clojure.core.protocols/nav m :name (:name m)) := 'clojure.core (clojure.core.protocols/nav m :publics (:publics m)) := 'clojure.core )
b2e78d848291a51daa0329e19aa4e16af53bff5c6fbcef167f5142b6738f332c
alexstoick/pandoc-docx-pagebreak
pandoc-docx-pagebreak.hs
module Main where import Text.Pandoc.JSON pagegBreakLandscapeXml :: String pagegBreakLandscapeXml = "<w:p><w:pPr><w:sectPr> <w:pgSz w:w=\"15840\" w:h=\"12240\" w:orient=\"landscape\" /></w:sectPr></w:pPr></w:p>" pageBreakXml :: String pageBreakXml = "<w:p><w:pPr><w:sectPr> <w:pgSz w:w=\"12240\" w:h=\"15840\"/> </w:sectPr></w:pPr></w:p>" pageBreakLandscapeBlock :: Block pageBreakBlock :: Block pageBreakLandscapeBlock = RawBlock (Format "openxml") pagegBreakLandscapeXml pageBreakBlock = RawBlock (Format "openxml") pageBreakXml insertPgBrks :: Block -> Block insertPgBrks (Para [Str "PAGEBREAKLANDSCAPE"]) = pageBreakLandscapeBlock insertPgBrks (Para [Str "PAGEBREAK"]) = pageBreakBlock insertPgBrks blk = blk main = toJSONFilter insertPgBrks
null
https://raw.githubusercontent.com/alexstoick/pandoc-docx-pagebreak/0387562ccc48571a2ef765a6859870546ad30f77/pandoc-docx-pagebreak.hs
haskell
module Main where import Text.Pandoc.JSON pagegBreakLandscapeXml :: String pagegBreakLandscapeXml = "<w:p><w:pPr><w:sectPr> <w:pgSz w:w=\"15840\" w:h=\"12240\" w:orient=\"landscape\" /></w:sectPr></w:pPr></w:p>" pageBreakXml :: String pageBreakXml = "<w:p><w:pPr><w:sectPr> <w:pgSz w:w=\"12240\" w:h=\"15840\"/> </w:sectPr></w:pPr></w:p>" pageBreakLandscapeBlock :: Block pageBreakBlock :: Block pageBreakLandscapeBlock = RawBlock (Format "openxml") pagegBreakLandscapeXml pageBreakBlock = RawBlock (Format "openxml") pageBreakXml insertPgBrks :: Block -> Block insertPgBrks (Para [Str "PAGEBREAKLANDSCAPE"]) = pageBreakLandscapeBlock insertPgBrks (Para [Str "PAGEBREAK"]) = pageBreakBlock insertPgBrks blk = blk main = toJSONFilter insertPgBrks
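The filter above rewrites a paragraph whose only content is the literal word PAGEBREAK (or PAGEBREAKLANDSCAPE) into a raw OpenXML section break. A small self-contained sketch of that substitution, with the relevant case copied inline so it runs without the Main module above; the module name PageBreakDemo and the condensed XML literal are for illustration only, and pandoc-types is assumed to be available:

{-# LANGUAGE OverloadedStrings #-}
-- Hypothetical demo; the rule mirrors the PAGEBREAK case of the filter above.
module PageBreakDemo where

import Text.Pandoc.Definition (Block (..), Format (..), Inline (..))

-- Same substitution as insertPgBrks, restricted to the portrait page break.
insertPgBrk :: Block -> Block
insertPgBrk (Para [Str "PAGEBREAK"]) =
  RawBlock (Format "openxml")
    "<w:p><w:pPr><w:sectPr><w:pgSz w:w=\"12240\" w:h=\"15840\"/></w:sectPr></w:pPr></w:p>"
insertPgBrk blk = blk

main :: IO ()
main = do
  print (insertPgBrk (Para [Str "PAGEBREAK"]))  -- replaced by the RawBlock
  print (insertPgBrk (Para [Str "hello"]))      -- any other block is left untouched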
a1d57d8035ada94bef05e2bf3972a436ec3ab4db5f120d68c612e100d20764b5
jpmonettas/clograms
subs.cljs
(ns clograms.subs (:require [re-frame.core :as re-frame] [datascript.core :as d] [clograms.db :refer [project-browser-level-idx->key] :as db] [clograms.db.components :as components-db] [clograms.re-grams.re-grams :as rg] [clograms.utils :as utils] [clojure.zip :as zip] [goog.string :as gstring] [clograms.models :as models] [clojure.string :as str] [pretty-spec.core :as pspec])) (re-frame/reg-sub ::all-searchable-entities (fn [{:keys [:datascript/db]} _] (-> [] (into (map (fn [p] (assoc p :search-str (str (:project/name p)))) (db/all-projects db))) (into (map (fn [n] (assoc n :search-str (str (:namespace/name n)))) (db/all-namespaces db))) (into (map (fn [v] (assoc v :search-str (str (:var/name v)))) (db/all-vars db))) (into (map (fn [s] (assoc s :search-str (str (:spec.alpha/key s)))) (db/all-specs db)))))) (re-frame/reg-sub ::selected-entity (fn [{:keys [selected-entity]} _] selected-entity)) (re-frame/reg-sub ::datascript-db (fn [db _] (:datascript/db db))) ;;;;;;;;;;;;;;;;;;; ;; Right sidebar ;; ;;;;;;;;;;;;;;;;;;; (re-frame/reg-sub :accordion/active-item (fn [db [_ accordion-id]] (components-db/accordion-active-item db accordion-id))) (re-frame/reg-sub ::side-bar-browser-level (fn [db _] (->> db :projects-browser :level (project-browser-level-idx->key)))) (re-frame/reg-sub ::side-bar-browser-selected-project (fn [db] (let [project-id (db/selected-project db)] {:project/id project-id :project/name (:project/name (db/project-entity (:datascript/db db) project-id))}))) (re-frame/reg-sub ::side-bar-browser-selected-namespace (fn [db] (let [ns-id (db/selected-namespace db) ns-entity (db/namespace-entity (:datascript/db db) ns-id)] {:namespace/id ns-id :namespace/name (:namespace/name ns-entity)}))) (re-frame/reg-sub ::side-bar-search (fn [db] (db/side-bar-search db))) (defn project-items [datascript-db] (when datascript-db (->> (db/all-projects datascript-db) (map #(assoc % :type :project :search-name (str (:project/name %)))) (sort-by (comp str :project/name))))) (defn namespaces-items [datascript-db pid] (when datascript-db (->> (db/all-namespaces-for-project datascript-db pid) (map #(assoc % :type :namespace :search-name (str (:namespace/name %)))) (sort-by (comp str :namespace/name))))) (defn vars-items [datascript-db nsid] (when datascript-db (->> (db/all-vars-for-ns datascript-db nsid) (map #(assoc % :type :var :search-name (str (:var/name %)))) (sort-by :var/line)))) (re-frame/reg-sub ::side-bar-browser-items (fn [db _] (let [{:keys [level selected-project selected-namespace]} (:projects-browser db) level-key (project-browser-level-idx->key level)] (case level-key :projects (let [all-projects (project-items (:datascript/db db)) is-main-project #(when (= (:project/name %) 'clindex/main-project) %) main-project (some is-main-project all-projects)] (into [main-project] (remove is-main-project all-projects))) :namespaces (namespaces-items (:datascript/db db) selected-project) :vars (vars-items (:datascript/db db) selected-namespace))))) (re-frame/reg-sub ::side-bar-browser-items+query :<- [::side-bar-browser-items] :<- [::side-bar-search] (fn [[items query] [_]] (->> items (filter (fn [i] (if (string? (:search-name i)) (str/includes? 
(:search-name i) query) (do (js/console.warn "Wrong search name for item " i) nil))))))) (re-frame/reg-sub ::current-var-references (fn [db _] (db/current-var-references db))) (re-frame/reg-sub ::bottom-bar (fn [db _] (db/bottom-bar db))) (re-frame/reg-sub ::ref-frame-feature :<- [::datascript-db] (fn [datascript-db [_ feature-key]] (db/all-re-frame-feature datascript-db feature-key))) (re-frame/reg-sub ::specs :<- [::datascript-db] :<- [::side-bar-search] (fn [[datascript-db query] [_]] (->> (db/all-specs datascript-db) (filter #(str/includes? (str (:spec.alpha/key %)) query)) (sort-by (comp str :spec.alpha/key))))) ;; Re-frame features subscriptions (re-frame/reg-sub ::re-frame-feature-tree (fn [[_ feature-key]] [(re-frame/subscribe [::ref-frame-feature feature-key]) (re-frame/subscribe [::side-bar-search])]) (fn [[feature-keys query] [_ feature-key]] (->> feature-keys (filter #(str/includes? (subs (str (:re-frame/key %)) 1) query)) (group-by :namespace/name) (map (fn [[ns-symb ns-subs]] {:type :namespace :data {:namespace/name ns-symb :project/name (:project/name (first ns-subs))} :childs (map (fn [sub] {:data sub :type feature-key}) ns-subs)}))))) (re-frame/reg-sub ::ctx-menu (fn [{:keys [ctx-menu]}] ctx-menu)) (re-frame/reg-sub ::selected-color (fn [db] (db/selected-color db))) (re-frame/reg-sub ::namespace-colors (fn [db [_]] (db/namespace-colors db))) (re-frame/reg-sub ::project-colors (fn [db [_]] (db/project-colors db))) (re-frame/reg-sub ::loading? (fn [db _] (db/loading? db))) (defn make-function-source-link [var-id node-id src] (gstring/format "<a onclick=\"clograms.diagram.entities.add_var_from_link(%d,'%s')\">%s</a>" var-id node-id src)) (defn enhance-source-str "Returns a enhanced `source-str` with anchors added for each var that appears on `source-form`" [source-str var-id source-form node-id] (let [{:keys [line column]} (meta source-form) all-symbols-meta (loop [all nil zloc (utils/move-zipper-to-next (utils/code-zipper source-form) symbol?)] (if (zip/end? 
zloc) all (recur (if-let [m (meta (zip/node zloc))] (conj all m) all) ;; we should add only if it points to some other var (utils/move-zipper-to-next zloc symbol?)))) all-meta-at-origin (->> all-symbols-meta (map (fn [m] (-> m (update :line #(- % line)) (update :end-line #(- % line)) (update :column #(- % column)) (update :end-column #(- % column))))))] (reduce (fn [src {:keys [line column end-column] :as m}] (if (and (:var/id m) (not= var-id (:var/id m))) (utils/replace-in-str-line (partial make-function-source-link (:var/id m) node-id) src line column (- end-column column)) src)) source-str all-meta-at-origin))) (re-frame/reg-sub ::project-entity :<- [::datascript-db] (fn [datascript-db [_ proj-id]] (db/project-entity datascript-db proj-id))) (re-frame/reg-sub ::namespace-entity :<- [::datascript-db] (fn [datascript-db [_ ns-id]] (db/namespace-entity datascript-db ns-id))) (re-frame/reg-sub ::function-entity :<- [::datascript-db] (fn [datascript-db [_ var-id node-id]] (let [e (db/function-entity datascript-db var-id)] (cond-> e true (update :function/source-str enhance-source-str var-id (:function/source-form e) node-id) (:fspec.alpha/source-form e) (assoc :fspec.alpha/source-str (with-out-str (pspec/pprint (:fspec.alpha/source-form e) {:ns-aliases {"clojure.spec.alpha" "s" "clojure.core.specs.alpha" "score" "clojure.core" nil}}))))))) (re-frame/reg-sub ::multimethod-entity :<- [::datascript-db] (fn [datascript-db [_ var-id node-id]] (let [e (db/multimethod-entity datascript-db var-id)] (update e :multi/methods (fn [mm] (->> mm (map (fn [method] (update method :multimethod/source-str enhance-source-str var-id (:multimethod/source-form method) node-id))))))))) (re-frame/reg-sub ::var-entity :<- [::datascript-db] (fn [datascript-db [_ id]] (db/var-entity datascript-db id))) (re-frame/reg-sub ::re-frame-subs-entity :<- [::datascript-db] (fn [datascript-db [_ sub-id node-id]] (let [e (db/re-frame-subs-entity datascript-db sub-id)] (-> e (update :source/str enhance-source-str nil (:source/form e) node-id))))) (re-frame/reg-sub ::re-frame-event-entity :<- [::datascript-db] (fn [datascript-db [_ id node-id]] (let [e (db/re-frame-event-entity datascript-db id)] (-> e (update :source/str enhance-source-str nil (:source/form e) node-id))))) (re-frame/reg-sub ::re-frame-fx-entity :<- [::datascript-db] (fn [datascript-db [_ id node-id]] (let [e (db/re-frame-fx-entity datascript-db id)] (-> e (update :source/str enhance-source-str nil (:source/form e) node-id))))) (re-frame/reg-sub ::re-frame-cofx-entity :<- [::datascript-db] (fn [datascript-db [_ id node-id]] (let [e (db/re-frame-cofx-entity datascript-db id)] (-> e (update :source/str enhance-source-str nil (:source/form e) node-id))))) (re-frame/reg-sub ::spec-entity :<- [::datascript-db] (fn [datascript-db [_ spec-id]] (let [s (db/spec-entity datascript-db spec-id)] (assoc s :spec.alpha/source-str (with-out-str (pspec/pprint (:spec.alpha/source-form s) {:ns-aliases {"clojure.spec.alpha" "s" "clojure.core.specs.alpha" "score" "clojure.core" nil}})))))) (re-frame/reg-sub ::node-color :<- [::datascript-db] :<- [::project-colors] :<- [::namespace-colors] (fn [[ds-db proj-colors ns-colors] [_ entity]] (let [[proj-name ns-name] (case (:entity/type entity) :function (let [ve (db/var-entity ds-db (:var/id entity))] [(:project/name ve) (:namespace/name ve)]) :var (let [ve (db/var-entity ds-db (:var/id entity))] [(:project/name ve) (:namespace/name ve)]) :multimethod (let [ve (db/var-entity ds-db (:var/id entity))] [(:project/name ve) (:namespace/name 
ve)]) :namespace (let [nse (db/namespace-entity ds-db (:namespace/id entity))] [(-> nse :project/_namespaces :project/name) (:namespace/name nse)]) :project [(:project/name (db/project-entity ds-db (:project/id entity))) nil] :re-frame-subs (let [r (db/re-frame-subs-entity ds-db (:id entity))] [(:project/name r) (:namespace/name r)]) :re-frame-event (let [r (db/re-frame-event-entity ds-db (:id entity))] [(:project/name r) (:namespace/name r)]) :re-frame-fx (let [r (db/re-frame-fx-entity ds-db (:id entity))] [(:project/name r) (:namespace/name r)]) :re-frame-cofx (let [r (db/re-frame-cofx-entity ds-db (:id entity))] [(:project/name r) (:namespace/name r)]) :spec (let [s (db/spec-entity ds-db (:spec/id entity))] [(:project/name s) (:namespace/name s)]))] (or (get ns-colors ns-name) (get proj-colors proj-name))))) (re-frame/reg-sub :text-edit-modal/event (fn [db [_]] (components-db/text-edit-modal-event db)))
null
https://raw.githubusercontent.com/jpmonettas/clograms/8586948b5e7439171f1d63030b7a34127c8a63f6/src/cljs/clograms/subs.cljs
clojure
Right sidebar ;; Re-frame features subscriptions we should add only if it points to some other var
(ns clograms.subs (:require [re-frame.core :as re-frame] [datascript.core :as d] [clograms.db :refer [project-browser-level-idx->key] :as db] [clograms.db.components :as components-db] [clograms.re-grams.re-grams :as rg] [clograms.utils :as utils] [clojure.zip :as zip] [goog.string :as gstring] [clograms.models :as models] [clojure.string :as str] [pretty-spec.core :as pspec])) (re-frame/reg-sub ::all-searchable-entities (fn [{:keys [:datascript/db]} _] (-> [] (into (map (fn [p] (assoc p :search-str (str (:project/name p)))) (db/all-projects db))) (into (map (fn [n] (assoc n :search-str (str (:namespace/name n)))) (db/all-namespaces db))) (into (map (fn [v] (assoc v :search-str (str (:var/name v)))) (db/all-vars db))) (into (map (fn [s] (assoc s :search-str (str (:spec.alpha/key s)))) (db/all-specs db)))))) (re-frame/reg-sub ::selected-entity (fn [{:keys [selected-entity]} _] selected-entity)) (re-frame/reg-sub ::datascript-db (fn [db _] (:datascript/db db))) (re-frame/reg-sub :accordion/active-item (fn [db [_ accordion-id]] (components-db/accordion-active-item db accordion-id))) (re-frame/reg-sub ::side-bar-browser-level (fn [db _] (->> db :projects-browser :level (project-browser-level-idx->key)))) (re-frame/reg-sub ::side-bar-browser-selected-project (fn [db] (let [project-id (db/selected-project db)] {:project/id project-id :project/name (:project/name (db/project-entity (:datascript/db db) project-id))}))) (re-frame/reg-sub ::side-bar-browser-selected-namespace (fn [db] (let [ns-id (db/selected-namespace db) ns-entity (db/namespace-entity (:datascript/db db) ns-id)] {:namespace/id ns-id :namespace/name (:namespace/name ns-entity)}))) (re-frame/reg-sub ::side-bar-search (fn [db] (db/side-bar-search db))) (defn project-items [datascript-db] (when datascript-db (->> (db/all-projects datascript-db) (map #(assoc % :type :project :search-name (str (:project/name %)))) (sort-by (comp str :project/name))))) (defn namespaces-items [datascript-db pid] (when datascript-db (->> (db/all-namespaces-for-project datascript-db pid) (map #(assoc % :type :namespace :search-name (str (:namespace/name %)))) (sort-by (comp str :namespace/name))))) (defn vars-items [datascript-db nsid] (when datascript-db (->> (db/all-vars-for-ns datascript-db nsid) (map #(assoc % :type :var :search-name (str (:var/name %)))) (sort-by :var/line)))) (re-frame/reg-sub ::side-bar-browser-items (fn [db _] (let [{:keys [level selected-project selected-namespace]} (:projects-browser db) level-key (project-browser-level-idx->key level)] (case level-key :projects (let [all-projects (project-items (:datascript/db db)) is-main-project #(when (= (:project/name %) 'clindex/main-project) %) main-project (some is-main-project all-projects)] (into [main-project] (remove is-main-project all-projects))) :namespaces (namespaces-items (:datascript/db db) selected-project) :vars (vars-items (:datascript/db db) selected-namespace))))) (re-frame/reg-sub ::side-bar-browser-items+query :<- [::side-bar-browser-items] :<- [::side-bar-search] (fn [[items query] [_]] (->> items (filter (fn [i] (if (string? (:search-name i)) (str/includes? 
(:search-name i) query) (do (js/console.warn "Wrong search name for item " i) nil))))))) (re-frame/reg-sub ::current-var-references (fn [db _] (db/current-var-references db))) (re-frame/reg-sub ::bottom-bar (fn [db _] (db/bottom-bar db))) (re-frame/reg-sub ::ref-frame-feature :<- [::datascript-db] (fn [datascript-db [_ feature-key]] (db/all-re-frame-feature datascript-db feature-key))) (re-frame/reg-sub ::specs :<- [::datascript-db] :<- [::side-bar-search] (fn [[datascript-db query] [_]] (->> (db/all-specs datascript-db) (filter #(str/includes? (str (:spec.alpha/key %)) query)) (sort-by (comp str :spec.alpha/key))))) (re-frame/reg-sub ::re-frame-feature-tree (fn [[_ feature-key]] [(re-frame/subscribe [::ref-frame-feature feature-key]) (re-frame/subscribe [::side-bar-search])]) (fn [[feature-keys query] [_ feature-key]] (->> feature-keys (filter #(str/includes? (subs (str (:re-frame/key %)) 1) query)) (group-by :namespace/name) (map (fn [[ns-symb ns-subs]] {:type :namespace :data {:namespace/name ns-symb :project/name (:project/name (first ns-subs))} :childs (map (fn [sub] {:data sub :type feature-key}) ns-subs)}))))) (re-frame/reg-sub ::ctx-menu (fn [{:keys [ctx-menu]}] ctx-menu)) (re-frame/reg-sub ::selected-color (fn [db] (db/selected-color db))) (re-frame/reg-sub ::namespace-colors (fn [db [_]] (db/namespace-colors db))) (re-frame/reg-sub ::project-colors (fn [db [_]] (db/project-colors db))) (re-frame/reg-sub ::loading? (fn [db _] (db/loading? db))) (defn make-function-source-link [var-id node-id src] (gstring/format "<a onclick=\"clograms.diagram.entities.add_var_from_link(%d,'%s')\">%s</a>" var-id node-id src)) (defn enhance-source-str "Returns a enhanced `source-str` with anchors added for each var that appears on `source-form`" [source-str var-id source-form node-id] (let [{:keys [line column]} (meta source-form) all-symbols-meta (loop [all nil zloc (utils/move-zipper-to-next (utils/code-zipper source-form) symbol?)] (if (zip/end? 
zloc) all (recur (if-let [m (meta (zip/node zloc))] (conj all m) (utils/move-zipper-to-next zloc symbol?)))) all-meta-at-origin (->> all-symbols-meta (map (fn [m] (-> m (update :line #(- % line)) (update :end-line #(- % line)) (update :column #(- % column)) (update :end-column #(- % column))))))] (reduce (fn [src {:keys [line column end-column] :as m}] (if (and (:var/id m) (not= var-id (:var/id m))) (utils/replace-in-str-line (partial make-function-source-link (:var/id m) node-id) src line column (- end-column column)) src)) source-str all-meta-at-origin))) (re-frame/reg-sub ::project-entity :<- [::datascript-db] (fn [datascript-db [_ proj-id]] (db/project-entity datascript-db proj-id))) (re-frame/reg-sub ::namespace-entity :<- [::datascript-db] (fn [datascript-db [_ ns-id]] (db/namespace-entity datascript-db ns-id))) (re-frame/reg-sub ::function-entity :<- [::datascript-db] (fn [datascript-db [_ var-id node-id]] (let [e (db/function-entity datascript-db var-id)] (cond-> e true (update :function/source-str enhance-source-str var-id (:function/source-form e) node-id) (:fspec.alpha/source-form e) (assoc :fspec.alpha/source-str (with-out-str (pspec/pprint (:fspec.alpha/source-form e) {:ns-aliases {"clojure.spec.alpha" "s" "clojure.core.specs.alpha" "score" "clojure.core" nil}}))))))) (re-frame/reg-sub ::multimethod-entity :<- [::datascript-db] (fn [datascript-db [_ var-id node-id]] (let [e (db/multimethod-entity datascript-db var-id)] (update e :multi/methods (fn [mm] (->> mm (map (fn [method] (update method :multimethod/source-str enhance-source-str var-id (:multimethod/source-form method) node-id))))))))) (re-frame/reg-sub ::var-entity :<- [::datascript-db] (fn [datascript-db [_ id]] (db/var-entity datascript-db id))) (re-frame/reg-sub ::re-frame-subs-entity :<- [::datascript-db] (fn [datascript-db [_ sub-id node-id]] (let [e (db/re-frame-subs-entity datascript-db sub-id)] (-> e (update :source/str enhance-source-str nil (:source/form e) node-id))))) (re-frame/reg-sub ::re-frame-event-entity :<- [::datascript-db] (fn [datascript-db [_ id node-id]] (let [e (db/re-frame-event-entity datascript-db id)] (-> e (update :source/str enhance-source-str nil (:source/form e) node-id))))) (re-frame/reg-sub ::re-frame-fx-entity :<- [::datascript-db] (fn [datascript-db [_ id node-id]] (let [e (db/re-frame-fx-entity datascript-db id)] (-> e (update :source/str enhance-source-str nil (:source/form e) node-id))))) (re-frame/reg-sub ::re-frame-cofx-entity :<- [::datascript-db] (fn [datascript-db [_ id node-id]] (let [e (db/re-frame-cofx-entity datascript-db id)] (-> e (update :source/str enhance-source-str nil (:source/form e) node-id))))) (re-frame/reg-sub ::spec-entity :<- [::datascript-db] (fn [datascript-db [_ spec-id]] (let [s (db/spec-entity datascript-db spec-id)] (assoc s :spec.alpha/source-str (with-out-str (pspec/pprint (:spec.alpha/source-form s) {:ns-aliases {"clojure.spec.alpha" "s" "clojure.core.specs.alpha" "score" "clojure.core" nil}})))))) (re-frame/reg-sub ::node-color :<- [::datascript-db] :<- [::project-colors] :<- [::namespace-colors] (fn [[ds-db proj-colors ns-colors] [_ entity]] (let [[proj-name ns-name] (case (:entity/type entity) :function (let [ve (db/var-entity ds-db (:var/id entity))] [(:project/name ve) (:namespace/name ve)]) :var (let [ve (db/var-entity ds-db (:var/id entity))] [(:project/name ve) (:namespace/name ve)]) :multimethod (let [ve (db/var-entity ds-db (:var/id entity))] [(:project/name ve) (:namespace/name ve)]) :namespace (let [nse (db/namespace-entity ds-db 
(:namespace/id entity))] [(-> nse :project/_namespaces :project/name) (:namespace/name nse)]) :project [(:project/name (db/project-entity ds-db (:project/id entity))) nil] :re-frame-subs (let [r (db/re-frame-subs-entity ds-db (:id entity))] [(:project/name r) (:namespace/name r)]) :re-frame-event (let [r (db/re-frame-event-entity ds-db (:id entity))] [(:project/name r) (:namespace/name r)]) :re-frame-fx (let [r (db/re-frame-fx-entity ds-db (:id entity))] [(:project/name r) (:namespace/name r)]) :re-frame-cofx (let [r (db/re-frame-cofx-entity ds-db (:id entity))] [(:project/name r) (:namespace/name r)]) :spec (let [s (db/spec-entity ds-db (:spec/id entity))] [(:project/name s) (:namespace/name s)]))] (or (get ns-colors ns-name) (get proj-colors proj-name))))) (re-frame/reg-sub :text-edit-modal/event (fn [db [_]] (components-db/text-edit-modal-event db)))
52136fda18eb9d358037c513b74b1b0d2c5995ca5cf8f9bd4db2e0c604405d10
goncalotomas/FMKe
fmke_gen_driver.erl
%% ------------------------------------------------------------------- %% Copyright ( c ) 2014 SyncFree Consortium . All Rights Reserved . %% This file is provided to you under the Apache License , %% Version 2.0 (the "License"); you may not use this file except in compliance with the License . You may obtain %% a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- -module(fmke_gen_driver). -type id() :: non_neg_integer(). -type context() :: term(). -type crdt() :: term(). -callback start_link(term()) -> {ok, pid()} | {error, term()}. -callback stop(pid()) -> ok | {error, term()}. %%----------------------------------------------------------------------------- Create Operations %%----------------------------------------------------------------------------- -callback create_patient(Id::id(), Name::string(), Address::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_pharmacy(Id::id(), Name::string(), Address::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_facility(Id::id(), Name::string(), Address::string(), Type::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_staff(Id::id(), Name::string(), Address::string(), Speciality::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_prescription(PrescriptionId::id(), PatientId::id(), PrescriberId::id(), PharmacyId::id(), DatePrescribed::string(), Drugs::list(crdt())) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_event(EventId::id ( ) , TreatmentId::id ( ) , ( ) , Timestamp::string ( ) , % Description::string()) -> { ok | { error , Reason::term ( ) } , ( ) } . % % -callback create_treatment(TreatmentId::id(), PatientId::id(), StaffId::id(), FacilityId::id(), % DateStarted::string()) -> { ok | { error , Reason::term ( ) } , ( ) } . %%----------------------------------------------------------------------------- %% Get Operations %%----------------------------------------------------------------------------- % -callback get_event_by_id(Id::id()) -> { { ok , Object::crdt ( ) } | { error , Reason::term ( ) } , ( ) } . -callback get_facility_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. -callback get_patient_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. -callback get_pharmacy_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. -callback get_prescription_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. -callback get_staff_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. % -callback get_treatment_by_id(Id::id()) -> { { ok , Object::crdt ( ) } | { error , Reason::term ( ) } , ( ) } . -callback ( ) ) - > { { ok , ListObjects::list(crdt ( ) ) } | { error , Reason::term ( ) } , ( ) } . -callback get_processed_pharmacy_prescriptions(Id::id()) -> {{ok, ListObjects::list(crdt())} | {error, Reason::term()}, Context::context()}. 
-callback get_pharmacy_prescriptions(Id::id()) -> {{ok, ListObjects::list(crdt())} | {error, Reason::term()}, Context::context()}. -callback get_staff_prescriptions(Id::id()) -> {{ok, ListObjects::list(crdt())} | {error, Reason::term()}, Context::context()}. % -callback get_staff_treatments(Id::id()) -> { { ok , ListObjects::list(crdt ( ) ) } | { error , Reason::term ( ) } , ( ) } . %%----------------------------------------------------------------------------- %% Update Operations %%----------------------------------------------------------------------------- -callback process_prescription(Id::id(), DateProcessed::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_patient_details(Id::id(), Name::string(), Address::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_pharmacy_details(Id::id(), Name::string(), Address::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_facility_details(Id::id(), Name::string(), Address::string(), Type::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_staff_details(Id::id(), Name::string(), Address::string(), Speciality::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_prescription_medication(Id::id(), Operation::atom(), Drugs::list(crdt())) -> {ok | {error, Reason::term()}, Context::context()}.
null
https://raw.githubusercontent.com/goncalotomas/FMKe/654d3211ef57d841540e58033a397ce0f3dee0f7/src/fmke_gen_driver.erl
erlang
------------------------------------------------------------------- Version 2.0 (the "License"); you may not use this file a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------- ----------------------------------------------------------------------------- ----------------------------------------------------------------------------- Description::string()) -> -callback create_treatment(TreatmentId::id(), PatientId::id(), StaffId::id(), FacilityId::id(), DateStarted::string()) -> ----------------------------------------------------------------------------- Get Operations ----------------------------------------------------------------------------- -callback get_event_by_id(Id::id()) -> -callback get_treatment_by_id(Id::id()) -> -callback get_staff_treatments(Id::id()) -> ----------------------------------------------------------------------------- Update Operations -----------------------------------------------------------------------------
Copyright ( c ) 2014 SyncFree Consortium . All Rights Reserved . This file is provided to you under the Apache License , except in compliance with the License . You may obtain software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY -module(fmke_gen_driver). -type id() :: non_neg_integer(). -type context() :: term(). -type crdt() :: term(). -callback start_link(term()) -> {ok, pid()} | {error, term()}. -callback stop(pid()) -> ok | {error, term()}. Create Operations -callback create_patient(Id::id(), Name::string(), Address::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_pharmacy(Id::id(), Name::string(), Address::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_facility(Id::id(), Name::string(), Address::string(), Type::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_staff(Id::id(), Name::string(), Address::string(), Speciality::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_prescription(PrescriptionId::id(), PatientId::id(), PrescriberId::id(), PharmacyId::id(), DatePrescribed::string(), Drugs::list(crdt())) -> {ok | {error, Reason::term()}, Context::context()}. -callback create_event(EventId::id ( ) , TreatmentId::id ( ) , ( ) , Timestamp::string ( ) , { ok | { error , Reason::term ( ) } , ( ) } . { ok | { error , Reason::term ( ) } , ( ) } . { { ok , Object::crdt ( ) } | { error , Reason::term ( ) } , ( ) } . -callback get_facility_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. -callback get_patient_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. -callback get_pharmacy_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. -callback get_prescription_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. -callback get_staff_by_id(Id::id()) -> {{ok, Object::crdt()} | {error, Reason::term()}, Context::context()}. { { ok , Object::crdt ( ) } | { error , Reason::term ( ) } , ( ) } . -callback ( ) ) - > { { ok , ListObjects::list(crdt ( ) ) } | { error , Reason::term ( ) } , ( ) } . -callback get_processed_pharmacy_prescriptions(Id::id()) -> {{ok, ListObjects::list(crdt())} | {error, Reason::term()}, Context::context()}. -callback get_pharmacy_prescriptions(Id::id()) -> {{ok, ListObjects::list(crdt())} | {error, Reason::term()}, Context::context()}. -callback get_staff_prescriptions(Id::id()) -> {{ok, ListObjects::list(crdt())} | {error, Reason::term()}, Context::context()}. { { ok , ListObjects::list(crdt ( ) ) } | { error , Reason::term ( ) } , ( ) } . -callback process_prescription(Id::id(), DateProcessed::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_patient_details(Id::id(), Name::string(), Address::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_pharmacy_details(Id::id(), Name::string(), Address::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_facility_details(Id::id(), Name::string(), Address::string(), Type::string()) -> {ok | {error, Reason::term()}, Context::context()}. -callback update_staff_details(Id::id(), Name::string(), Address::string(), Speciality::string()) -> {ok | {error, Reason::term()}, Context::context()}. 
-callback update_prescription_medication(Id::id(), Operation::atom(), Drugs::list(crdt())) -> {ok | {error, Reason::term()}, Context::context()}.
dddf848b969ea55eb62ca335ba6e0945c936ee3ee0fd1e30db95de95de99672b
TheClimateCorporation/lemur
generic-jobdef.clj
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Generic Launcher ;;; For launching a simple job (one step), where the details are specified ;;; on the command line. ;;; ;;; Example of common usage: lemur dry-run generic-jobdef.clj --app "prod-348" --num-instances 3 \ ;;; --main-class weatherbill.hadoop.query.prod-348 --slave-instance-type m2.xlarge \ ;;; --bucket 'com.weatherbill.${env}.banzai' ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (catch-args :main-class "A fully qualified classname." :app "A representative name the app-- no spaces, as it is used in paths" :bucket "An s3 bucket, e.g. 'com.weatherbill.${env}.banzai'") (defcluster generic-cluster :emr-name "${app}") (defstep generic-step :args.passthrough true :args.data-uri true) (fire! generic-cluster generic-step)
null
https://raw.githubusercontent.com/TheClimateCorporation/lemur/00eb21b6f534ceefe354ab89042826188d156991/examples/generic-jobdef.clj
clojure
on the command line. Example of common usage: --main-class weatherbill.hadoop.query.prod-348 --slave-instance-type m2.xlarge \ --bucket 'com.weatherbill.${env}.banzai'
Generic Launcher For launching a simple job (one step), where the details are specified lemur dry-run generic-jobdef.clj --app "prod-348" --num-instances 3 \ (catch-args :main-class "A fully qualified classname." :app "A representative name the app-- no spaces, as it is used in paths" :bucket "An s3 bucket, e.g. 'com.weatherbill.${env}.banzai'") (defcluster generic-cluster :emr-name "${app}") (defstep generic-step :args.passthrough true :args.data-uri true) (fire! generic-cluster generic-step)
03d0aceb9a1f5ba9b7552532a94a96491836adb222b64aee169ad4f0bbd87e36
zilch-lang/nstar
Types.hs
# LANGUAGE FlexibleContexts # {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE GADTs #-} # LANGUAGE ScopedTypeVariables # # LANGUAGE StandaloneDeriving # # LANGUAGE ViewPatterns # module Data.Elf.Types ( -- * ELF types Elf_Half, Elf_Word, Elf_Sword, Elf_Xword, Elf_Sxword, Elf_Addr(..), Elf_Off(..), Elf_UChar, Elf_Section, Elf_Rel_Info(..), Elf_Rel_Addend(..) -- * Internal export , ReifySize ) where import Data.Word (Word8, Word16, Word32, Word64) import Data.Int (Int32, Int64) import Data.Elf.Internal.BusSize (Size(..)) import Data.Some import Unsafe.Coerce (unsafeCoerce) | Unsigned 8 - bits integer type Elf_UChar (n :: Size) = Word8 | Unsigned 16 - bits integer type Elf_Half (n :: Size) = Word16 | Unsigned 32 - bits integer type Elf_Word (n :: Size) = Word32 | Signed 32 - bits integer type Elf_Sword (n :: Size) = Int32 | Unsigned 64 - bits integer type Elf_Xword (n :: Size) = Word64 | Signed 64 - bits integer type Elf_Sxword (n :: Size) = Int64 | [ 32 bits ] Unsigned 32 - bits integer [ 64 bits ] Unsigned 64 - bits integer data Elf_Addr (n :: Size) where Elf32_Addr :: Word32 -> Elf_Addr 'S32 Elf64_Addr :: Word64 -> Elf_Addr 'S64 | [ 32 bits ] Unsigned 32 - bits integer [ 64 bits ] Unsigned 64 - bits integer data Elf_Off (n :: Size) where Elf32_Off :: Word32 -> Elf_Off 'S32 Elf64_Off :: Word64 -> Elf_Off 'S64 | Unsigned 16 - bits integer type Elf_Section (n :: Size) = Word16 | [ 32 bits ] Unsigned 32 - bits integer [ 64 bits ] Unsigned 64 - bits integer data Elf_Rel_Info (n :: Size) where Elf32_Rel_Info :: Word32 -> Elf_Rel_Info 'S32 Elf64_Rel_Info :: Word64 -> Elf_Rel_Info 'S64 | [ 32 bits ] Signed 32 - bits integer [ 64 bits ] Signed 64 - bits integer data Elf_Rel_Addend (n :: Size) where Elf32_Rel_Addend :: Word32 -> Elf_Rel_Addend 'S32 Elf64_Rel_Addend :: Word64 -> Elf_Rel_Addend 'S64 class ReifySize (n :: Size) where reifySize :: Size instance ReifySize 'S32 where reifySize = S32 instance ReifySize 'S64 where reifySize = S64 instance ReifySize n => Num (Elf_Addr n) where Elf32_Addr a1 + Elf32_Addr a2 = Elf32_Addr (a1 + a2) Elf64_Addr a1 + Elf64_Addr a2 = Elf64_Addr (a1 + a2) Elf32_Addr a1 * Elf32_Addr a2 = Elf32_Addr (a1 * a2) Elf64_Addr a1 * Elf64_Addr a2 = Elf64_Addr (a1 * a2) -- NOTE: we don't use those at all, and it makes no sense. 
abs _ = undefined signum _ = undefined fromInteger i = let Some (unsafeCoerce -> addr :: Elf_Addr n) = case reifySize @n of S32 -> Some $ Elf32_Addr (fromInteger i) S64 -> Some $ Elf64_Addr (fromInteger i) in addr negate (Elf32_Addr a) = Elf32_Addr (negate a) negate (Elf64_Addr a) = Elf64_Addr (negate a) instance ReifySize n => Real (Elf_Addr n) where toRational (Elf32_Addr a) = toRational a toRational (Elf64_Addr a) = toRational a instance ReifySize n => Enum (Elf_Addr n) where toEnum i = let Some (unsafeCoerce -> addr :: Elf_Addr n) = case reifySize @n of S32 -> Some $ Elf32_Addr (toEnum i) S64 -> Some $ Elf64_Addr (toEnum i) in addr fromEnum (Elf32_Addr a) = fromEnum a fromEnum (Elf64_Addr a) = fromEnum a instance ReifySize n => Integral (Elf_Addr n) where quotRem (Elf32_Addr a1) (Elf32_Addr a2) = let (x, y) = quotRem a1 a2 in (Elf32_Addr x, Elf32_Addr y) quotRem (Elf64_Addr a1) (Elf64_Addr a2) = let (x, y) = quotRem a1 a2 in (Elf64_Addr x, Elf64_Addr y) toInteger (Elf32_Addr a) = toInteger a toInteger (Elf64_Addr a) = toInteger a deriving instance Ord (Elf_Addr n) deriving instance Eq (Elf_Addr n) deriving instance Show (Elf_Addr n) instance ReifySize n => Num (Elf_Off n) where Elf32_Off a1 + Elf32_Off a2 = Elf32_Off (a1 + a2) Elf64_Off a1 + Elf64_Off a2 = Elf64_Off (a1 + a2) Elf32_Off a1 * Elf32_Off a2 = Elf32_Off (a1 * a2) Elf64_Off a1 * Elf64_Off a2 = Elf64_Off (a1 * a2) -- NOTE: we don't use those at all, and it makes no sense. abs _ = undefined signum _ = undefined fromInteger i = let Some (unsafeCoerce -> addr :: Elf_Off n) = case reifySize @n of S32 -> Some $ Elf32_Off (fromInteger i) S64 -> Some $ Elf64_Off (fromInteger i) in addr negate (Elf32_Off a) = Elf32_Off (negate a) negate (Elf64_Off a) = Elf64_Off (negate a) instance ReifySize n => Real (Elf_Off n) where toRational (Elf32_Off a) = toRational a toRational (Elf64_Off a) = toRational a instance ReifySize n => Enum (Elf_Off n) where toEnum i = let Some (unsafeCoerce -> addr :: Elf_Off n) = case reifySize @n of S32 -> Some $ Elf32_Off (toEnum i) S64 -> Some $ Elf64_Off (toEnum i) in addr fromEnum (Elf32_Off a) = fromEnum a fromEnum (Elf64_Off a) = fromEnum a instance ReifySize n => Integral (Elf_Off n) where quotRem (Elf32_Off a1) (Elf32_Off a2) = let (x, y) = quotRem a1 a2 in (Elf32_Off x, Elf32_Off y) quotRem (Elf64_Off a1) (Elf64_Off a2) = let (x, y) = quotRem a1 a2 in (Elf64_Off x, Elf64_Off y) toInteger (Elf32_Off a) = toInteger a toInteger (Elf64_Off a) = toInteger a deriving instance Ord (Elf_Off n) deriving instance Eq (Elf_Off n) deriving instance Show (Elf_Off n) instance ReifySize n => Num (Elf_Rel_Info n) where Elf32_Rel_Info i1 + Elf32_Rel_Info i2 = Elf32_Rel_Info (i1 + i2) Elf64_Rel_Info i1 + Elf64_Rel_Info i2 = Elf64_Rel_Info (i1 + i2) Elf32_Rel_Info i1 * Elf32_Rel_Info i2 = Elf32_Rel_Info (i1 * i2) Elf64_Rel_Info i1 * Elf64_Rel_Info i2 = Elf64_Rel_Info (i1 * i2) -- NOTE: we don't use those at all, and it makes no sense. 
abs _ = undefined signum _ = undefined fromInteger i = let Some (unsafeCoerce -> info :: Elf_Rel_Info n) = case reifySize @n of S32 -> Some $ Elf32_Rel_Info (fromInteger i) S64 -> Some $ Elf64_Rel_Info (fromInteger i) in info negate (Elf32_Rel_Info i) = Elf32_Rel_Info (negate i) negate (Elf64_Rel_Info i) = Elf64_Rel_Info (negate i) instance ReifySize n => Real (Elf_Rel_Info n) where toRational (Elf32_Rel_Info a) = toRational a toRational (Elf64_Rel_Info a) = toRational a instance ReifySize n => Enum (Elf_Rel_Info n) where toEnum i = let Some (unsafeCoerce -> addr :: Elf_Rel_Info n) = case reifySize @n of S32 -> Some $ Elf32_Rel_Info (toEnum i) S64 -> Some $ Elf64_Rel_Info (toEnum i) in addr fromEnum (Elf32_Rel_Info a) = fromEnum a fromEnum (Elf64_Rel_Info a) = fromEnum a instance ReifySize n => Integral (Elf_Rel_Info n) where quotRem (Elf32_Rel_Info a1) (Elf32_Rel_Info a2) = let (x, y) = quotRem a1 a2 in (Elf32_Rel_Info x, Elf32_Rel_Info y) quotRem (Elf64_Rel_Info a1) (Elf64_Rel_Info a2) = let (x, y) = quotRem a1 a2 in (Elf64_Rel_Info x, Elf64_Rel_Info y) toInteger (Elf32_Rel_Info a) = toInteger a toInteger (Elf64_Rel_Info a) = toInteger a deriving instance Ord (Elf_Rel_Info n) deriving instance Eq (Elf_Rel_Info n) deriving instance Show (Elf_Rel_Info n) instance ReifySize n => Num (Elf_Rel_Addend n) where Elf32_Rel_Addend a1 + Elf32_Rel_Addend a2 = Elf32_Rel_Addend (a1 + a2) Elf64_Rel_Addend a1 + Elf64_Rel_Addend a2 = Elf64_Rel_Addend (a1 + a2) Elf32_Rel_Addend a1 * Elf32_Rel_Addend a2 = Elf32_Rel_Addend (a1 * a2) Elf64_Rel_Addend a1 * Elf64_Rel_Addend a2 = Elf64_Rel_Addend (a1 * a2) -- NOTE: we don't use those at all, and it makes no sense. abs _ = undefined signum _ = undefined fromInteger i = let Some (unsafeCoerce -> addend :: Elf_Rel_Addend n) = case reifySize @n of S32 -> Some $ Elf32_Rel_Addend (fromInteger i) S64 -> Some $ Elf64_Rel_Addend (fromInteger i) in addend negate (Elf32_Rel_Addend a) = Elf32_Rel_Addend (negate a) negate (Elf64_Rel_Addend a) = Elf64_Rel_Addend (negate a) instance ReifySize n => Real (Elf_Rel_Addend n) where toRational (Elf32_Rel_Addend a) = toRational a toRational (Elf64_Rel_Addend a) = toRational a instance ReifySize n => Enum (Elf_Rel_Addend n) where toEnum i = let Some (unsafeCoerce -> addr :: Elf_Rel_Addend n) = case reifySize @n of S32 -> Some $ Elf32_Rel_Addend (toEnum i) S64 -> Some $ Elf64_Rel_Addend (toEnum i) in addr fromEnum (Elf32_Rel_Addend a) = fromEnum a fromEnum (Elf64_Rel_Addend a) = fromEnum a instance ReifySize n => Integral (Elf_Rel_Addend n) where quotRem (Elf32_Rel_Addend a1) (Elf32_Rel_Addend a2) = let (x, y) = quotRem a1 a2 in (Elf32_Rel_Addend x, Elf32_Rel_Addend y) quotRem (Elf64_Rel_Addend a1) (Elf64_Rel_Addend a2) = let (x, y) = quotRem a1 a2 in (Elf64_Rel_Addend x, Elf64_Rel_Addend y) toInteger (Elf32_Rel_Addend a) = toInteger a toInteger (Elf64_Rel_Addend a) = toInteger a deriving instance Ord (Elf_Rel_Addend n) deriving instance Eq (Elf_Rel_Addend n) deriving instance Show (Elf_Rel_Addend n)
null
https://raw.githubusercontent.com/zilch-lang/nstar/8a9d1d5aa19fe6493c556ae3606c758829683762/lib/elfgen/src/Data/Elf/Types.hs
haskell
# LANGUAGE ConstraintKinds # # LANGUAGE GADTs # * ELF types * Internal export NOTE: we don't use those at all, and it makes no sense. NOTE: we don't use those at all, and it makes no sense. NOTE: we don't use those at all, and it makes no sense. NOTE: we don't use those at all, and it makes no sense.
# LANGUAGE FlexibleContexts # # LANGUAGE ScopedTypeVariables # # LANGUAGE StandaloneDeriving # # LANGUAGE ViewPatterns # module Data.Elf.Types Elf_Half, Elf_Word, Elf_Sword, Elf_Xword, Elf_Sxword, Elf_Addr(..), Elf_Off(..), Elf_UChar, Elf_Section, Elf_Rel_Info(..), Elf_Rel_Addend(..) , ReifySize ) where import Data.Word (Word8, Word16, Word32, Word64) import Data.Int (Int32, Int64) import Data.Elf.Internal.BusSize (Size(..)) import Data.Some import Unsafe.Coerce (unsafeCoerce) | Unsigned 8 - bits integer type Elf_UChar (n :: Size) = Word8 | Unsigned 16 - bits integer type Elf_Half (n :: Size) = Word16 | Unsigned 32 - bits integer type Elf_Word (n :: Size) = Word32 | Signed 32 - bits integer type Elf_Sword (n :: Size) = Int32 | Unsigned 64 - bits integer type Elf_Xword (n :: Size) = Word64 | Signed 64 - bits integer type Elf_Sxword (n :: Size) = Int64 | [ 32 bits ] Unsigned 32 - bits integer [ 64 bits ] Unsigned 64 - bits integer data Elf_Addr (n :: Size) where Elf32_Addr :: Word32 -> Elf_Addr 'S32 Elf64_Addr :: Word64 -> Elf_Addr 'S64 | [ 32 bits ] Unsigned 32 - bits integer [ 64 bits ] Unsigned 64 - bits integer data Elf_Off (n :: Size) where Elf32_Off :: Word32 -> Elf_Off 'S32 Elf64_Off :: Word64 -> Elf_Off 'S64 | Unsigned 16 - bits integer type Elf_Section (n :: Size) = Word16 | [ 32 bits ] Unsigned 32 - bits integer [ 64 bits ] Unsigned 64 - bits integer data Elf_Rel_Info (n :: Size) where Elf32_Rel_Info :: Word32 -> Elf_Rel_Info 'S32 Elf64_Rel_Info :: Word64 -> Elf_Rel_Info 'S64 | [ 32 bits ] Signed 32 - bits integer [ 64 bits ] Signed 64 - bits integer data Elf_Rel_Addend (n :: Size) where Elf32_Rel_Addend :: Word32 -> Elf_Rel_Addend 'S32 Elf64_Rel_Addend :: Word64 -> Elf_Rel_Addend 'S64 class ReifySize (n :: Size) where reifySize :: Size instance ReifySize 'S32 where reifySize = S32 instance ReifySize 'S64 where reifySize = S64 instance ReifySize n => Num (Elf_Addr n) where Elf32_Addr a1 + Elf32_Addr a2 = Elf32_Addr (a1 + a2) Elf64_Addr a1 + Elf64_Addr a2 = Elf64_Addr (a1 + a2) Elf32_Addr a1 * Elf32_Addr a2 = Elf32_Addr (a1 * a2) Elf64_Addr a1 * Elf64_Addr a2 = Elf64_Addr (a1 * a2) abs _ = undefined signum _ = undefined fromInteger i = let Some (unsafeCoerce -> addr :: Elf_Addr n) = case reifySize @n of S32 -> Some $ Elf32_Addr (fromInteger i) S64 -> Some $ Elf64_Addr (fromInteger i) in addr negate (Elf32_Addr a) = Elf32_Addr (negate a) negate (Elf64_Addr a) = Elf64_Addr (negate a) instance ReifySize n => Real (Elf_Addr n) where toRational (Elf32_Addr a) = toRational a toRational (Elf64_Addr a) = toRational a instance ReifySize n => Enum (Elf_Addr n) where toEnum i = let Some (unsafeCoerce -> addr :: Elf_Addr n) = case reifySize @n of S32 -> Some $ Elf32_Addr (toEnum i) S64 -> Some $ Elf64_Addr (toEnum i) in addr fromEnum (Elf32_Addr a) = fromEnum a fromEnum (Elf64_Addr a) = fromEnum a instance ReifySize n => Integral (Elf_Addr n) where quotRem (Elf32_Addr a1) (Elf32_Addr a2) = let (x, y) = quotRem a1 a2 in (Elf32_Addr x, Elf32_Addr y) quotRem (Elf64_Addr a1) (Elf64_Addr a2) = let (x, y) = quotRem a1 a2 in (Elf64_Addr x, Elf64_Addr y) toInteger (Elf32_Addr a) = toInteger a toInteger (Elf64_Addr a) = toInteger a deriving instance Ord (Elf_Addr n) deriving instance Eq (Elf_Addr n) deriving instance Show (Elf_Addr n) instance ReifySize n => Num (Elf_Off n) where Elf32_Off a1 + Elf32_Off a2 = Elf32_Off (a1 + a2) Elf64_Off a1 + Elf64_Off a2 = Elf64_Off (a1 + a2) Elf32_Off a1 * Elf32_Off a2 = Elf32_Off (a1 * a2) Elf64_Off a1 * Elf64_Off a2 = Elf64_Off (a1 * a2) abs _ = undefined signum _ 
= undefined fromInteger i = let Some (unsafeCoerce -> addr :: Elf_Off n) = case reifySize @n of S32 -> Some $ Elf32_Off (fromInteger i) S64 -> Some $ Elf64_Off (fromInteger i) in addr negate (Elf32_Off a) = Elf32_Off (negate a) negate (Elf64_Off a) = Elf64_Off (negate a) instance ReifySize n => Real (Elf_Off n) where toRational (Elf32_Off a) = toRational a toRational (Elf64_Off a) = toRational a instance ReifySize n => Enum (Elf_Off n) where toEnum i = let Some (unsafeCoerce -> addr :: Elf_Off n) = case reifySize @n of S32 -> Some $ Elf32_Off (toEnum i) S64 -> Some $ Elf64_Off (toEnum i) in addr fromEnum (Elf32_Off a) = fromEnum a fromEnum (Elf64_Off a) = fromEnum a instance ReifySize n => Integral (Elf_Off n) where quotRem (Elf32_Off a1) (Elf32_Off a2) = let (x, y) = quotRem a1 a2 in (Elf32_Off x, Elf32_Off y) quotRem (Elf64_Off a1) (Elf64_Off a2) = let (x, y) = quotRem a1 a2 in (Elf64_Off x, Elf64_Off y) toInteger (Elf32_Off a) = toInteger a toInteger (Elf64_Off a) = toInteger a deriving instance Ord (Elf_Off n) deriving instance Eq (Elf_Off n) deriving instance Show (Elf_Off n) instance ReifySize n => Num (Elf_Rel_Info n) where Elf32_Rel_Info i1 + Elf32_Rel_Info i2 = Elf32_Rel_Info (i1 + i2) Elf64_Rel_Info i1 + Elf64_Rel_Info i2 = Elf64_Rel_Info (i1 + i2) Elf32_Rel_Info i1 * Elf32_Rel_Info i2 = Elf32_Rel_Info (i1 * i2) Elf64_Rel_Info i1 * Elf64_Rel_Info i2 = Elf64_Rel_Info (i1 * i2) abs _ = undefined signum _ = undefined fromInteger i = let Some (unsafeCoerce -> info :: Elf_Rel_Info n) = case reifySize @n of S32 -> Some $ Elf32_Rel_Info (fromInteger i) S64 -> Some $ Elf64_Rel_Info (fromInteger i) in info negate (Elf32_Rel_Info i) = Elf32_Rel_Info (negate i) negate (Elf64_Rel_Info i) = Elf64_Rel_Info (negate i) instance ReifySize n => Real (Elf_Rel_Info n) where toRational (Elf32_Rel_Info a) = toRational a toRational (Elf64_Rel_Info a) = toRational a instance ReifySize n => Enum (Elf_Rel_Info n) where toEnum i = let Some (unsafeCoerce -> addr :: Elf_Rel_Info n) = case reifySize @n of S32 -> Some $ Elf32_Rel_Info (toEnum i) S64 -> Some $ Elf64_Rel_Info (toEnum i) in addr fromEnum (Elf32_Rel_Info a) = fromEnum a fromEnum (Elf64_Rel_Info a) = fromEnum a instance ReifySize n => Integral (Elf_Rel_Info n) where quotRem (Elf32_Rel_Info a1) (Elf32_Rel_Info a2) = let (x, y) = quotRem a1 a2 in (Elf32_Rel_Info x, Elf32_Rel_Info y) quotRem (Elf64_Rel_Info a1) (Elf64_Rel_Info a2) = let (x, y) = quotRem a1 a2 in (Elf64_Rel_Info x, Elf64_Rel_Info y) toInteger (Elf32_Rel_Info a) = toInteger a toInteger (Elf64_Rel_Info a) = toInteger a deriving instance Ord (Elf_Rel_Info n) deriving instance Eq (Elf_Rel_Info n) deriving instance Show (Elf_Rel_Info n) instance ReifySize n => Num (Elf_Rel_Addend n) where Elf32_Rel_Addend a1 + Elf32_Rel_Addend a2 = Elf32_Rel_Addend (a1 + a2) Elf64_Rel_Addend a1 + Elf64_Rel_Addend a2 = Elf64_Rel_Addend (a1 + a2) Elf32_Rel_Addend a1 * Elf32_Rel_Addend a2 = Elf32_Rel_Addend (a1 * a2) Elf64_Rel_Addend a1 * Elf64_Rel_Addend a2 = Elf64_Rel_Addend (a1 * a2) abs _ = undefined signum _ = undefined fromInteger i = let Some (unsafeCoerce -> addend :: Elf_Rel_Addend n) = case reifySize @n of S32 -> Some $ Elf32_Rel_Addend (fromInteger i) S64 -> Some $ Elf64_Rel_Addend (fromInteger i) in addend negate (Elf32_Rel_Addend a) = Elf32_Rel_Addend (negate a) negate (Elf64_Rel_Addend a) = Elf64_Rel_Addend (negate a) instance ReifySize n => Real (Elf_Rel_Addend n) where toRational (Elf32_Rel_Addend a) = toRational a toRational (Elf64_Rel_Addend a) = toRational a instance ReifySize n => Enum 
(Elf_Rel_Addend n) where toEnum i = let Some (unsafeCoerce -> addr :: Elf_Rel_Addend n) = case reifySize @n of S32 -> Some $ Elf32_Rel_Addend (toEnum i) S64 -> Some $ Elf64_Rel_Addend (toEnum i) in addr fromEnum (Elf32_Rel_Addend a) = fromEnum a fromEnum (Elf64_Rel_Addend a) = fromEnum a instance ReifySize n => Integral (Elf_Rel_Addend n) where quotRem (Elf32_Rel_Addend a1) (Elf32_Rel_Addend a2) = let (x, y) = quotRem a1 a2 in (Elf32_Rel_Addend x, Elf32_Rel_Addend y) quotRem (Elf64_Rel_Addend a1) (Elf64_Rel_Addend a2) = let (x, y) = quotRem a1 a2 in (Elf64_Rel_Addend x, Elf64_Rel_Addend y) toInteger (Elf32_Rel_Addend a) = toInteger a toInteger (Elf64_Rel_Addend a) = toInteger a deriving instance Ord (Elf_Rel_Addend n) deriving instance Eq (Elf_Rel_Addend n) deriving instance Show (Elf_Rel_Addend n)
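The Data.Elf.Types record above relies on a size-indexed GADT plus a ReifySize class, and its fromInteger resorts to Some and unsafeCoerce to pick the constructor matching the type index. Below is a minimal, self-contained sketch of the same pattern that swaps that trick for a singleton type, which lets the compiler do the refinement safely; the names Size, Addr, SSize and mkAddr are illustrative only, not part of the library.

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
module Main (main) where

import Data.Word (Word32, Word64)

data Size = S32 | S64

-- An address whose width is tracked in the type, like Elf_Addr above.
data Addr (n :: Size) where
  Addr32 :: Word32 -> Addr 'S32
  Addr64 :: Word64 -> Addr 'S64

deriving instance Show (Addr n)

-- A singleton that carries the type-level size back to the term level.
data SSize (n :: Size) where
  SS32 :: SSize 'S32
  SS64 :: SSize 'S64

class ReifySize (n :: Size) where
  sizeOf :: SSize n

instance ReifySize 'S32 where sizeOf = SS32
instance ReifySize 'S64 where sizeOf = SS64

-- Matching on the singleton refines n, so the right constructor can be
-- chosen for a numeric value without any unsafeCoerce.
mkAddr :: forall n. ReifySize n => Integer -> Addr n
mkAddr i = case sizeOf :: SSize n of
  SS32 -> Addr32 (fromInteger i)
  SS64 -> Addr64 (fromInteger i)

main :: IO ()
main = print (mkAddr 0x401000 :: Addr 'S64)  -- prints: Addr64 4198400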
8e31410eeefc8c227ae3210df494298ccb4be30971192308be51e0ef6d8556f9
janestreet/base
test_option_array_allocation.ml
open! Base open Option_array open Expect_test_helpers_core let () = let t = of_array [| None |] in assert ( require_no_allocation [%here] (fun () -> match get t 0 with | None -> true | Some _ -> false)) ;; let () = let t = of_array [| Some 0 |] in let get_some () = match get t 0 with | None -> false | Some _ -> true in (* After inlining, [match get t 0 with] is: {[ match let cheap_option = Uniform_array.get t 0 in if Cheap_option.is_some cheap_option then Some (Cheap_option.value_unsafe cheap_option) else None with ]} This situation is called "match-in-match" (the inner [if] is essentially a match). The OCaml compiler and Flambda optimizer don't handle match-in-match well, and so cannot eliminate the allocation of [Some]. Flambda2 is expected to eliminate the allocation, at which point we can [require_no_allocation] (possibly annotating the test with [@tags "fast-flambda"]). *) let compiler_eliminates_the_allocation = (* [Version_util.x_library_inlining] is the whole reason this is a separate executable. *) Config.flambda2 && Version_util.x_library_inlining in if compiler_eliminates_the_allocation then assert (require_no_allocation [%here] get_some) else let module Gc = Core.Gc.For_testing in let _, { Gc.Allocation_report.minor_words_allocated; _ } = Gc.measure_allocation get_some in assert (minor_words_allocated = 2) ;;
null
https://raw.githubusercontent.com/janestreet/base/4d2f3f7ed9b8f20cef9d3ad582d986b7c78e0358/test/allocation/bin/test_option_array_allocation.ml
ocaml
[Version_util.x_library_inlining] is the whole reason this is a separate executable.
open! Base open Option_array open Expect_test_helpers_core let () = let t = of_array [| None |] in assert ( require_no_allocation [%here] (fun () -> match get t 0 with | None -> true | Some _ -> false)) ;; let () = let t = of_array [| Some 0 |] in let get_some () = match get t 0 with | None -> false | Some _ -> true in (* After inlining, [match get t 0 with] is: {[ match let cheap_option = Uniform_array.get t 0 in if Cheap_option.is_some cheap_option then Some (Cheap_option.value_unsafe cheap_option) else None with ]} This situation is called "match-in-match" (the inner [if] is essentially a match). The OCaml compiler and Flambda optimizer don't handle match-in-match well, and so cannot eliminate the allocation of [Some]. Flambda2 is expected to eliminate the allocation, at which point we can [require_no_allocation] (possibly annotating the test with [@tags "fast-flambda"]). *) let compiler_eliminates_the_allocation = Config.flambda2 && Version_util.x_library_inlining in if compiler_eliminates_the_allocation then assert (require_no_allocation [%here] get_some) else let module Gc = Core.Gc.For_testing in let _, { Gc.Allocation_report.minor_words_allocated; _ } = Gc.measure_allocation get_some in assert (minor_words_allocated = 2) ;;
956f7d104f7057437b3f78b863ad1dd6509cba7ea2038f3ba8b4cec6f366484a
reasonml-old/BetterErrors
prettyPrint_2.ml
type bread = | Coconut of string let morning = Coconut
null
https://raw.githubusercontent.com/reasonml-old/BetterErrors/d439b92bfe377689c38fded5d8aa2b151133f25d/tests/prettyPrint/prettyPrint_2.ml
ocaml
type bread = | Coconut of string let morning = Coconut
41af4e90b9791e3f0aa0abb350b51a87445366495dded19d2665f014875d7ead
alexandergunnarson/quantum
trig.cljc
(ns quantum.core.numeric.trig (:refer-clojure :exclude [+ * /]) (:require [quantum.core.error :as err :refer [TODO] ] [quantum.core.macros :refer [#?@(:clj [defnt defnt'])] :refer-macros [defnt]] [quantum.core.numeric.exponents :as exp :refer [#?@(:clj [log-e sqrt pow])] :refer-macros [log-e sqrt pow]] [quantum.core.numeric.operators :refer [#?@(:clj [+ * / inc* dec*])] :refer-macros [+ * / inc* dec*]]) #?(:clj (:import [net.jafama FastMath]))) ; ===== SINE ===== ; #?(:clj (defnt' asin "arc sine" (^double [^double x] (Math/asin x))) :cljs (defn asin "arc sine" [x] (js/Math.asin x))) #?(:clj (defnt asin* "arc sine, fast+lax" {:performance ["3.8 times faster than java.lang.Math" "Worst case 2E-12 difference"]} (^double [^double x] (FastMath/asin x)))) #?(:clj (defnt asinh {:performance "Unoptimized, but that's okay for now."} (^double [^double x] (log-e (+ x (sqrt (inc* (pow x 2))))))) :cljs (defn asinh [x] (js/Math.asinh x))) #?(:clj (defnt' sin "sine" (^double ^:intrinsic [^double x] (Math/sin x))) :cljs (defn sin "sine" [x] (js/Math.sin x))) #?(:clj (defnt sin* "sine, fast+lax" {:performance ["4.5 times faster than java.lang.Math" "Worst case 1E-11 difference"]} (^double [^double x] (FastMath/sin x)))) #?(:clj (defnt' sinh "hyperbolic sine" (^double [^double x] (Math/sinh x))) :cljs (defn sinh [x] (js/Math.sinh x))) #?(:clj (defnt sinh* "hyperbolic sine" {:performance ["5.5 times faster than java.lang.Math" "Worst case 7E-14 difference"]} (^double [^double x] (FastMath/sinh x)))) ; ===== COSINE ===== ; #?(:clj (defnt acos "arc cosine" (^double [^double x] (Math/acos x))) :cljs (defn acos "arc cosine" [x] (js/Math.acos x))) #?(:clj (defnt acosh {:performance "Unoptimized, but that's okay for now."} (^double [^double x] (log-e (+ x (* (sqrt (dec* x)) (sqrt (inc* x))))))) :cljs (defn acosh "hyperbolic arc cosine" [x] (js/Math.acosh x))) #?(:clj (defnt acos* "arc cosine" {:performance ["3.6 times faster than java.lang.Math" "Worst case 1E-12 difference"]} (^double [^double x] (FastMath/acos x)))) #?(:clj (defnt' cos "cosine" (^double ^:intrinsic [^double x] (Math/cos x))) :cljs (defn cos "cosine" [x] (js/Math.cos x))) #?(:clj (defnt' cos* "cosine" {:performance ["5.7 times faster than java.lang.Math" "Worst case 8E-12 difference"]} (^double [^double x] (FastMath/cos x)))) #?(:clj (defnt' cosh "hyperbolic cosine" (^double [^double x] (Math/cosh x))) :cljs (defn cosh "hyperbolic cosine" [x] (js/Math.cosh x))) #?(:clj (defnt' cosh* "hyperbolic cosine" {:performance ["5 times faster than java.lang.Math" "Worst case 4E-14 difference"]} (^double [^double x] (FastMath/cosh x)))) ; ===== TANGENT ===== ; #?(:clj (defnt' atan "arc tangent" (^double [^double x] (Math/atan x))) :cljs (defn atan "arc tangent" [x] (js/Math.atan x))) #?(:clj (defnt atan* "arc tangent" {:performance ["6.2 times faster than java.lang.Math" "Worst case 5E-13 difference"]} (^double [^double x] (FastMath/atan x)))) #?(:clj (defnt atanh {:performance "Unoptimized, but that's okay for now."} (^double [^double x] (/ (- (log-e (+ 1 x)) (log-e (- 1 x))) 2))) :cljs (defn atanh [x] (js/Math.atanh x))) #?(:clj (defnt' atan2 "returns angle theta" (^double ^:intrinsic [^double x ^double y] (Math/atan2 x y))) :cljs (defn atan2 "returns angle theta" [x y] (js/Math.atan2 x y))) #?(:clj (defnt atan2* "returns angle theta" {:performance ["6.3 times faster than java.lang.Math" "Worst case 4E-13 difference"]} (^double [^double x ^double y] (FastMath/atan2 x y)))) #?(:clj (defnt' tan "tangent" (^double ^:intrinsic [^double x] (Math/tan x))) :cljs 
(defn tan "tangent" [x] (js/Math.tan x))) #?(:clj (defnt tan* "tangent" {:performance ["3.7 times faster than java.lang.Math" "Worst case 1E-13 difference"]} (^double [^double x] (FastMath/tan x)))) #?(:clj (defnt' tanh "hyperbolic tangent" (^double [^double x] (Math/tanh x))) :cljs (defn tanh "hyperbolic tangent" [x] (js/Math.tanh x))) #?(:clj (defnt tanh* "hyperbolic tangent" {:performance ["6.4 times faster than java.lang.Math" "Worst case 5E-14 difference"]} (^double [^double x] (FastMath/tanh x)))) ; ===== DEGREES + RADIANS ===== ; #?(:clj (defnt' rad->deg (^double [^double x] (Math/toDegrees x))) :cljs (defn rad->deg [x] (/ (* x 180.0) js/Math.PI))) #?(:clj (defnt' deg->rad (^double [^double x] (Math/toRadians x))) :cljs (defn deg->rad [x] (/ (* x js/Math.PI) 180.0)))
null
https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/src/quantum/core/numeric/trig.cljc
clojure
===== SINE ===== ; ===== COSINE ===== ; ===== TANGENT ===== ; ===== DEGREES + RADIANS ===== ;
(ns quantum.core.numeric.trig (:refer-clojure :exclude [+ * /]) (:require [quantum.core.error :as err :refer [TODO] ] [quantum.core.macros :refer [#?@(:clj [defnt defnt'])] :refer-macros [defnt]] [quantum.core.numeric.exponents :as exp :refer [#?@(:clj [log-e sqrt pow])] :refer-macros [log-e sqrt pow]] [quantum.core.numeric.operators :refer [#?@(:clj [+ * / inc* dec*])] :refer-macros [+ * / inc* dec*]]) #?(:clj (:import [net.jafama FastMath]))) #?(:clj (defnt' asin "arc sine" (^double [^double x] (Math/asin x))) :cljs (defn asin "arc sine" [x] (js/Math.asin x))) #?(:clj (defnt asin* "arc sine, fast+lax" {:performance ["3.8 times faster than java.lang.Math" "Worst case 2E-12 difference"]} (^double [^double x] (FastMath/asin x)))) #?(:clj (defnt asinh {:performance "Unoptimized, but that's okay for now."} (^double [^double x] (log-e (+ x (sqrt (inc* (pow x 2))))))) :cljs (defn asinh [x] (js/Math.asinh x))) #?(:clj (defnt' sin "sine" (^double ^:intrinsic [^double x] (Math/sin x))) :cljs (defn sin "sine" [x] (js/Math.sin x))) #?(:clj (defnt sin* "sine, fast+lax" {:performance ["4.5 times faster than java.lang.Math" "Worst case 1E-11 difference"]} (^double [^double x] (FastMath/sin x)))) #?(:clj (defnt' sinh "hyperbolic sine" (^double [^double x] (Math/sinh x))) :cljs (defn sinh [x] (js/Math.sinh x))) #?(:clj (defnt sinh* "hyperbolic sine" {:performance ["5.5 times faster than java.lang.Math" "Worst case 7E-14 difference"]} (^double [^double x] (FastMath/sinh x)))) #?(:clj (defnt acos "arc cosine" (^double [^double x] (Math/acos x))) :cljs (defn acos "arc cosine" [x] (js/Math.acos x))) #?(:clj (defnt acosh {:performance "Unoptimized, but that's okay for now."} (^double [^double x] (log-e (+ x (* (sqrt (dec* x)) (sqrt (inc* x))))))) :cljs (defn acosh "hyperbolic arc cosine" [x] (js/Math.acosh x))) #?(:clj (defnt acos* "arc cosine" {:performance ["3.6 times faster than java.lang.Math" "Worst case 1E-12 difference"]} (^double [^double x] (FastMath/acos x)))) #?(:clj (defnt' cos "cosine" (^double ^:intrinsic [^double x] (Math/cos x))) :cljs (defn cos "cosine" [x] (js/Math.cos x))) #?(:clj (defnt' cos* "cosine" {:performance ["5.7 times faster than java.lang.Math" "Worst case 8E-12 difference"]} (^double [^double x] (FastMath/cos x)))) #?(:clj (defnt' cosh "hyperbolic cosine" (^double [^double x] (Math/cosh x))) :cljs (defn cosh "hyperbolic cosine" [x] (js/Math.cosh x))) #?(:clj (defnt' cosh* "hyperbolic cosine" {:performance ["5 times faster than java.lang.Math" "Worst case 4E-14 difference"]} (^double [^double x] (FastMath/cosh x)))) #?(:clj (defnt' atan "arc tangent" (^double [^double x] (Math/atan x))) :cljs (defn atan "arc tangent" [x] (js/Math.atan x))) #?(:clj (defnt atan* "arc tangent" {:performance ["6.2 times faster than java.lang.Math" "Worst case 5E-13 difference"]} (^double [^double x] (FastMath/atan x)))) #?(:clj (defnt atanh {:performance "Unoptimized, but that's okay for now."} (^double [^double x] (/ (- (log-e (+ 1 x)) (log-e (- 1 x))) 2))) :cljs (defn atanh [x] (js/Math.atanh x))) #?(:clj (defnt' atan2 "returns angle theta" (^double ^:intrinsic [^double x ^double y] (Math/atan2 x y))) :cljs (defn atan2 "returns angle theta" [x y] (js/Math.atan2 x y))) #?(:clj (defnt atan2* "returns angle theta" {:performance ["6.3 times faster than java.lang.Math" "Worst case 4E-13 difference"]} (^double [^double x ^double y] (FastMath/atan2 x y)))) #?(:clj (defnt' tan "tangent" (^double ^:intrinsic [^double x] (Math/tan x))) :cljs (defn tan "tangent" [x] (js/Math.tan x))) #?(:clj (defnt tan* 
"tangent" {:performance ["3.7 times faster than java.lang.Math" "Worst case 1E-13 difference"]} (^double [^double x] (FastMath/tan x)))) #?(:clj (defnt' tanh "hyperbolic tangent" (^double [^double x] (Math/tanh x))) :cljs (defn tanh "hyperbolic tangent" [x] (js/Math.tanh x))) #?(:clj (defnt tanh* "hyperbolic tangent" {:performance ["6.4 times faster than java.lang.Math" "Worst case 5E-14 difference"]} (^double [^double x] (FastMath/tanh x)))) #?(:clj (defnt' rad->deg (^double [^double x] (Math/toDegrees x))) :cljs (defn rad->deg [x] (/ (* x 180.0) js/Math.PI))) #?(:clj (defnt' deg->rad (^double [^double x] (Math/toRadians x))) :cljs (defn deg->rad [x] (/ (* x js/Math.PI) 180.0)))
c0858a3c9bb4d11a811e1e0f2e904fb5cc501dbe03493abb4f3cff3d9e030aa6
d-cent/mooncake
mongo.clj
(ns mooncake.test.db.mongo (:require [midje.sweet :refer :all] [monger.operators :as mop] [mooncake.db.mongo :as mongo])) (facts "about value-map->mongo-query-map" (fact "creates query map with the same values if provided map has no nested collections" (mongo/value-map->mongo-query-map {:a "1" :b "2" :c "3"}) => {:a "1" :b "2" :c "3"}) (fact "creates query map with mongo $in statements if provided map has nested collections" (mongo/value-map->mongo-query-map {:a "1" :b ["2" "3"] :c ["4" "5"]}) => {:a "1" :b {mop/$in ["2" "3"]} :c {mop/$in ["4" "5"]}}) (fact "creates empty query map if provided map is empty" (mongo/value-map->mongo-query-map {}) => {})) (facts "about value-map-list->mongo-or-query-map" (fact "joins elements with mongo $or keyword" (mongo/value-map-vector->or-mongo-query-map [{:a "1"} {:b "2"}]) => {mop/$or [{:a "1"} {:b "2"}]}) (fact "converts elements to mongo queries" (mongo/value-map-vector->or-mongo-query-map [{:a ["1", "2"]} {:b "2"}]) => {mop/$or [{:a {mop/$in ["1" "2"]}} {:b "2"}]}))
null
https://raw.githubusercontent.com/d-cent/mooncake/eb16b7239e7580a73b98f7cdacb324ab4e301f9c/test/mooncake/test/db/mongo.clj
clojure
(ns mooncake.test.db.mongo (:require [midje.sweet :refer :all] [monger.operators :as mop] [mooncake.db.mongo :as mongo])) (facts "about value-map->mongo-query-map" (fact "creates query map with the same values if provided map has no nested collections" (mongo/value-map->mongo-query-map {:a "1" :b "2" :c "3"}) => {:a "1" :b "2" :c "3"}) (fact "creates query map with mongo $in statements if provided map has nested collections" (mongo/value-map->mongo-query-map {:a "1" :b ["2" "3"] :c ["4" "5"]}) => {:a "1" :b {mop/$in ["2" "3"]} :c {mop/$in ["4" "5"]}}) (fact "creates empty query map if provided map is empty" (mongo/value-map->mongo-query-map {}) => {})) (facts "about value-map-list->mongo-or-query-map" (fact "joins elements with mongo $or keyword" (mongo/value-map-vector->or-mongo-query-map [{:a "1"} {:b "2"}]) => {mop/$or [{:a "1"} {:b "2"}]}) (fact "converts elements to mongo queries" (mongo/value-map-vector->or-mongo-query-map [{:a ["1", "2"]} {:b "2"}]) => {mop/$or [{:a {mop/$in ["1" "2"]}} {:b "2"}]}))
b0142933226d62dfe963463a4246262e616dc9d5349d2af1dc2db5b5562cc98f
sjl/advent
day-16.lisp
(advent:defpackage* :advent/2019/16) (in-package :advent/2019/16) (defparameter *pattern* #(0 1 0 -1)) (defun compute-element (input i) (iterate (for x :in-vector input) (generate p :around *pattern*) (if-first-time (next p)) ; initialize pattern skip first element in the expanded pattern (when (zerop c) (next p)) (summing (* x p) :into result) (returning (mod (abs result) 10)))) (defun run-phase (input output) (iterate (for i :below (length output)) (setf (aref output i) (compute-element input i)))) (defun fft (input &optional (n 1)) (let ((input (fresh-vector input)) (output (fresh-vector input))) (do-repeat n (run-phase input output) (rotatef input output)) input)) (defun part2 (digits) ;; This is a dumb hack. ;; Because the message is in the latter half of the result , we can cheat and take advantage of the fact that for any element in the last half of the ;; input, the result is always just the sum of the tail of the array starting ;; at that element. ;; This is because by the time we 're in the back half of the array , the ( 0 1 0 -1 ) input pattern repeats the 0 i times ( which wipes out everything before i ) and the 1 i+1 times ( which means we just sum up the rest of the ;; array): ;; 0 1 2 3 4 5 6 7 8 9 10 11 12 input a b c d e = 13 pattern for i = 0 1 0 -1 0 1 0 -1 0 1 0 -1 0 1 pattern for i = 1 0 1 1 0 0 -1 -1 0 0 1 1 0 0 pattern for i = 2 0 0 1 1 1 0 0 0 -1 -1 -1 0 0 pattern for i = 3 0 0 0 1 1 1 1 0 0 0 0 -1 -1 pattern for i = 4 0 0 0 0 1 1 1 1 1 0 0 0 0 pattern for i = 5 0 0 0 0 0 1 1 1 1 1 1 0 0 pattern for i = 6 0 0 0 0 0 0 1 1 1 1 1 1 1 < ---- all zeroes all ones --------- > ;; ;; Additionally: by starting at the end of the array we don't need a temporary ;; array, we can just keep a running sum and not worrying about destroying the ;; input. ;; ;; This is cheating, but whatever, I didn't really like this problem much ;; anyway. (let* ((digits (coerce (iterate (repeat 10000) (appending digits)) 'vector)) (offset (digits->number (subseq digits 0 7))) (data (subseq digits offset))) (do-repeat 100 (iterate (for x :in-vector data :with-index i :from (1- (length data)) :downto 0) (summing x :into n) (setf (aref data i) (mod n 10)))) (subseq data 0 8))) (defun digits-string (digits) (map 'string #'digit-char digits)) (define-problem (2019 16) (data read-digits) ("96136976" "85600369") (values (_ (fft data 100) (subseq _ 0 8) digits-string) (digits-string (part2 data)))) #; Scratch -------------------------------------------------------------------- (part2 '(0 3 0 3 6 7 3 2 5 7 7 2 1 2 9 4 4 0 6 3 4 9 1 5 6 5 4 7 4 6 6 4 ))
null
https://raw.githubusercontent.com/sjl/advent/3eabc384120692444e15c76b64f32d8d69c3305d/src/2019/days/day-16.lisp
lisp
initialize pattern This is a dumb hack. input, the result is always just the sum of the tail of the array starting at that element. array): Additionally: by starting at the end of the array we don't need a temporary array, we can just keep a running sum and not worrying about destroying the input. This is cheating, but whatever, I didn't really like this problem much anyway. Scratch --------------------------------------------------------------------
(advent:defpackage* :advent/2019/16) (in-package :advent/2019/16) (defparameter *pattern* #(0 1 0 -1)) (defun compute-element (input i) (iterate (for x :in-vector input) (generate p :around *pattern*) skip first element in the expanded pattern (when (zerop c) (next p)) (summing (* x p) :into result) (returning (mod (abs result) 10)))) (defun run-phase (input output) (iterate (for i :below (length output)) (setf (aref output i) (compute-element input i)))) (defun fft (input &optional (n 1)) (let ((input (fresh-vector input)) (output (fresh-vector input))) (do-repeat n (run-phase input output) (rotatef input output)) input)) (defun part2 (digits) Because the message is in the latter half of the result , we can cheat and take advantage of the fact that for any element in the last half of the This is because by the time we 're in the back half of the array , the ( 0 1 0 -1 ) input pattern repeats the 0 i times ( which wipes out everything before i ) and the 1 i+1 times ( which means we just sum up the rest of the 0 1 2 3 4 5 6 7 8 9 10 11 12 input a b c d e = 13 pattern for i = 0 1 0 -1 0 1 0 -1 0 1 0 -1 0 1 pattern for i = 1 0 1 1 0 0 -1 -1 0 0 1 1 0 0 pattern for i = 2 0 0 1 1 1 0 0 0 -1 -1 -1 0 0 pattern for i = 3 0 0 0 1 1 1 1 0 0 0 0 -1 -1 pattern for i = 4 0 0 0 0 1 1 1 1 1 0 0 0 0 pattern for i = 5 0 0 0 0 0 1 1 1 1 1 1 0 0 pattern for i = 6 0 0 0 0 0 0 1 1 1 1 1 1 1 < ---- all zeroes all ones --------- > (let* ((digits (coerce (iterate (repeat 10000) (appending digits)) 'vector)) (offset (digits->number (subseq digits 0 7))) (data (subseq digits offset))) (do-repeat 100 (iterate (for x :in-vector data :with-index i :from (1- (length data)) :downto 0) (summing x :into n) (setf (aref data i) (mod n 10)))) (subseq data 0 8))) (defun digits-string (digits) (map 'string #'digit-char digits)) (define-problem (2019 16) (data read-digits) ("96136976" "85600369") (values (_ (fft data 100) (subseq _ 0 8) digits-string) (digits-string (part2 data)))) (part2 '(0 3 0 3 6 7 3 2 5 7 7 2 1 2 9 4 4 0 6 3 4 9 1 5 6 5 4 7 4 6 6 4 ))
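The part-2 comment in the day-16.lisp record above argues that, in the back half of the signal, one FFT phase reduces to a running suffix sum taken mod 10. Here is a minimal Haskell sketch of just that recurrence; the record's own code mutates an array in place, so this list version only illustrates the arithmetic, and phase is an illustrative name.

module Main (main) where

-- One phase of the back-half shortcut: element i becomes the sum of
-- elements i..end, mod 10.  Folding the modulus into the running sum is
-- safe because (a + b) `mod` 10 == ((a `mod` 10) + b) `mod` 10.
phase :: [Int] -> [Int]
phase = scanr1 (\x acc -> (x + acc) `mod` 10)

main :: IO ()
main = do
  print (phase [1, 2, 3, 4, 5])                 -- [5,4,2,9,5]
  print (iterate phase [1, 2, 3, 4, 5] !! 100)  -- the tail after 100 phases, as in part 2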
8c2f280e078457601754bab995c05926248118c3d3b204126130da57659231b0
dpiponi/Moodler
Command.hs
| Module : Command Description : Interpreter for plugin DSL Maintainer : The plugin DSL uses an AST build with a free monad . This module provides the interpreter . Module : Command Description : Interpreter for plugin DSL Maintainer : The plugin DSL uses an AST build with a free monad. This module provides the interpreter. -} # LANGUAGE FlexibleContexts # module Command(execScript, execCommand, evalUi) where import Control.Applicative import Control.Exception import Control.Lens import Control.Monad.State import Graphics.Gloss.Data.Picture import Graphics.Gloss.Data.Color import qualified Language.Haskell.Interpreter as I import qualified Data.Map as M import qualified Data.Set as S import System.Directory import Data.Attoparsec.Text import qualified Data.Text as T import Control.Monad.Error import Sound.MoodlerLib.Symbols import Sound.MoodlerLib.Quantise import Sound.MoodlerLib.UiLib as U import qualified NanoHaskell as N import Check import Wiring import ContainerTree import Save import UIElement import World import UISupport import qualified ContainerTree as T import qualified Box as B import KeyMatcher import KeyStrokes import ServerState import WorldSupport alertGadget :: String -> B.Transform -> Picture alertGadget alt _ = B.textInBox (makeColor 1.0 0.1 0.1 0.8) white alt doAlert :: (MonadIO m, MonadState World m) => String -> m () doAlert alt = do gadget .= alertGadget alt liftIO $ putStrLn alt commandImportList :: [String] commandImportList = [ "Prelude", "Control.Monad", "Text.Read", "System.Directory", "Sound.MoodlerLib.Symbols", "Sound.MoodlerLib.UiLib", "Sound.MoodlerLib.UiLibElement", "Sound.MoodlerLib.Quantise"] execNanoCommand :: (InputHandler m, MonadIO m, MonadState World m, Functor m) => N.Nano -> m () execNanoCommand r = do let a = runErrorT (N.interpret r) b <- evalUi a case b of Left e -> liftIO $ putStrLn ("Error: " ++ e) Right () -> return () execGHCCommand :: (InputHandler m, Functor m, MonadIO m, MonadState World m) => String -> m () execGHCCommand cmd = do commandResult <- liftIO $ I.runInterpreter $ do I.set [I.searchPath I.:= ["src"]] I.setImports commandImportList I.interpret cmd (I.as :: Ui ()) case commandResult of Left err -> doAlert $ case err of I.UnknownError e -> "Unknown error: " ++ e I.WontCompile es -> show (map I.errMsg es) I.NotAllowed e -> "Not allowed: " ++ e I.GhcException e -> "GHC exception: " ++ e Right commandTree -> evalUi commandTree -- -- XXX Work on error message display! -- | Execute plugin commands from a 'String'. execCommand :: (InputHandler m, Functor m, MonadIO m, MonadState World m) => String -> m () execCommand cmd = case parseOnly N.nanoParser (T.pack cmd) of Right r -> do liftIO $ putStrLn "Using nanoInterpreter" execNanoCommand r Left x -> do liftIO $ putStrLn ("Not using nanoInterpreter: " ++ x) execGHCCommand cmd safeReadFile :: String -> IO (Either String String) safeReadFile f = catch (Right <$> readFile f) $ \exception -> do let err = show (exception :: IOException) return $ Left err -- | Execute plugin script from a .hs file execScript :: (InputHandler m, Functor m, MonadIO m, MonadState World m) => String -- ^ Execute script from this directory... -> String -- ^ ...called by this name (leaving out the .hs)... 
-> m String -- ^ ...returning full filename of script execScript dir f = do -- use proper dir API XXX let fileName = dir ++ "/" ++ f ++ ".hs" cmds <- liftIO $ safeReadFile fileName case cmds of Left err -> do liftIO $ putStrLn err gadget .= alertGadget err Right cmd -> execCommand cmd return fileName evalUi :: (Functor m, MonadIO m, MonadState World m, InputHandler m) => Ui a -> m a evalUi (Return a) = return a evalUi (CurrentPlane cfn) = do p <- use (planeInfo . planes) evalUi (cfn p) evalUi (Switch p cfn) = planeInfo . planes .= p >> evalUi cfn evalUi (Echo t cfn) = doAlert t >> evalUi cfn evalUi (Hide t h cfn) = serverState . uiElements . ix t . ur . hidden .= h >> evalUi cfn evalUi (ToggleHidden cfn) = showHidden %= not >> evalUi cfn evalUi (Delete t cfn) = T.deleteElement t >> evalUi cfn evalUi (New s1 s2 cfn) = synthNew s1 s2 >> evalUi cfn evalUi (Run dir t cfn) = execScript dir t >> evalUi cfn evalUi (Load dir t cfn) = do fileName <- execScript dir t projectFile .= fileName evalUi cfn evalUi (SendBack t cfn) = sendToBack t >> evalUi cfn evalUi (BringFront t cfn) = bringToFront t >> evalUi cfn evalUi (PlugIn n t p creationParent cfn) = do (_, hi) <- depthExtent let e = In (UrElement creationParent False (hi+1) False p t) "#sample" t [] createdInParent n e creationParent evalUi (cfn n) evalUi (PlugOut n t p creationPlane cfn) = do (_, hi) <- depthExtent let e = Out (UrElement creationPlane False (hi+1) False p t) "#sample" createdInParent n e creationPlane evalUi (cfn n) evalUi (U.Knob n t p creationParent cfn) = do (_, hi) <- depthExtent let e = UIElement.Knob (UrElement creationParent False (hi+1) False p t) "#control" t 0.0 Nothing Nothing createdInParent n e creationParent evalUi (cfn n) evalUi (U.Selector n t p opts creationParent cfn) = do (_, hi) <- depthExtent let e = UIElement.Selector (UrElement creationParent False (hi+1) False p t) "#control" 0.0 opts createdInParent n e creationParent evalUi (cfn n) evalUi (U.TextBox n t p creationParent cfn) = do (_, hi) <- depthExtent let e = UIElement.TextBox (UrElement creationParent False (hi+1) False p t) "(0, 0, 1)" "" createdInParent n e creationParent evalUi (cfn n) evalUi (U.Proxy n proxyName p planeItsOn cfn) = do (_, hi) <- depthExtent let e = UIElement.Container { _ur = UrElement planeItsOn False (hi+1) False p proxyName , _pic = "panel_proxy.png" , _imageWidth = 40 , _imageHeight = 40 , _inside = S.empty , _outside = S.empty } createdInParent n e planeItsOn evalUi (cfn n) evalUi (U.Container n bmpName p creationPlane cfn) = do (_, hi) <- depthExtent maybePic <- getPic bmpName case maybePic of Right (width, height) -> do let e = UIElement.Container { _ur = UrElement creationPlane False (hi+1) False p (unUiId n) , _pic = bmpName , _imageWidth = width , _imageHeight = height , _inside = S.empty , _outside = S.empty } createdInParent n e creationPlane Left e -> doAlert e evalUi (cfn n) evalUi (U.SetPicture uiId pictureFileName cfn) = do maybePic <- getPic pictureFileName case maybePic of Right (width, height) -> do serverState . uiElements . ix uiId . pic .= pictureFileName serverState . uiElements . ix uiId . UIElement.imageWidth .= width serverState . uiElements . ix uiId . 
UIElement.imageHeight .= height Left e -> doAlert e evalUi cfn evalUi (U.Label n labelText p creationPlane cfn) = do (_, hi) <- depthExtent let e = UIElement.Label (UrElement creationPlane False (hi+1) False p labelText) createdInParent n e creationPlane evalUi (cfn n) evalUi (U.Cable s1 s2 cfn) = synthConnect s1 s2 >> evalUi cfn evalUi (U.UnCable dest cfn) = deleteCable dest >> evalUi cfn evalUi (U.Recompile cfn) = synthRecompile "Recompile command" >> evalUi cfn evalUi (U.Restart cfn) = synthReset "Restart command" >> evalUi cfn evalUi (U.Quit cfn) = synthQuit >> evalUi cfn evalUi (U.Check cfn) = do liftIO $ putStrLn "Consistency check..." result1 <- checkEverythingAccessibleFromRoot result2 <- checkChildrenHaveCorrectParent liftIO $ putStrLn $ if result1 && result2 then "No inconsistency found" else "Consistency problem" evalUi cfn -- Set command doesn't check evalUi (Set t v cfn) = do succeeded <- synthSet t v evalUi (cfn succeeded) evalUi (SetString t v cfn) = synthSetString t v >> evalUi cfn evalUi (SetLow t v cfn) = serverState . uiElements . ix t . UIElement.knobMin .= v >> evalUi cfn evalUi (SetName t n cfn) = serverState . uiElements . ix t . ur . UIElement.name .= n >> evalUi cfn evalUi (SetHigh t v cfn) = serverState . uiElements . ix t . UIElement.knobMax .= v >> evalUi cfn evalUi (SetColour t v cfn) = serverState . uiElements . ix t . UIElement.dataColour .= v >> evalUi cfn evalUi (Mouse cfn) = do p <- use mouseLoc evalUi (cfn p) evalUi (GetValue s1 cfn) = do elts <- use (serverState . uiElements) let a = case M.lookup s1 elts of Nothing -> error "No value" Just e -> UIElement._setting (e::UIElement) evalUi (cfn (a::Float)) evalUi (GetType s1 cfn) = do t <- getElementTypeById s1 evalUi (cfn t) -- Is this right? XXX evalUi (GetParent s1 cfn) = do elts <- use (serverState . uiElements) root <- use (planeInfo . rootPlane) if s1 == root then evalUi (cfn (Inside root)) else let a = case M.lookup s1 elts of Nothing -> error "No value" Just e -> UIElement._parent (_ur e) in evalUi (cfn a) evalUi (GetRoot cfn) = do root <- use (planeInfo . rootPlane) evalUi (cfn (root::UiId)) evalUi (Parent s1 s2 cfn) = T.reparent s1 s2 >> evalUi cfn evalUi (Rename namedTo toBeNamed cfn) = serverState . uiElements . ix toBeNamed . displayName .= namedTo >> evalUi cfn evalUi (Unparent s1 cfn) = T.unparent s1 >> evalUi cfn evalUi (Write t cfn) = do p <- use mouseLoc code <- saveSelection (Just (quantise2 quantum p)) liftIO $ writeFile ("scripts/" ++ t ++ ".hs") code evalUi cfn -- XXX evalUi (NewId s1 cfn) = do newN <- use newName newName %= (+ 1) let n = UiId (s1 ++ show newN) elts <- use (serverState . uiElements) evalUi $ if n `M.member` elts then NewId s1 cfn else cfn n evalUi (Selection cfn) = do a <- use currentSelection evalUi (cfn a) evalUi (Bind c t cfn) = keyMatcher %= addKey (interpretKeys c) t >> evalUi cfn -- Deprecate? XXX evalUi (Move c p cfn) = serverState . uiElements . ix c . ur . loc .= p >> evalUi cfn evalUi (GetName c cfn) = do elts <- use (serverState . uiElements) evalUi (cfn (_name . _ur <$> M.lookup c elts)) -- Not everything has a colour XXX evalUi (GetColour c cfn) = do elts <- use (serverState . uiElements) evalUi (cfn (_dataColour <$> M.lookup c elts)) evalUi (Location c cfn) = do elt <- getElementById "Location" c evalUi (cfn (elt ^. ur . 
loc)) evalUi (Input prompt cfn) = do inp <- getInput "" [] prompt evalUi (cfn inp) -- Supply list of filenames evalUi (InputFile prompt directory cfn) = do filenames <- liftIO $ getDirectoryContents directory inp <- getInput "" filenames prompt evalUi (cfn inp) evalUi (GetCableSource destId cfn) = do src <- cableSrc destId evalUi (cfn src) evalUi (Alias aliasName synthName cfn) = do synthAlias aliasName synthName evalUi cfn evalUi (UnAlias aliasName cfn) = do synthUnAlias aliasName evalUi cfn evalUi (SetOutput i cfn) = do outputId .= i evalUi cfn
null
https://raw.githubusercontent.com/dpiponi/Moodler/a0c984c36abae52668d00f25eb3749e97e8936d3/Moodler/src/Command.hs
haskell
XXX Work on error message display! | Execute plugin commands from a 'String'. | Execute plugin script from a .hs file ^ Execute script from this directory... ^ ...called by this name (leaving out the .hs)... ^ ...returning full filename of script use proper dir API XXX Set command doesn't check Is this right? XXX XXX Deprecate? XXX Not everything has a colour XXX Supply list of filenames
| Module : Command Description : Interpreter for plugin DSL Maintainer : The plugin DSL uses an AST build with a free monad . This module provides the interpreter . Module : Command Description : Interpreter for plugin DSL Maintainer : The plugin DSL uses an AST build with a free monad. This module provides the interpreter. -} # LANGUAGE FlexibleContexts # module Command(execScript, execCommand, evalUi) where import Control.Applicative import Control.Exception import Control.Lens import Control.Monad.State import Graphics.Gloss.Data.Picture import Graphics.Gloss.Data.Color import qualified Language.Haskell.Interpreter as I import qualified Data.Map as M import qualified Data.Set as S import System.Directory import Data.Attoparsec.Text import qualified Data.Text as T import Control.Monad.Error import Sound.MoodlerLib.Symbols import Sound.MoodlerLib.Quantise import Sound.MoodlerLib.UiLib as U import qualified NanoHaskell as N import Check import Wiring import ContainerTree import Save import UIElement import World import UISupport import qualified ContainerTree as T import qualified Box as B import KeyMatcher import KeyStrokes import ServerState import WorldSupport alertGadget :: String -> B.Transform -> Picture alertGadget alt _ = B.textInBox (makeColor 1.0 0.1 0.1 0.8) white alt doAlert :: (MonadIO m, MonadState World m) => String -> m () doAlert alt = do gadget .= alertGadget alt liftIO $ putStrLn alt commandImportList :: [String] commandImportList = [ "Prelude", "Control.Monad", "Text.Read", "System.Directory", "Sound.MoodlerLib.Symbols", "Sound.MoodlerLib.UiLib", "Sound.MoodlerLib.UiLibElement", "Sound.MoodlerLib.Quantise"] execNanoCommand :: (InputHandler m, MonadIO m, MonadState World m, Functor m) => N.Nano -> m () execNanoCommand r = do let a = runErrorT (N.interpret r) b <- evalUi a case b of Left e -> liftIO $ putStrLn ("Error: " ++ e) Right () -> return () execGHCCommand :: (InputHandler m, Functor m, MonadIO m, MonadState World m) => String -> m () execGHCCommand cmd = do commandResult <- liftIO $ I.runInterpreter $ do I.set [I.searchPath I.:= ["src"]] I.setImports commandImportList I.interpret cmd (I.as :: Ui ()) case commandResult of Left err -> doAlert $ case err of I.UnknownError e -> "Unknown error: " ++ e I.WontCompile es -> show (map I.errMsg es) I.NotAllowed e -> "Not allowed: " ++ e I.GhcException e -> "GHC exception: " ++ e Right commandTree -> evalUi commandTree execCommand :: (InputHandler m, Functor m, MonadIO m, MonadState World m) => String -> m () execCommand cmd = case parseOnly N.nanoParser (T.pack cmd) of Right r -> do liftIO $ putStrLn "Using nanoInterpreter" execNanoCommand r Left x -> do liftIO $ putStrLn ("Not using nanoInterpreter: " ++ x) execGHCCommand cmd safeReadFile :: String -> IO (Either String String) safeReadFile f = catch (Right <$> readFile f) $ \exception -> do let err = show (exception :: IOException) return $ Left err execScript :: (InputHandler m, Functor m, MonadIO m, MonadState World m) => let fileName = dir ++ "/" ++ f ++ ".hs" cmds <- liftIO $ safeReadFile fileName case cmds of Left err -> do liftIO $ putStrLn err gadget .= alertGadget err Right cmd -> execCommand cmd return fileName evalUi :: (Functor m, MonadIO m, MonadState World m, InputHandler m) => Ui a -> m a evalUi (Return a) = return a evalUi (CurrentPlane cfn) = do p <- use (planeInfo . planes) evalUi (cfn p) evalUi (Switch p cfn) = planeInfo . planes .= p >> evalUi cfn evalUi (Echo t cfn) = doAlert t >> evalUi cfn evalUi (Hide t h cfn) = serverState . uiElements . ix t . ur . 
hidden .= h >> evalUi cfn evalUi (ToggleHidden cfn) = showHidden %= not >> evalUi cfn evalUi (Delete t cfn) = T.deleteElement t >> evalUi cfn evalUi (New s1 s2 cfn) = synthNew s1 s2 >> evalUi cfn evalUi (Run dir t cfn) = execScript dir t >> evalUi cfn evalUi (Load dir t cfn) = do fileName <- execScript dir t projectFile .= fileName evalUi cfn evalUi (SendBack t cfn) = sendToBack t >> evalUi cfn evalUi (BringFront t cfn) = bringToFront t >> evalUi cfn evalUi (PlugIn n t p creationParent cfn) = do (_, hi) <- depthExtent let e = In (UrElement creationParent False (hi+1) False p t) "#sample" t [] createdInParent n e creationParent evalUi (cfn n) evalUi (PlugOut n t p creationPlane cfn) = do (_, hi) <- depthExtent let e = Out (UrElement creationPlane False (hi+1) False p t) "#sample" createdInParent n e creationPlane evalUi (cfn n) evalUi (U.Knob n t p creationParent cfn) = do (_, hi) <- depthExtent let e = UIElement.Knob (UrElement creationParent False (hi+1) False p t) "#control" t 0.0 Nothing Nothing createdInParent n e creationParent evalUi (cfn n) evalUi (U.Selector n t p opts creationParent cfn) = do (_, hi) <- depthExtent let e = UIElement.Selector (UrElement creationParent False (hi+1) False p t) "#control" 0.0 opts createdInParent n e creationParent evalUi (cfn n) evalUi (U.TextBox n t p creationParent cfn) = do (_, hi) <- depthExtent let e = UIElement.TextBox (UrElement creationParent False (hi+1) False p t) "(0, 0, 1)" "" createdInParent n e creationParent evalUi (cfn n) evalUi (U.Proxy n proxyName p planeItsOn cfn) = do (_, hi) <- depthExtent let e = UIElement.Container { _ur = UrElement planeItsOn False (hi+1) False p proxyName , _pic = "panel_proxy.png" , _imageWidth = 40 , _imageHeight = 40 , _inside = S.empty , _outside = S.empty } createdInParent n e planeItsOn evalUi (cfn n) evalUi (U.Container n bmpName p creationPlane cfn) = do (_, hi) <- depthExtent maybePic <- getPic bmpName case maybePic of Right (width, height) -> do let e = UIElement.Container { _ur = UrElement creationPlane False (hi+1) False p (unUiId n) , _pic = bmpName , _imageWidth = width , _imageHeight = height , _inside = S.empty , _outside = S.empty } createdInParent n e creationPlane Left e -> doAlert e evalUi (cfn n) evalUi (U.SetPicture uiId pictureFileName cfn) = do maybePic <- getPic pictureFileName case maybePic of Right (width, height) -> do serverState . uiElements . ix uiId . pic .= pictureFileName serverState . uiElements . ix uiId . UIElement.imageWidth .= width serverState . uiElements . ix uiId . UIElement.imageHeight .= height Left e -> doAlert e evalUi cfn evalUi (U.Label n labelText p creationPlane cfn) = do (_, hi) <- depthExtent let e = UIElement.Label (UrElement creationPlane False (hi+1) False p labelText) createdInParent n e creationPlane evalUi (cfn n) evalUi (U.Cable s1 s2 cfn) = synthConnect s1 s2 >> evalUi cfn evalUi (U.UnCable dest cfn) = deleteCable dest >> evalUi cfn evalUi (U.Recompile cfn) = synthRecompile "Recompile command" >> evalUi cfn evalUi (U.Restart cfn) = synthReset "Restart command" >> evalUi cfn evalUi (U.Quit cfn) = synthQuit >> evalUi cfn evalUi (U.Check cfn) = do liftIO $ putStrLn "Consistency check..." 
result1 <- checkEverythingAccessibleFromRoot result2 <- checkChildrenHaveCorrectParent liftIO $ putStrLn $ if result1 && result2 then "No inconsistency found" else "Consistency problem" evalUi cfn evalUi (Set t v cfn) = do succeeded <- synthSet t v evalUi (cfn succeeded) evalUi (SetString t v cfn) = synthSetString t v >> evalUi cfn evalUi (SetLow t v cfn) = serverState . uiElements . ix t . UIElement.knobMin .= v >> evalUi cfn evalUi (SetName t n cfn) = serverState . uiElements . ix t . ur . UIElement.name .= n >> evalUi cfn evalUi (SetHigh t v cfn) = serverState . uiElements . ix t . UIElement.knobMax .= v >> evalUi cfn evalUi (SetColour t v cfn) = serverState . uiElements . ix t . UIElement.dataColour .= v >> evalUi cfn evalUi (Mouse cfn) = do p <- use mouseLoc evalUi (cfn p) evalUi (GetValue s1 cfn) = do elts <- use (serverState . uiElements) let a = case M.lookup s1 elts of Nothing -> error "No value" Just e -> UIElement._setting (e::UIElement) evalUi (cfn (a::Float)) evalUi (GetType s1 cfn) = do t <- getElementTypeById s1 evalUi (cfn t) evalUi (GetParent s1 cfn) = do elts <- use (serverState . uiElements) root <- use (planeInfo . rootPlane) if s1 == root then evalUi (cfn (Inside root)) else let a = case M.lookup s1 elts of Nothing -> error "No value" Just e -> UIElement._parent (_ur e) in evalUi (cfn a) evalUi (GetRoot cfn) = do root <- use (planeInfo . rootPlane) evalUi (cfn (root::UiId)) evalUi (Parent s1 s2 cfn) = T.reparent s1 s2 >> evalUi cfn evalUi (Rename namedTo toBeNamed cfn) = serverState . uiElements . ix toBeNamed . displayName .= namedTo >> evalUi cfn evalUi (Unparent s1 cfn) = T.unparent s1 >> evalUi cfn evalUi (Write t cfn) = do p <- use mouseLoc code <- saveSelection (Just (quantise2 quantum p)) liftIO $ writeFile ("scripts/" ++ t ++ ".hs") code evalUi cfn evalUi (NewId s1 cfn) = do newN <- use newName newName %= (+ 1) let n = UiId (s1 ++ show newN) elts <- use (serverState . uiElements) evalUi $ if n `M.member` elts then NewId s1 cfn else cfn n evalUi (Selection cfn) = do a <- use currentSelection evalUi (cfn a) evalUi (Bind c t cfn) = keyMatcher %= addKey (interpretKeys c) t >> evalUi cfn evalUi (Move c p cfn) = serverState . uiElements . ix c . ur . loc .= p >> evalUi cfn evalUi (GetName c cfn) = do elts <- use (serverState . uiElements) evalUi (cfn (_name . _ur <$> M.lookup c elts)) evalUi (GetColour c cfn) = do elts <- use (serverState . uiElements) evalUi (cfn (_dataColour <$> M.lookup c elts)) evalUi (Location c cfn) = do elt <- getElementById "Location" c evalUi (cfn (elt ^. ur . loc)) evalUi (Input prompt cfn) = do inp <- getInput "" [] prompt evalUi (cfn inp) evalUi (InputFile prompt directory cfn) = do filenames <- liftIO $ getDirectoryContents directory inp <- getInput "" filenames prompt evalUi (cfn inp) evalUi (GetCableSource destId cfn) = do src <- cableSrc destId evalUi (cfn src) evalUi (Alias aliasName synthName cfn) = do synthAlias aliasName synthName evalUi cfn evalUi (UnAlias aliasName cfn) = do synthUnAlias aliasName evalUi cfn evalUi (SetOutput i cfn) = do outputId .= i evalUi cfn
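The Command.hs record above is the interpreter half of a free-monad plugin DSL: evalUi pattern-matches each Ui constructor and recurses on the continuation it carries. As orientation, here is a stripped-down sketch of that shape with a two-command functor; Cmd, echo, input and run are illustrative names, not Moodler's actual API, whose Ui type has many more constructors and whose interpreter runs in a World state monad.

module Main (main) where

-- A toy command functor: each constructor carries its continuation.
data Cmd next
  = Echo String next
  | Input String (String -> next)

instance Functor Cmd where
  fmap f (Echo s k)  = Echo s (f k)
  fmap f (Input p k) = Input p (f . k)

-- The standard free monad over a functor.
data Free f a = Pure a | Free (f (Free f a))

instance Functor f => Functor (Free f) where
  fmap f (Pure a) = Pure (f a)
  fmap f (Free g) = Free (fmap (fmap f) g)

instance Functor f => Applicative (Free f) where
  pure = Pure
  Pure f <*> x = fmap f x
  Free g <*> x = Free (fmap (<*> x) g)

instance Functor f => Monad (Free f) where
  Pure a >>= k = k a
  Free g >>= k = Free (fmap (>>= k) g)

-- Smart constructors, playing the role of the Ui library's commands.
echo :: String -> Free Cmd ()
echo s = Free (Echo s (Pure ()))

input :: String -> Free Cmd String
input p = Free (Input p Pure)

-- The interpreter: one clause per constructor, recursing on the
-- continuation, just as evalUi does for every Ui case.
run :: Free Cmd a -> IO a
run (Pure a)           = pure a
run (Free (Echo s k))  = putStrLn s >> run k
run (Free (Input p k)) = putStrLn p >> getLine >>= run . k

main :: IO ()
main = run (echo "hello" >> input "name?" >>= echo)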
6cd1e150671c6592d34d94a5adb2bc2cf1705c2e288d94b85dcf7ae562ef6cbf
gusenov/stepik-functional-programming-hs
Demo.hs
{- Implement, using the zipWith function, the function fibStream that returns the infinite list of Fibonacci numbers. GHCi> take 10 $ fibStream [0,1,1,2,3,5,8,13,21,34] -} module Demo where fibStream :: [Integer] fibStream = 0 : 1 : zipWith (+) fibStream (tail fibStream) -- #Canonical_zipWith_implementation
null
https://raw.githubusercontent.com/gusenov/stepik-functional-programming-hs/904164012b9b842a15d5ba3af05cfe589295fa5c/FibStream/Demo.hs
haskell
#Canonical_zipWith_implementation
{- Implement, using the zipWith function, the function fibStream that returns the infinite list of Fibonacci numbers. GHCi> take 10 $ fibStream [0,1,1,2,3,5,8,13,21,34] -} module Demo where fibStream :: [Integer] fibStream = 0 : 1 : zipWith (+) fibStream (tail fibStream)
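The Demo.hs record above is the canonical zipWith formulation of the Fibonacci stream. A small companion sketch follows: the same stream written by iterating the state pair (a, b) -> (b, a + b), plus a check that the two definitions agree on a prefix; fibStream' is an illustrative name, not part of the record.

module Main (main) where

-- The record's definition: each element is the sum of the two before it,
-- obtained by zipping the stream with its own tail.
fibStream :: [Integer]
fibStream = 0 : 1 : zipWith (+) fibStream (tail fibStream)

-- An equivalent definition that iterates the state pair (a, b) -> (b, a + b).
fibStream' :: [Integer]
fibStream' = map fst (iterate (\(a, b) -> (b, a + b)) (0, 1))

main :: IO ()
main = do
  print (take 10 fibStream)                        -- [0,1,1,2,3,5,8,13,21,34]
  print (take 10 fibStream == take 10 fibStream')  -- True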
6bca0c5df60ab73220730be2dde15c7b42a9a88d8b8a48d5696cefcd1fd4aea0
nilenso/goose
retry.clj
(ns ^:no-doc goose.brokers.redis.retry (:require [goose.brokers.redis.commands :as redis-cmds] [goose.defaults :as d] [goose.retry] [goose.utils :as u])) (defn- retry-job [{:keys [redis-conn error-service-config] :as _opts} {{:keys [retry-delay-sec-fn-sym error-handler-fn-sym]} :retry-opts {:keys [retry-count]} :state :as job} ex] (let [error-handler (u/require-resolve error-handler-fn-sym) retry-delay-sec ((u/require-resolve retry-delay-sec-fn-sym) retry-count) retry-at (u/sec+current-epoch-ms retry-delay-sec) job (assoc-in job [:state :retry-at] retry-at)] (u/log-on-exceptions (error-handler error-service-config job ex)) (redis-cmds/enqueue-sorted-set redis-conn d/prefixed-retry-schedule-queue retry-at job))) (defn- bury-job [{:keys [redis-conn error-service-config] :as _opts} {{:keys [skip-dead-queue death-handler-fn-sym]} :retry-opts {:keys [last-retried-at]} :state :as job} ex] (let [death-handler (u/require-resolve death-handler-fn-sym) died-at (or last-retried-at (u/epoch-time-ms)) job (assoc-in job [:state :died-at] died-at)] (u/log-on-exceptions (death-handler error-service-config job ex)) (when-not skip-dead-queue (redis-cmds/enqueue-sorted-set redis-conn d/prefixed-dead-queue died-at job)))) (defn wrap-failure [next] (fn [opts job] (try (next opts job) (catch Exception ex (let [failed-job (goose.retry/set-failed-config job ex) retry-count (get-in failed-job [:state :retry-count]) max-retries (get-in failed-job [:retry-opts :max-retries])] (if (< retry-count max-retries) (retry-job opts failed-job ex) (bury-job opts failed-job ex)))))))
null
https://raw.githubusercontent.com/nilenso/goose/8a5f2fe1a4fa138fee5b74e00109e840b0b1fad1/src/goose/brokers/redis/retry.clj
clojure
(ns ^:no-doc goose.brokers.redis.retry (:require [goose.brokers.redis.commands :as redis-cmds] [goose.defaults :as d] [goose.retry] [goose.utils :as u])) (defn- retry-job [{:keys [redis-conn error-service-config] :as _opts} {{:keys [retry-delay-sec-fn-sym error-handler-fn-sym]} :retry-opts {:keys [retry-count]} :state :as job} ex] (let [error-handler (u/require-resolve error-handler-fn-sym) retry-delay-sec ((u/require-resolve retry-delay-sec-fn-sym) retry-count) retry-at (u/sec+current-epoch-ms retry-delay-sec) job (assoc-in job [:state :retry-at] retry-at)] (u/log-on-exceptions (error-handler error-service-config job ex)) (redis-cmds/enqueue-sorted-set redis-conn d/prefixed-retry-schedule-queue retry-at job))) (defn- bury-job [{:keys [redis-conn error-service-config] :as _opts} {{:keys [skip-dead-queue death-handler-fn-sym]} :retry-opts {:keys [last-retried-at]} :state :as job} ex] (let [death-handler (u/require-resolve death-handler-fn-sym) died-at (or last-retried-at (u/epoch-time-ms)) job (assoc-in job [:state :died-at] died-at)] (u/log-on-exceptions (death-handler error-service-config job ex)) (when-not skip-dead-queue (redis-cmds/enqueue-sorted-set redis-conn d/prefixed-dead-queue died-at job)))) (defn wrap-failure [next] (fn [opts job] (try (next opts job) (catch Exception ex (let [failed-job (goose.retry/set-failed-config job ex) retry-count (get-in failed-job [:state :retry-count]) max-retries (get-in failed-job [:retry-opts :max-retries])] (if (< retry-count max-retries) (retry-job opts failed-job ex) (bury-job opts failed-job ex)))))))
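The retry.clj record above decides, on failure, between scheduling another attempt at now plus a delay derived from the retry count and burying the job once max-retries is exhausted. Below is a toy Haskell sketch of just that decision; the Decision type, the exponential delay formula and the use of plain epoch seconds are assumptions for illustration, not goose's actual defaults (goose computes retry-at in epoch milliseconds from its configured retry-delay-sec-fn).

module Main (main) where

-- Either schedule the next attempt at a given epoch second, or bury the job.
data Decision
  = RetryAt Integer
  | Bury
  deriving (Eq, Show)

-- An assumed exponential backoff: 5s, 10s, 20s, ...
retryDelaySec :: Int -> Integer
retryDelaySec retryCount = 5 * 2 ^ retryCount

-- Retry while retry-count < max-retries, otherwise bury, mirroring the
-- branch inside wrap-failure.
decide :: Integer -> Int -> Int -> Decision
decide nowSec retryCount maxRetries
  | retryCount < maxRetries = RetryAt (nowSec + retryDelaySec retryCount)
  | otherwise               = Bury

main :: IO ()
main = do
  print (decide 1700000000 0 3)  -- RetryAt 1700000005
  print (decide 1700000000 3 3)  -- Bury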