_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
bf1e08719663c0214828ab6b872d5d61e444bec92eb3230f31e4f9681ba1379c | neongreen/haskell-ex | Main.hs | import Data.Char
-- | Sum of alphabet positions (A=1 … Z=26) of the ASCII letters in a
-- word; every non-letter or non-ASCII character contributes 0.
scary :: String -> Int
scary = foldr ((+) . letterValue) 0
  where
    letterValue c
      | isAscii c && isLetter c = ord (toUpper c) - ord 'A' + 1
      | otherwise               = 0
-- | A word is scary when its letter values (see 'scary') sum to 13.
isScary :: String -> Bool
isScary word = 13 == scary word
-- | Read the system word list and print every word whose letter
-- values (A=1 … Z=26) sum to exactly 13.
main :: IO ()
main = do
contents <- readFile "/usr/share/dict/words"
putStr $ unlines $ filter isScary $ words contents
| null | https://raw.githubusercontent.com/neongreen/haskell-ex/345115444fdf370a43390fd942e2851b9b1963ad/week1/scary/alviprofluvium/Main.hs | haskell | import Data.Char
scary :: String -> Int
scary = sum . map value
where
value x
| isLetter x && isAscii x = ord (toUpper x) - (ord 'A' - 1)
| otherwise = 0
isScary :: String -> Bool
isScary xs = scary xs == 13
main :: IO ()
main = do
contents <- readFile "/usr/share/dict/words"
putStr $ unlines $ filter isScary $ words contents
|
|
c4348366ef51150eab569961ad675bf9a5bbae88948d5ede0b1a3f9fae564666 | Dasudian/DSDIN | dsdc_chain_state.erl |
-module(dsdc_chain_state).
-export([ find_common_ancestor/2
, get_hash_at_height/1
, hash_is_connected_to_genesis/1
, hash_is_in_main_chain/1
, insert_block/1
]).
%% For tests
-export([ get_top_block_hash/1
, get_hash_at_height/2
]).
-include("blocks.hrl").
-define(internal_error(____E____), {dsdc_chain_state_error, ____E____}).
%%%===================================================================
%%% API
%%%===================================================================
%% Return the main-chain block hash at the given height, or 'error' if
%% the height is above the current top. State is read from persistence.
-spec get_hash_at_height(dsdc_blocks:height()) -> {'ok', binary()} | 'error'.
get_hash_at_height(Height) when is_integer(Height), Height >= 0 ->
get_hash_at_height(Height, new_state_from_persistence()).
%% Insert a block into the chain, computing its state trees and
%% updating the top pointer if needed. Internal throws raised during
%% insertion are converted into an {error, What} return.
-spec insert_block(#block{}) -> 'ok' | {'error', any()}.
insert_block(Block) ->
Node = wrap_block(Block),
try internal_insert(Node, Block)
catch throw:?internal_error(What) -> {error, What}
end.
%% A hash is connected to genesis exactly when a fork id has been
%% stored for it (fork ids are written when block state is stored).
-spec hash_is_connected_to_genesis(binary()) -> boolean().
hash_is_connected_to_genesis(Hash) ->
    db_find_fork_id(Hash) =/= error.
%% Find the closest common ancestor hash of two known block hashes.
%% Returns {error, unknown_hash} if either hash is not in the db, and
%% {error, not_found} if no fork point can be determined.
-spec find_common_ancestor(binary(), binary()) ->
{'ok', binary()} | {error, atom()}.
find_common_ancestor(Hash1, Hash2) ->
case {db_find_node(Hash1), db_find_node(Hash2)} of
{{ok,_Node1}, {ok,_Node2}} ->
case find_fork_point(Hash1, Hash2) of
error -> {error, not_found};
{ok, ForkHash} -> {ok, ForkHash}
end;
_ -> {error, unknown_hash}
end.
%% True iff the hash names a known node lying on the current main
%% chain (the path from the persisted top hash back towards genesis).
-spec hash_is_in_main_chain(binary()) -> boolean().
hash_is_in_main_chain(Hash) ->
case db_find_node(Hash) of
{ok,_Node} ->
State = new_state_from_persistence(),
case get_top_block_hash(State) of
%% No top yet: nothing can be on the main chain.
undefined -> false;
TopHash -> hash_is_in_main_chain(Hash, TopHash)
end;
error -> false
end.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Build the in-memory chain-state map (top and genesis hashes) from
%% persisted values, inside a db transaction for a consistent snapshot.
new_state_from_persistence() ->
Fun = fun() ->
#{ type => ?MODULE
, top_block_hash => dsdc_db:get_top_block_hash()
, genesis_block_hash => dsdc_db:get_genesis_hash()
}
end,
dsdc_db:ensure_transaction(Fun).
%% Write the genesis and top block hashes back to the db.
%% Nothing is persisted before a genesis hash is known.
persist_state(State) ->
case get_genesis_hash(State) of
undefined -> ok;
GenesisHash ->
dsdc_db:write_genesis_hash(GenesisHash),
case get_top_block_hash(State) of
undefined -> ok;
TopBlockHash ->
dsdc_db:write_top_block_hash(TopBlockHash)
end
end.
%% Throw a module-tagged error; caught in insert_block/1 (and the
%% transaction wrapper in internal_insert/2) and turned into {error, _}.
-spec internal_error(_) -> no_return().
internal_error(What) ->
throw(?internal_error(What)).
%% Accessors/updaters for the chain-state map built in
%% new_state_from_persistence/0.
get_genesis_hash(#{genesis_block_hash := GH}) -> GH.
get_top_block_hash(#{top_block_hash := H}) -> H.
set_top_block_hash(H, State) when is_binary(H) -> State#{top_block_hash => H}.
%%%-------------------------------------------------------------------
%%% Internal ADT for differing between blocks and headers
%%%-------------------------------------------------------------------
%% Internal chain node: a block or header reduced to its header plus
%% the header's hash (computed once, in wrap_block/1 or wrap_header/1).
-record(node, { header :: #header{}
, hash :: binary()
}).
%% Field accessors for #node{}; header fields are read via dsdc_headers.
hash(#node{hash = Hash}) -> Hash.
prev_hash(#node{header = H}) -> dsdc_headers:prev_hash(H).
node_height(#node{header = H}) -> dsdc_headers:height(H).
node_version(#node{header = H}) -> dsdc_headers:version(H).
node_difficulty(#node{header = H}) -> dsdc_headers:difficulty(H).
node_root_hash(#node{header = H}) -> dsdc_headers:root_hash(H).
node_miner(#node{header = H}) -> dsdc_headers:miner(H).
%% If no genesis hash is known yet and the node sits at genesis height,
%% record its hash as the genesis hash; otherwise leave State untouched.
maybe_add_genesis_hash(#{genesis_block_hash := undefined} = State, Node) ->
    GenesisHeight = dsdc_block_genesis:height(),
    case node_height(Node) of
        GenesisHeight -> State#{genesis_block_hash => hash(Node)};
        _OtherHeight  -> State
    end;
maybe_add_genesis_hash(State, _Node) ->
    State.
%% Once a genesis hash is known, reject any different block at genesis
%% height (throws via internal_error/1).
assert_not_new_genesis(_Node, #{genesis_block_hash := undefined}) -> ok;
assert_not_new_genesis(Node, #{genesis_block_hash := GHash}) ->
case (node_height(Node) =:= dsdc_block_genesis:height()
andalso (hash(Node) =/= GHash)) of
true -> internal_error(rejecting_new_genesis_block);
false -> ok
end.
%% The 'undefined' clause applies when we insert the genesis block the first time.
%% Is this node the genesis block? Before a genesis hash is known we
%% decide by height; afterwards by comparing against the stored hash.
node_is_genesis(Node, #{genesis_block_hash := undefined}) ->
node_height(Node) =:= dsdc_block_genesis:height();
node_is_genesis(Node, State) ->
hash(Node) =:= get_genesis_hash(State).
%% Build a #node{} from a full block: extract its header and hash it.
wrap_block(Block) ->
Header = dsdc_blocks:to_header(Block),
{ok, Hash} = dsdc_headers:hash_header(Header),
#node{ header = Header
, hash = Hash
}.
%% Build a #node{} from a header by computing the header hash.
wrap_header(Header) ->
    {ok, HeaderHash} = dsdc_headers:hash_header(Header),
    #node{header = Header, hash = HeaderHash}.
%% Extract the plain header from a #node{}.
export_header(#node{header = Header}) ->
Header.
%% NOTE: Only return nodes in the main chain.
%% The function assumes that a node is in the main chain if
%% there is only one node at that height, and the height is lower
%% than the current top.
%% Return the main-chain hash at Height relative to State's top.
%% A single stored node at that height is assumed to be on the main
%% chain; with several candidates the main chain is checked explicitly.
get_hash_at_height(Height, State) when is_integer(Height), Height >= 0 ->
case get_top_block_hash(State) of
undefined -> error;
Hash ->
TopNode = db_get_node(Hash),
TopHeight = node_height(TopNode),
case Height > TopHeight of
true -> error;
false ->
%% Every height up to the top must have nodes;
%% an empty height means the chain is broken.
case db_find_nodes_at_height(Height) of
error -> error({broken_chain, Height});
{ok, [Node]} -> {ok, hash(Node)};
{ok, [_|_] = Nodes} ->
first_hash_in_main_chain(Nodes, Hash)
end
end
end.
%% Return {ok, Hash} for the first node in the list that lies on the
%% main chain ending in TopHash, or 'error' if none does.
first_hash_in_main_chain([], _TopHash) ->
    error;
first_hash_in_main_chain([Node | Rest], TopHash) ->
    NodeHash = hash(Node),
    case hash_is_in_main_chain(NodeHash, TopHash) of
        true  -> {ok, NodeHash};
        false -> first_hash_in_main_chain(Rest, TopHash)
    end.
%% Hash is on the chain ending in TopHash iff the fork point of the
%% two hashes is Hash itself (i.e., Hash is an ancestor of TopHash).
hash_is_in_main_chain(Hash, TopHash) ->
case find_fork_point(Hash, TopHash) of
{ok, Hash} -> true;
{ok, _} -> false;
error -> false
end.
%%%-------------------------------------------------------------------
%%% Chain operations
%%%-------------------------------------------------------------------
%% Insert a previously unseen node and transactionally recompute the
%% state trees reachable from it. Re-inserting the identical node is a
%% no-op; a different node under the same hash is an error.
internal_insert(Node, Block) ->
case db_find_node(hash(Node)) of
error ->
%% To preserve the invariants of the chain,
%% Only add the block if we can do the whole
%% transitive operation (i.e., calculate all the state
%% trees, and update the pointers)
Fun = fun() ->
State = new_state_from_persistence(),
%% Keep track of which node we are actually
%% adding to avoid giving spurious error
%% messages.
State1 = State#{ currently_adding => hash(Node)},
assert_not_new_genesis(Node, State1),
ok = db_put_node(Block, hash(Node)),
State2 = update_state_tree(Node, maybe_add_genesis_hash(State1, Node)),
persist_state(State2),
ok
end,
%% A throw inside the transaction surfaces as an aborted exit;
%% re-raise it as an internal error for insert_block/1 to catch.
try dsdc_db:ensure_transaction(Fun)
catch exit:{aborted, {throw, ?internal_error(What)}} -> internal_error(What)
end;
{ok, Node} -> ok;
{ok, Old} -> internal_error({same_key_different_content, Node, Old})
end.
%% If the previous node is known, its height must be exactly one less
%% than this node's height; otherwise there is nothing to check.
assert_previous_height(Node) ->
    case db_find_node(prev_hash(Node)) of
        error ->
            ok;
        {ok, PrevNode} ->
            ExpectedPrevHeight = node_height(Node) - 1,
            case node_height(PrevNode) of
                ExpectedPrevHeight -> ok;
                _Other -> internal_error(height_inconsistent_with_previous_hash)
            end
    end.
%% To assert the target calculation we need DeltaHeight headers counted
%% backwards from the node we want to assert. If Height =< DeltaHeight
%% we will need all headers back to genesis.
%% Validate the node's mining target. Genesis (height 0) and nodes
%% without a known predecessor are accepted as-is.
assert_calculated_target(Node) ->
case db_find_node(prev_hash(Node)) of
error -> ok;
{ok, PrevNode} ->
case node_height(Node) of
0 -> ok;
Height ->
Delta = dsdc_governance:blocks_to_check_difficulty_count(),
assert_calculated_target(Node, PrevNode, Delta, Height)
end
end.
%% Within the first Delta blocks the target must simply equal the
%% parent's; afterwards it is verified against the last Delta headers.
assert_calculated_target(Node, PrevNode, Delta, Height) when Delta >= Height ->
%% We only need to verify that the target is equal to its predecessor.
case {node_difficulty(Node), node_difficulty(PrevNode)} of
{X, X} -> ok;
{X, Y} -> internal_error({target_not_equal_to_parent, Node, X, Y})
end;
assert_calculated_target(Node, PrevNode, Delta, Height) when Delta < Height ->
case get_n_headers_from(PrevNode, Delta) of
{error, chain_too_short} ->
%% Not enough local history to verify; accept the node.
ok;
{ok, Headers} ->
Header = export_header(Node),
case dsdc_target:verify(Header, Headers) of
ok -> ok;
{error, {wrong_target, Actual, Expected}} ->
internal_error({wrong_target, Node, Actual, Expected})
end
end.
%% Collect N headers starting at Node and walking backwards via
%% prev_hash; the result list starts with Node's own header. Returns
%% {error, chain_too_short} when the history runs out before N headers.
get_n_headers_from(Node, N) ->
get_n_headers_from(Node, N-1, []).
get_n_headers_from(Node, 0, Acc) ->
{ok, lists:reverse([export_header(Node) | Acc])};
get_n_headers_from(Node, N, Acc) ->
case db_find_node(prev_hash(Node)) of
{ok, PrevNode} ->
get_n_headers_from(PrevNode, N-1, [export_header(Node) | Acc]);
error ->
{error, chain_too_short}
end.
%% Transitively compute new state trees iff
%% - We can find the state trees of the previous node; and
%% - The new node is a block.
%%
%% This should be called on the newly added node.
%% It will fail if called on a node that already has its state computed.
update_state_tree(Node, State) ->
case get_state_trees_in(Node, State) of
%% No state for the previous node yet: nothing can be computed.
error -> State;
{ok, Trees, Difficulty, ForkIdIn} ->
%% A non-genesis node with sibling blocks starts a new fork,
%% identified by its own hash; otherwise it inherits the fork id.
ForkId = case node_is_genesis(Node, State) of
true -> ForkIdIn;
false ->
case db_node_has_sibling_blocks(Node) of
true -> hash(Node);
false -> ForkIdIn
end
end,
{State1, NewTopDifficulty} =
update_state_tree(Node, Trees, Difficulty, ForkId, State),
OldTopHash = get_top_block_hash(State),
handle_top_block_change(OldTopHash, NewTopDifficulty, State1)
end.
%% Compute and store the state for Node, then recurse into its
%% children. Crashes if the node's state was already calculated.
update_state_tree(Node, TreesIn, Difficulty, ForkId, State) ->
case db_find_state(hash(Node)) of
{ok,_Trees,_DifficultyOut,_ForkId} ->
error({found_already_calculated_state, hash(Node)});
error ->
case apply_and_store_state_trees(Node, TreesIn, Difficulty,
ForkId, State) of
{ok, Trees, DifficultyOut} ->
update_next_state_tree(Node, Trees, DifficultyOut, ForkId, State);
error ->
%% Node could not be applied; keep the incoming difficulty.
{State, Difficulty}
end
end.
%% Set Node as the new top and recursively update the state trees of
%% all its children, returning {State, MaxDifficulty}.
update_next_state_tree(Node, Trees, Difficulty, ForkId, State) ->
    Hash = hash(Node),
    State1 = set_top_block_hash(Hash, State),
    case db_children(Node) of
        [] -> {State1, Difficulty};
        [Child|Left] ->
            %% If there is only one child, it inherits the fork id.
            %% For more than one child, we need new fork_ids, which are
            %% the first node hash of each new fork.
            Children = [{Child, ForkId}|[{C, hash(C)} || C <- Left]],
            update_next_state_tree_children(Children, Trees, Difficulty,
                                            Difficulty, State1)
    end.
%% Update the state trees of each child fork, tracking the maximum
%% accumulated difficulty; when a branch does not beat the current
%% maximum, the top hash is reset to the value before that branch.
update_next_state_tree_children([],_Trees,_Difficulty, Max, State) ->
{State, Max};
update_next_state_tree_children([{Child, ForkId}|Left], Trees, Difficulty, Max, State) ->
{State1, Max1} = update_state_tree(Child, Trees, Difficulty, ForkId, State),
case Max1 > Max of
true ->
update_next_state_tree_children(Left, Trees, Difficulty, Max1, State1);
false ->
State2 = set_top_block_hash(get_top_block_hash(State), State1),
update_next_state_tree_children(Left, Trees, Difficulty, Max, State2)
end.
%% State trees to build on: for genesis, the populated genesis trees
%% with the genesis difficulty and the node's own hash as fork id;
%% otherwise the stored state of the previous node ('error' if absent).
get_state_trees_in(Node, State) ->
case node_is_genesis(Node, State) of
true ->
{ok,
dsdc_block_genesis:populated_trees(),
dsdc_block_genesis:genesis_difficulty(),
hash(Node)};
false -> db_find_state(prev_hash(Node))
end.
%% Validate Node, apply its transactions to TreesIn, and persist the
%% resulting state trees together with the accumulated difficulty and
%% fork id. Returns {ok, Trees, Difficulty}, or 'error' when a node
%% other than the one currently being added fails validation.
apply_and_store_state_trees(Node, TreesIn, DifficultyIn, ForkId,
                            #{currently_adding := Hash}) ->
    NodeHash = hash(Node),
    try
        assert_previous_height(Node),
        Trees = apply_node_transactions(Node, TreesIn),
        assert_state_hash_valid(Trees, Node),
        assert_calculated_target(Node),
        Difficulty = DifficultyIn + node_difficulty(Node),
        %% Reuse the hash computed above instead of re-deriving it.
        ok = db_put_state(NodeHash, Trees, Difficulty, ForkId),
        {ok, Trees, Difficulty}
    catch
        %% Only catch this if the current node is NOT the one added in
        %% the call. We don't want to give an error message for any
        %% other node than that. But we want to make progress in the
        %% chain state even if a successor or predecessor to the
        %% currently added node is faulty.
        throw:?internal_error(_) when NodeHash =/= Hash -> error
    end.
%% Decide the top after state updates. The first top ever triggers a
%% main-chain build starting from genesis; otherwise the old top is
%% restored unless the new chain has strictly higher difficulty.
handle_top_block_change(OldTopHash, NewTopDifficulty, State) ->
case get_top_block_hash(State) of
OldTopHash -> State;
NewTopHash when OldTopHash =:= undefined ->
update_main_chain(get_genesis_hash(State), NewTopHash, State);
NewTopHash ->
{ok, OldTopDifficulty} = db_find_difficulty(OldTopHash),
case OldTopDifficulty >= NewTopDifficulty of
true -> set_top_block_hash(OldTopHash, State); %% Reset
false -> update_main_chain(OldTopHash, NewTopHash, State)
end
end.
%% Move the transaction-location index onto the new main chain. On a
%% fork switch, locations on the old branch (back to the fork point)
%% are removed before the new branch is indexed.
update_main_chain(undefined, NewTopHash, State) ->
%% NOTE(review): add_locations/2 takes (StopHash, CurrentHash); this
%% call passes the new top as stop hash and the genesis hash as the
%% walk start — verify the intended direction against add_locations/2.
add_locations(NewTopHash, get_genesis_hash(State)),
State;
update_main_chain(OldTopHash, NewTopHash, State) ->
case find_fork_point(OldTopHash, NewTopHash) of
{ok, OldTopHash} ->
%% Old top is an ancestor of the new top: pure extension.
add_locations(OldTopHash, NewTopHash),
State;
{ok, ForkHash} ->
remove_locations(ForkHash, OldTopHash),
add_locations(ForkHash, NewTopHash),
State
end.
%% Walk from CurrentHash back to StopHash (exclusive) via prev_hash,
%% dropping each block's tx-location entries and returning the tx
%% hashes to the mempool.
remove_locations(Hash, Hash) ->
ok;
remove_locations(StopHash, CurrentHash) ->
lists:foreach(fun(TxHash) ->
dsdc_db:remove_tx_location(TxHash),
dsdc_db:add_tx_hash_to_mempool(TxHash)
end, db_get_tx_hashes(CurrentHash)),
remove_locations(StopHash, db_get_prev_hash(CurrentHash)).
%% Walk from CurrentHash back to StopHash (exclusive) via prev_hash,
%% recording each tx's block location and removing it from the mempool.
add_locations(Hash, Hash) ->
ok;
add_locations(StopHash, CurrentHash) ->
lists:foreach(fun(TxHash) ->
dsdc_db:add_tx_location(TxHash, CurrentHash),
dsdc_db:remove_tx_from_mempool(TxHash)
end, db_get_tx_hashes(CurrentHash)),
add_locations(StopHash, db_get_prev_hash(CurrentHash)).
%% Throw (via internal_error/1) unless the state trees hash to the
%% root hash recorded in the node's header.
assert_state_hash_valid(Trees, Node) ->
    Expected = node_root_hash(Node),
    case dsdc_trees:hash(Trees) of
        Expected -> ok;
        Actual   -> internal_error({root_hash_mismatch, Actual, Expected})
    end.
%% Apply the block's transactions strictly to Trees; a single failing
%% transaction invalidates the whole block.
apply_node_transactions(Node, Trees) ->
Txs = db_get_txs(hash(Node)),
Height = node_height(Node),
Version = node_version(Node),
Miner = node_miner(Node),
case dsdc_block_candidate:apply_block_txs_strict(Txs, Miner, Trees, Height, Version) of
{ok, _, NewTrees} -> NewTrees;
{error,_What} -> internal_error(invalid_transactions_in_block)
end.
%% Find the last common ancestor of two hashes by comparing fork ids.
%% Returns 'error' when either hash has no stored fork id (i.e., is
%% not connected to genesis).
find_fork_point(Hash1, Hash2) ->
find_fork_point(Hash1, db_find_fork_id(Hash1), Hash2, db_find_fork_id(Hash2)).
%% Same fork: the node with the lower height is the ancestor.
find_fork_point(Hash1, {ok, FHash}, Hash2, {ok, FHash}) ->
Height1 = node_height(db_get_node(Hash1)),
Height2 = node_height(db_get_node(Hash2)),
case Height1 >= Height2 of
true -> {ok, Hash2};
false -> {ok, Hash1}
end;
%% Different forks: step past the start of the higher fork and retry.
find_fork_point(Hash1, {ok, FHash1}, Hash2, {ok, FHash2}) ->
Height1 = node_height(db_get_node(FHash1)),
Height2 = node_height(db_get_node(FHash2)),
if
Height1 > Height2 ->
PrevHash = db_get_prev_hash(FHash1),
PrevRes = db_find_fork_id(PrevHash),
find_fork_point(PrevHash, PrevRes, Hash2, {ok, FHash2});
Height2 >= Height1 ->
PrevHash = db_get_prev_hash(FHash2),
PrevRes = db_find_fork_id(PrevHash),
find_fork_point(Hash1, {ok, FHash1}, PrevHash, PrevRes)
end;
find_fork_point(_Hash1, _Res1,_Hash2,_Res2) ->
error.
%%%-------------------------------------------------------------------
%%% Internal interface for the db
%%%-------------------------------------------------------------------
%% Persist a block. The hash argument is only checked to be a binary;
%% the db derives the key from the block itself.
db_put_node(#block{} = Block, Hash) when is_binary(Hash) ->
ok = dsdc_db:write_block(Block).
%% Look up a header by hash, wrapped as a #node{}; 'error' if unknown.
db_find_node(Hash) when is_binary(Hash) ->
case dsdc_db:find_header(Hash) of
{value, Header} -> {ok, wrap_header(Header)};
none -> error
end.
%% As db_find_node/1, but crashes (badmatch) on an unknown hash.
db_get_node(Hash) when is_binary(Hash) ->
{ok, Node} = db_find_node(Hash),
Node.
%% All stored headers at the given height, wrapped as #node{} records;
%% 'error' when none exist.
db_find_nodes_at_height(Height) when is_integer(Height) ->
    case dsdc_db:find_headers_at_height(Height) of
        [] -> error;
        Headers -> {ok, [wrap_header(Header) || Header <- Headers]}
    end.
%% Commit the state trees to the db and record them (with accumulated
%% difficulty and fork id) under the block hash.
db_put_state(Hash, Trees, Difficulty, ForkId) when is_binary(Hash) ->
Trees1 = dsdc_trees:commit_to_db(Trees),
ok = dsdc_db:write_block_state(Hash, Trees1, Difficulty, ForkId).
%% Fetch stored state trees, difficulty and fork id; 'error' if absent.
db_find_state(Hash) when is_binary(Hash) ->
case dsdc_db:find_block_state_and_data(Hash) of
{value, Trees, Difficulty, ForkId} -> {ok, Trees, Difficulty, ForkId};
none -> error
end.
%% Fetch the stored accumulated difficulty; 'error' if absent.
db_find_difficulty(Hash) when is_binary(Hash) ->
case dsdc_db:find_block_difficulty(Hash) of
{value, Difficulty} -> {ok, Difficulty};
none -> error
end.
%% Fetch the stored fork id; 'error' if absent.
db_find_fork_id(Hash) when is_binary(Hash) ->
case dsdc_db:find_block_fork_id(Hash) of
{value, ForkId} -> {ok, ForkId};
none -> error
end.
%% Transactions of the block stored under Hash.
db_get_txs(Hash) when is_binary(Hash) ->
dsdc_blocks:txs(dsdc_db:get_block(Hash)).
%% Transaction hashes of the block stored under Hash.
db_get_tx_hashes(Hash) when is_binary(Hash) ->
dsdc_db:get_block_tx_hashes(Hash).
%% Previous-block hash; crashes (badmatch) if Hash is unknown.
db_get_prev_hash(Hash) when is_binary(Hash) ->
{value, PrevHash} = db_find_prev_hash(Hash),
PrevHash.
%% Previous-block hash as {value, PrevHash} | none.
db_find_prev_hash(Hash) when is_binary(Hash) ->
case db_find_node(Hash) of
{ok, Node} -> {value, prev_hash(Node)};
error -> none
end.
%% All nodes at the next height whose prev_hash points at this node.
db_children(#node{} = Node) ->
Height = node_height(Node),
Hash = hash(Node),
[wrap_header(Header)
|| Header <- dsdc_db:find_headers_at_height(Height + 1),
dsdc_headers:prev_hash(Header) =:= Hash].
%% True when more than one stored header at this height shares the
%% node's previous hash, i.e. the node has sibling blocks.
db_node_has_sibling_blocks(Node) ->
Height = node_height(Node),
PrevHash = prev_hash(Node),
length([1 || Header <- dsdc_db:find_headers_at_height(Height),
dsdc_headers:prev_hash(Header) =:= PrevHash]) > 1.
| null | https://raw.githubusercontent.com/Dasudian/DSDIN/b27a437d8deecae68613604fffcbb9804a6f1729/apps/dsdcore/src/dsdc_chain_state.erl | erlang | For tests
===================================================================
API
===================================================================
===================================================================
===================================================================
-------------------------------------------------------------------
-------------------------------------------------------------------
NOTE: Only return nodes in the main chain.
The function assumes that a node is in the main chain if
than the current top.
-------------------------------------------------------------------
Chain operations
-------------------------------------------------------------------
To preserve the invariants of the chain,
Only add the block if we can do the whole
transitive operation (i.e., calculate all the state
trees, and update the pointers)
Keep track of which node we are actually
adding to avoid giving spurious error
messages.
we will need all headers back to genesis.
We only need to verify that the target is equal to its predecessor.
- We can find the state trees of the previous node; and
- The new node is a block.
This should be called on the newly added node.
It will fail if called on a node that already has its state computed.
Only catch this if the current node is NOT the one added in
the call. We don't want to give an error message for any
other node that that. But we want to make progress in the
chain state even if a successor or predecessor to the
currently added node is faulty.
Reset
-------------------------------------------------------------------
------------------------------------------------------------------- |
-module(dsdc_chain_state).
-export([ find_common_ancestor/2
, get_hash_at_height/1
, hash_is_connected_to_genesis/1
, hash_is_in_main_chain/1
, insert_block/1
]).
-export([ get_top_block_hash/1
, get_hash_at_height/2
]).
-include("blocks.hrl").
-define(internal_error(____E____), {dsdc_chain_state_error, ____E____}).
-spec get_hash_at_height(dsdc_blocks:height()) -> {'ok', binary()} | 'error'.
get_hash_at_height(Height) when is_integer(Height), Height >= 0 ->
get_hash_at_height(Height, new_state_from_persistence()).
-spec insert_block(#block{}) -> 'ok' | {'error', any()}.
insert_block(Block) ->
Node = wrap_block(Block),
try internal_insert(Node, Block)
catch throw:?internal_error(What) -> {error, What}
end.
-spec hash_is_connected_to_genesis(binary()) -> boolean().
hash_is_connected_to_genesis(Hash) ->
case db_find_fork_id(Hash) of
{ok,_ForkId} -> true;
error -> false
end.
-spec find_common_ancestor(binary(), binary()) ->
{'ok', binary()} | {error, atom()}.
find_common_ancestor(Hash1, Hash2) ->
case {db_find_node(Hash1), db_find_node(Hash2)} of
{{ok,_Node1}, {ok,_Node2}} ->
case find_fork_point(Hash1, Hash2) of
error -> {error, not_found};
{ok, ForkHash} -> {ok, ForkHash}
end;
_ -> {error, unknown_hash}
end.
-spec hash_is_in_main_chain(binary()) -> boolean().
hash_is_in_main_chain(Hash) ->
case db_find_node(Hash) of
{ok,_Node} ->
State = new_state_from_persistence(),
case get_top_block_hash(State) of
undefined -> false;
TopHash -> hash_is_in_main_chain(Hash, TopHash)
end;
error -> false
end.
Internal functions
new_state_from_persistence() ->
Fun = fun() ->
#{ type => ?MODULE
, top_block_hash => dsdc_db:get_top_block_hash()
, genesis_block_hash => dsdc_db:get_genesis_hash()
}
end,
dsdc_db:ensure_transaction(Fun).
persist_state(State) ->
case get_genesis_hash(State) of
undefined -> ok;
GenesisHash ->
dsdc_db:write_genesis_hash(GenesisHash),
case get_top_block_hash(State) of
undefined -> ok;
TopBlockHash ->
dsdc_db:write_top_block_hash(TopBlockHash)
end
end.
-spec internal_error(_) -> no_return().
internal_error(What) ->
throw(?internal_error(What)).
get_genesis_hash(#{genesis_block_hash := GH}) -> GH.
get_top_block_hash(#{top_block_hash := H}) -> H.
set_top_block_hash(H, State) when is_binary(H) -> State#{top_block_hash => H}.
Internal ADT for differing between blocks and headers
-record(node, { header :: #header{}
, hash :: binary()
}).
hash(#node{hash = Hash}) -> Hash.
prev_hash(#node{header = H}) -> dsdc_headers:prev_hash(H).
node_height(#node{header = H}) -> dsdc_headers:height(H).
node_version(#node{header = H}) -> dsdc_headers:version(H).
node_difficulty(#node{header = H}) -> dsdc_headers:difficulty(H).
node_root_hash(#node{header = H}) -> dsdc_headers:root_hash(H).
node_miner(#node{header = H}) -> dsdc_headers:miner(H).
maybe_add_genesis_hash(#{genesis_block_hash := undefined} = State, Node) ->
case node_height(Node) =:= dsdc_block_genesis:height() of
true -> State#{genesis_block_hash => hash(Node)};
false -> State
end;
maybe_add_genesis_hash(State,_Node) ->
State.
assert_not_new_genesis(_Node, #{genesis_block_hash := undefined}) -> ok;
assert_not_new_genesis(Node, #{genesis_block_hash := GHash}) ->
case (node_height(Node) =:= dsdc_block_genesis:height()
andalso (hash(Node) =/= GHash)) of
true -> internal_error(rejecting_new_genesis_block);
false -> ok
end.
this is when we insert the genesis block the first time
node_is_genesis(Node, #{genesis_block_hash := undefined}) ->
node_height(Node) =:= dsdc_block_genesis:height();
node_is_genesis(Node, State) ->
hash(Node) =:= get_genesis_hash(State).
wrap_block(Block) ->
Header = dsdc_blocks:to_header(Block),
{ok, Hash} = dsdc_headers:hash_header(Header),
#node{ header = Header
, hash = Hash
}.
wrap_header(Header) ->
{ok, Hash} = dsdc_headers:hash_header(Header),
#node{ header = Header
, hash = Hash
}.
export_header(#node{header = Header}) ->
Header.
there is only one node at that height , and the height is lower
get_hash_at_height(Height, State) when is_integer(Height), Height >= 0 ->
case get_top_block_hash(State) of
undefined -> error;
Hash ->
TopNode = db_get_node(Hash),
TopHeight = node_height(TopNode),
case Height > TopHeight of
true -> error;
false ->
case db_find_nodes_at_height(Height) of
error -> error({broken_chain, Height});
{ok, [Node]} -> {ok, hash(Node)};
{ok, [_|_] = Nodes} ->
first_hash_in_main_chain(Nodes, Hash)
end
end
end.
first_hash_in_main_chain([Node|Left], TopHash) ->
case hash_is_in_main_chain(hash(Node), TopHash) of
true -> {ok, hash(Node)};
false -> first_hash_in_main_chain(Left, TopHash)
end;
first_hash_in_main_chain([],_TopHash) ->
error.
hash_is_in_main_chain(Hash, TopHash) ->
case find_fork_point(Hash, TopHash) of
{ok, Hash} -> true;
{ok, _} -> false;
error -> false
end.
internal_insert(Node, Block) ->
case db_find_node(hash(Node)) of
error ->
Fun = fun() ->
State = new_state_from_persistence(),
State1 = State#{ currently_adding => hash(Node)},
assert_not_new_genesis(Node, State1),
ok = db_put_node(Block, hash(Node)),
State2 = update_state_tree(Node, maybe_add_genesis_hash(State1, Node)),
persist_state(State2),
ok
end,
try dsdc_db:ensure_transaction(Fun)
catch exit:{aborted, {throw, ?internal_error(What)}} -> internal_error(What)
end;
{ok, Node} -> ok;
{ok, Old} -> internal_error({same_key_different_content, Node, Old})
end.
assert_previous_height(Node) ->
case db_find_node(prev_hash(Node)) of
{ok, PrevNode} ->
case node_height(PrevNode) =:= (node_height(Node) - 1) of
true -> ok;
false -> internal_error(height_inconsistent_with_previous_hash)
end;
error -> ok
end.
To assert the target calculation we need DeltaHeight headers counted
backwards from the node we want to assert . If < = DeltaHeight
assert_calculated_target(Node) ->
case db_find_node(prev_hash(Node)) of
error -> ok;
{ok, PrevNode} ->
case node_height(Node) of
0 -> ok;
Height ->
Delta = dsdc_governance:blocks_to_check_difficulty_count(),
assert_calculated_target(Node, PrevNode, Delta, Height)
end
end.
assert_calculated_target(Node, PrevNode, Delta, Height) when Delta >= Height ->
case {node_difficulty(Node), node_difficulty(PrevNode)} of
{X, X} -> ok;
{X, Y} -> internal_error({target_not_equal_to_parent, Node, X, Y})
end;
assert_calculated_target(Node, PrevNode, Delta, Height) when Delta < Height ->
case get_n_headers_from(PrevNode, Delta) of
{error, chain_too_short} ->
ok;
{ok, Headers} ->
Header = export_header(Node),
case dsdc_target:verify(Header, Headers) of
ok -> ok;
{error, {wrong_target, Actual, Expected}} ->
internal_error({wrong_target, Node, Actual, Expected})
end
end.
get_n_headers_from(Node, N) ->
get_n_headers_from(Node, N-1, []).
get_n_headers_from(Node, 0, Acc) ->
{ok, lists:reverse([export_header(Node) | Acc])};
get_n_headers_from(Node, N, Acc) ->
case db_find_node(prev_hash(Node)) of
{ok, PrevNode} ->
get_n_headers_from(PrevNode, N-1, [export_header(Node) | Acc]);
error ->
{error, chain_too_short}
end.
Transitively compute new state trees iff
update_state_tree(Node, State) ->
case get_state_trees_in(Node, State) of
error -> State;
{ok, Trees, Difficulty, ForkIdIn} ->
ForkId = case node_is_genesis(Node, State) of
true -> ForkIdIn;
false ->
case db_node_has_sibling_blocks(Node) of
true -> hash(Node);
false -> ForkIdIn
end
end,
{State1, NewTopDifficulty} =
update_state_tree(Node, Trees, Difficulty, ForkId, State),
OldTopHash = get_top_block_hash(State),
handle_top_block_change(OldTopHash, NewTopDifficulty, State1)
end.
update_state_tree(Node, TreesIn, Difficulty, ForkId, State) ->
case db_find_state(hash(Node)) of
{ok,_Trees,_DifficultyOut,_ForkId} ->
error({found_already_calculated_state, hash(Node)});
error ->
case apply_and_store_state_trees(Node, TreesIn, Difficulty,
ForkId, State) of
{ok, Trees, DifficultyOut} ->
update_next_state_tree(Node, Trees, DifficultyOut, ForkId, State);
error ->
{State, Difficulty}
end
end.
update_next_state_tree(Node, Trees, Difficulty, ForkId, State) ->
Hash = hash(Node),
State1 = set_top_block_hash(Hash, State),
case db_children(Node) of
[] -> {State1, Difficulty};
[Child|Left] ->
If there is only one child , it inherits the fork i d.
For more than one child , we neeed new fork_ids , which are
the first node hash of each new fork .
Children = [{Child, ForkId}|[{C, hash(C)}|| C <- Left]],
update_next_state_tree_children(Children, Trees, Difficulty,
Difficulty, State1)
end.
update_next_state_tree_children([],_Trees,_Difficulty, Max, State) ->
{State, Max};
update_next_state_tree_children([{Child, ForkId}|Left], Trees, Difficulty, Max, State) ->
{State1, Max1} = update_state_tree(Child, Trees, Difficulty, ForkId, State),
case Max1 > Max of
true ->
update_next_state_tree_children(Left, Trees, Difficulty, Max1, State1);
false ->
State2 = set_top_block_hash(get_top_block_hash(State), State1),
update_next_state_tree_children(Left, Trees, Difficulty, Max, State2)
end.
get_state_trees_in(Node, State) ->
case node_is_genesis(Node, State) of
true ->
{ok,
dsdc_block_genesis:populated_trees(),
dsdc_block_genesis:genesis_difficulty(),
hash(Node)};
false -> db_find_state(prev_hash(Node))
end.
apply_and_store_state_trees(Node, TreesIn, DifficultyIn, ForkId,
#{currently_adding := Hash}) ->
NodeHash = hash(Node),
try
assert_previous_height(Node),
Trees = apply_node_transactions(Node, TreesIn),
assert_state_hash_valid(Trees, Node),
assert_calculated_target(Node),
Difficulty = DifficultyIn + node_difficulty(Node),
ok = db_put_state(hash(Node), Trees, Difficulty, ForkId),
{ok, Trees, Difficulty}
catch
throw:?internal_error(_) when NodeHash =/= Hash -> error
end.
handle_top_block_change(OldTopHash, NewTopDifficulty, State) ->
case get_top_block_hash(State) of
OldTopHash -> State;
NewTopHash when OldTopHash =:= undefined ->
update_main_chain(get_genesis_hash(State), NewTopHash, State);
NewTopHash ->
{ok, OldTopDifficulty} = db_find_difficulty(OldTopHash),
case OldTopDifficulty >= NewTopDifficulty of
false -> update_main_chain(OldTopHash, NewTopHash, State)
end
end.
update_main_chain(undefined, NewTopHash, State) ->
add_locations(NewTopHash, get_genesis_hash(State)),
State;
update_main_chain(OldTopHash, NewTopHash, State) ->
case find_fork_point(OldTopHash, NewTopHash) of
{ok, OldTopHash} ->
add_locations(OldTopHash, NewTopHash),
State;
{ok, ForkHash} ->
remove_locations(ForkHash, OldTopHash),
add_locations(ForkHash, NewTopHash),
State
end.
remove_locations(Hash, Hash) ->
ok;
remove_locations(StopHash, CurrentHash) ->
lists:foreach(fun(TxHash) ->
dsdc_db:remove_tx_location(TxHash),
dsdc_db:add_tx_hash_to_mempool(TxHash)
end, db_get_tx_hashes(CurrentHash)),
remove_locations(StopHash, db_get_prev_hash(CurrentHash)).
add_locations(Hash, Hash) ->
ok;
add_locations(StopHash, CurrentHash) ->
lists:foreach(fun(TxHash) ->
dsdc_db:add_tx_location(TxHash, CurrentHash),
dsdc_db:remove_tx_from_mempool(TxHash)
end, db_get_tx_hashes(CurrentHash)),
add_locations(StopHash, db_get_prev_hash(CurrentHash)).
assert_state_hash_valid(Trees, Node) ->
RootHash = dsdc_trees:hash(Trees),
Expected = node_root_hash(Node),
case RootHash =:= Expected of
true -> ok;
false ->
internal_error({root_hash_mismatch, RootHash, Expected})
end.
apply_node_transactions(Node, Trees) ->
Txs = db_get_txs(hash(Node)),
Height = node_height(Node),
Version = node_version(Node),
Miner = node_miner(Node),
case dsdc_block_candidate:apply_block_txs_strict(Txs, Miner, Trees, Height, Version) of
{ok, _, NewTrees} -> NewTrees;
{error,_What} -> internal_error(invalid_transactions_in_block)
end.
find_fork_point(Hash1, Hash2) ->
find_fork_point(Hash1, db_find_fork_id(Hash1), Hash2, db_find_fork_id(Hash2)).
find_fork_point(Hash1, {ok, FHash}, Hash2, {ok, FHash}) ->
Height1 = node_height(db_get_node(Hash1)),
Height2 = node_height(db_get_node(Hash2)),
case Height1 >= Height2 of
true -> {ok, Hash2};
false -> {ok, Hash1}
end;
find_fork_point(Hash1, {ok, FHash1}, Hash2, {ok, FHash2}) ->
Height1 = node_height(db_get_node(FHash1)),
Height2 = node_height(db_get_node(FHash2)),
if
Height1 > Height2 ->
PrevHash = db_get_prev_hash(FHash1),
PrevRes = db_find_fork_id(PrevHash),
find_fork_point(PrevHash, PrevRes, Hash2, {ok, FHash2});
Height2 >= Height1 ->
PrevHash = db_get_prev_hash(FHash2),
PrevRes = db_find_fork_id(PrevHash),
find_fork_point(Hash1, {ok, FHash1}, PrevHash, PrevRes)
end;
find_fork_point(_Hash1, _Res1,_Hash2,_Res2) ->
error.
Internal interface for the db
db_put_node(#block{} = Block, Hash) when is_binary(Hash) ->
ok = dsdc_db:write_block(Block).
db_find_node(Hash) when is_binary(Hash) ->
case dsdc_db:find_header(Hash) of
{value, Header} -> {ok, wrap_header(Header)};
none -> error
end.
db_get_node(Hash) when is_binary(Hash) ->
{ok, Node} = db_find_node(Hash),
Node.
%% Return {ok, Nodes} for all headers stored at the given height, in
%% the order dsdc_db yields them, or error when no header exists at
%% that height.
db_find_nodes_at_height(Height) when is_integer(Height) ->
    case dsdc_db:find_headers_at_height(Height) of
        [_|_] = Headers ->
            %% Comprehension instead of lists:map with a one-off fun
            %% that merely forwarded to wrap_header/1.
            {ok, [wrap_header(Header) || Header <- Headers]};
        [] -> error
    end.
%% Persist the state associated with a block: commit the state trees to
%% the db backend first, then store them together with the accumulated
%% Difficulty and the ForkId of the segment the block belongs to.
db_put_state(Hash, Trees, Difficulty, ForkId) when is_binary(Hash) ->
Trees1 = dsdc_trees:commit_to_db(Trees),
ok = dsdc_db:write_block_state(Hash, Trees1, Difficulty, ForkId).
%% Fetch everything stored by db_put_state/4 for a block: its state
%% trees, accumulated difficulty and fork id. Returns error when the
%% block state has not been persisted.
db_find_state(Hash) when is_binary(Hash) ->
    case dsdc_db:find_block_state_and_data(Hash) of
        none ->
            error;
        {value, Trees, Difficulty, ForkId} ->
            {ok, Trees, Difficulty, ForkId}
    end.
%% Accumulated difficulty of a block; {ok, Difficulty} | error.
db_find_difficulty(Hash) when is_binary(Hash) ->
case dsdc_db:find_block_difficulty(Hash) of
{value, Difficulty} -> {ok, Difficulty};
none -> error
end.
%% Fork id of the segment a block belongs to; {ok, ForkId} | error.
db_find_fork_id(Hash) when is_binary(Hash) ->
case dsdc_db:find_block_fork_id(Hash) of
{value, ForkId} -> {ok, ForkId};
none -> error
end.
%% Transactions of a block; crashes if the block is missing.
db_get_txs(Hash) when is_binary(Hash) ->
dsdc_blocks:txs(dsdc_db:get_block(Hash)).
%% Hashes of a block's transactions; crashes if the block is missing.
db_get_tx_hashes(Hash) when is_binary(Hash) ->
dsdc_db:get_block_tx_hashes(Hash).
%% Previous-block hash of a known node; crashes (badmatch) when the
%% node is not in the db -- use db_find_prev_hash/1 to probe safely.
db_get_prev_hash(Hash) when is_binary(Hash) ->
{value, PrevHash} = db_find_prev_hash(Hash),
PrevHash.
%% Previous-block hash recorded on the node stored under Hash.
%% Returns {value, PrevHash} when the node is known, otherwise none.
db_find_prev_hash(Hash) when is_binary(Hash) ->
    case db_find_node(Hash) of
        error      -> none;
        {ok, Node} -> {value, prev_hash(Node)}
    end.
%% All nodes at the next height whose previous hash points at Node,
%% i.e. the direct children of Node, in storage order.
db_children(#node{} = Node) ->
    ChildHeight = node_height(Node) + 1,
    Self = hash(Node),
    lists:filtermap(
      fun(Header) ->
              case dsdc_headers:prev_hash(Header) =:= Self of
                  true  -> {true, wrap_header(Header)};
                  false -> false
              end
      end,
      dsdc_db:find_headers_at_height(ChildHeight)).
%% True iff at least two headers at this node's height share its
%% previous hash -- i.e. the node has at least one sibling block.
%% (The node's own header is among the matches, hence "more than one".)
db_node_has_sibling_blocks(Node) ->
    PrevHash = prev_hash(Node),
    SameParent = [Header || Header <- dsdc_db:find_headers_at_height(node_height(Node)),
                            dsdc_headers:prev_hash(Header) =:= PrevHash],
    case SameParent of
        [_, _ | _] -> true;
        _          -> false
    end.
|
1ebb079e4722633afca81e2ba64c058fa994119296729ec1d3b157696f9598ec | DomainDrivenArchitecture/dda-cloudspec | cloudspec_test_runner.cljs | Copyright 2014 - 2018 meissa . All Rights Reserved .
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS - IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;; Entry point for the doo ClojureScript test runner.
;; pjstadig.humane-test-output is required bare, presumably for its
;; load-time side effect of prettifying assertion failures (confirm).
(ns dda.cloudspec-test-runner
(:require [doo.runner :refer-macros [doo-tests]]
[pjstadig.humane-test-output]
[dda.template-test]))
;; Register the namespaces whose tests doo should execute.
(doo-tests 'dda.template-test)
| null | https://raw.githubusercontent.com/DomainDrivenArchitecture/dda-cloudspec/8197de8b5eb528fce14d71ec83298a38d282ab90/test/cljs/dda/cloudspec_test_runner.cljs | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2014 - 2018 meissa . All Rights Reserved .
distributed under the License is distributed on an " AS - IS " BASIS ,
(ns dda.cloudspec-test-runner
(:require [doo.runner :refer-macros [doo-tests]]
[pjstadig.humane-test-output]
[dda.template-test]))
(doo-tests 'dda.template-test)
|
973f52d0052a2503a4d73c501b3f9b065ef8387910bf8b5839e53e876db7dc9d | cgrand/parsnip | asm.clj | (ns parsnip.asm)
;; Rewrite a flat [op arg op arg ...] program: apply f to the target of
;; every :CALL/:JUMP/:FORK instruction, drop :LABEL pseudo-instructions,
;; and pass every other instruction through unchanged. Returns a vector.
(defn- map-targets-drop-labels [f pgm]
  (into []
        (mapcat (fn [[op arg]]
                  (case op
                    (:CALL :JUMP :FORK) [op (f arg)]
                    :LABEL []
                    [op arg])))
        (partition 2 pgm)))
;; Resolve a program's symbolic jump/call/fork targets: collect the pc
;; of every :LABEL (adjusting for the two slots each earlier label
;; occupies, since labels are removed from the output), then rewrite
;; targets to pcs and drop the labels. Throws on duplicate or unknown
;; labels.
(defn link [pgm]
  (let [labels (reduce (fn [labels pc]
                         (let [label (nth pgm (inc pc))
                               ;; Shift left by two slots per label seen so far:
                               ;; those [:LABEL name] pairs vanish from the output.
                               pc (- pc (* 2 (count labels)))]
                           (when-some [pc' (labels label)]
                             (throw (ex-info "Label used twice." {:label label :pcs [pc' pc]})))
                           (assoc labels label pc)))
                       {}
                       (filter #(= :LABEL (nth pgm %)) (range 0 (count pgm) 2)))]
    (map-targets-drop-labels
     ;; BUG FIX: previously reported {:label (labels %)}, which is
     ;; always nil in this branch; report the missing label itself.
     #(or (labels %) (throw (ex-info "Label not found." {:label %})))
     pgm)))
;; Inverse of link: give every :CALL/:JUMP/:FORK target pc a generated
;; label (sorted-map keeps targets in pc order) and re-emit the program
;; with [:LABEL name] markers inserted before each labeled slice.
(defn unlink [pgm]
(let [labels (into (sorted-map) (keep (fn [[op arg]] (case op (:FORK :JUMP :CALL) [arg (gensym :label_)] nil)) (partition 2 pgm)))
;; slice rewrites pgm[from, to): the labels map itself serves as
;; the target->label lookup fn (every target has an entry above).
slice (fn [from to]
(map-targets-drop-labels labels (subvec pgm from to)))
(reduce (fn [unlinked-pgm [[from label] [to]]]
(-> unlinked-pgm
(conj :LABEL label)
(into (slice from to))))
;; Seed with the unlabeled prefix before the first target pc.
(slice 0 (first (keys labels)))
(partition 2 1 [[(count pgm)]] labels)))) | null | https://raw.githubusercontent.com/cgrand/parsnip/48ab030f2645b47d77f5f22d3a0c4d5dc4b3e688/src/parsnip/asm.clj | clojure | (ns parsnip.asm)
(defn- map-targets-drop-labels [f pgm]
(vec (mapcat (fn [[op x]]
(case op
(:CALL :JUMP :FORK) [op (f x)]
:LABEL nil
[op x])) (partition 2 pgm))))
(defn link [pgm]
(let [labels (reduce (fn [labels pc]
(let [label (nth pgm (inc pc))
pc (- pc (* 2 (count labels)))]
(when-some [pc' (labels label)]
(throw (ex-info "Label used twice." {:label label :pcs [pc' pc]})))
(assoc labels label pc)))
{}
(filter #(= :LABEL (nth pgm %)) (range 0 (count pgm) 2)))]
(map-targets-drop-labels #(or (labels %) (throw (ex-info "Label not found." {:label (labels %)}))) pgm)))
(defn unlink [pgm]
(let [labels (into (sorted-map) (keep (fn [[op arg]] (case op (:FORK :JUMP :CALL) [arg (gensym :label_)] nil)) (partition 2 pgm)))
slice (fn [from to]
(map-targets-drop-labels labels (subvec pgm from to)))]
(reduce (fn [unlinked-pgm [[from label] [to]]]
(-> unlinked-pgm
(conj :LABEL label)
(into (slice from to))))
(slice 0 (first (keys labels)))
(partition 2 1 [[(count pgm)]] labels)))) |
|
a53a3bdb766a86c7deb0aa2bfcc155981722a06300bd4bdd259d6a927c50c09e | maoe/influxdb-haskell | Ping.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
#if __GLASGOW_HASKELL__ >= 800
# OPTIONS_GHC -Wno - missing - signatures #
#else
# OPTIONS_GHC -fno - warn - missing - signatures #
#endif
module Database.InfluxDB.Ping
* interface
ping
* parameters
, PingParams
, pingParams
, server
, manager
, timeout
-- * Pong
, Pong
, roundtripTime
, influxdbVersion
) where
import Control.Exception
import Control.Lens
import Data.Time.Clock (NominalDiffTime)
import System.Clock
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as TE
import qualified Network.HTTP.Client as HC
import Database.InfluxDB.Types as Types
-- $setup
-- >>> import Database.InfluxDB.Ping
-- Ping requests do not require authentication
-- | The full set of parameters for the ping API
--
-- Following lenses are available to access its fields:
--
-- * 'server'
-- * 'manager'
-- * 'timeout'
data PingParams = PingParams
{ pingServer :: !Server
-- ^ InfluxDB server to ping
, pingManager :: !(Either HC.ManagerSettings HC.Manager)
-- ^ HTTP connection manager
, pingTimeout :: !(Maybe NominalDiffTime)
-- ^ Timeout
}
-- | Smart constructor for 'PingParams'
--
-- Default parameters:
--
-- ['server'] 'defaultServer'
-- ['manager'] @'Left' 'HC.defaultManagerSettings'@
-- ['timeout'] 'Nothing'
pingParams :: PingParams
pingParams = PingParams
{ pingServer = defaultServer
, pingManager = Left HC.defaultManagerSettings
, pingTimeout = Nothing
}
-- Template Haskell: generate lenses for PingParams' fields under the
-- names given below (signatures suppressed; written by hand after).
makeLensesWith
( lensRules
& generateSignatures .~ False
& lensField .~ lookingupNamer
[ ("pingServer", "_server")
, ("pingManager", "_manager")
, ("pingTimeout", "timeout")
]
)
''PingParams
-- |
-- >>> pingParams ^. server.host
-- "localhost"
instance HasServer PingParams where
server = _server
-- |
-- >>> let p = pingParams & manager .~ Left HC.defaultManagerSettings
instance HasManager PingParams where
manager = _manager
-- | The number of seconds to wait before returning a response
--
-- >>> pingParams ^. timeout
-- Nothing
-- >>> let p = pingParams & timeout ?~ 1
timeout :: Lens' PingParams (Maybe NominalDiffTime)
pingRequest :: PingParams -> HC.Request
pingRequest PingParams {..} = HC.defaultRequest
{ HC.host = TE.encodeUtf8 _host
, HC.port = fromIntegral _port
, HC.secure = _ssl
, HC.method = "GET"
, HC.path = "/ping"
}
where
Server {..} = pingServer
-- | Response of a ping request
data Pong = Pong
{ _roundtripTime :: !TimeSpec
-- ^ Round-trip time of the ping
, _influxdbVersion :: !BS.ByteString
-- ^ Version string returned by InfluxDB
} deriving (Show, Eq, Ord)
makeLensesWith (lensRules & generateSignatures .~ False) ''Pong
-- | Round-trip time of the ping
roundtripTime :: Lens' Pong TimeSpec
-- | Version string returned by InfluxDB
influxdbVersion :: Lens' Pong BS.ByteString
-- | Send a ping to InfluxDB.
--
-- It may throw an 'InfluxException'.
ping :: PingParams -> IO Pong
ping params = do
-- Build a fresh manager from settings, or reuse a supplied one.
manager' <- either HC.newManager return $ pingManager params
-- Monotonic clock on both sides of the request: wall-clock jumps
-- must not distort the measured round-trip time.
startTime <- getTimeMonotonic
HC.withResponse request manager' $ \response -> do
endTime <- getTimeMonotonic
case lookup "X-Influxdb-Version" (HC.responseHeaders response) of
Just version ->
return $! Pong (diffTimeSpec endTime startTime) version
Nothing ->
throwIO $ UnexpectedResponse
"The X-Influxdb-Version header was missing in the response."
request
""
-- HTTP-level failures are re-thrown as this library's exception.
`catch` (throwIO . HTTPException)
where
request = (pingRequest params)
{ HC.responseTimeout = case pingTimeout params of
Nothing -> HC.responseTimeoutNone
-- Convert seconds to the microseconds the manager expects.
Just sec -> HC.responseTimeoutMicro $
round $ realToFrac sec / (10**(-6) :: Double)
}
getTimeMonotonic = getTime Monotonic
| null | https://raw.githubusercontent.com/maoe/influxdb-haskell/25c5d91e7d6e9643e0944df2896e1ad8d4c22b26/src/Database/InfluxDB/Ping.hs | haskell | # LANGUAGE OverloadedStrings #
* Pong
$setup
| The full set of parameters for the ping API
Following lenses are available to access its fields:
* 'server'
* 'manager'
* 'timeout'
^ HTTP connection manager
^ Timeout
Default parameters:
['server'] 'defaultServer'
['timeout'] 'Nothing'
|
>>> pingParams ^. server.host
"localhost"
|
>>> pingParams ^. timeout
Nothing
| Response of a ping request
^ Round-trip time of the ping
^ Version string returned by InfluxDB
| Round-trip time of the ping
| Version string returned by InfluxDB
| Send a ping to InfluxDB.
It may throw an 'InfluxException'. | # LANGUAGE CPP #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
#if __GLASGOW_HASKELL__ >= 800
# OPTIONS_GHC -Wno - missing - signatures #
#else
# OPTIONS_GHC -fno - warn - missing - signatures #
#endif
module Database.InfluxDB.Ping
* interface
ping
* parameters
, PingParams
, pingParams
, server
, manager
, timeout
, Pong
, roundtripTime
, influxdbVersion
) where
import Control.Exception
import Control.Lens
import Data.Time.Clock (NominalDiffTime)
import System.Clock
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as TE
import qualified Network.HTTP.Client as HC
import Database.InfluxDB.Types as Types
> > > import Database . InfluxDB.Ping
requests do not require authentication
data PingParams = PingParams
{ pingServer :: !Server
, pingManager :: !(Either HC.ManagerSettings HC.Manager)
, pingTimeout :: !(Maybe NominalDiffTime)
}
| Smart constructor for ' PingParams '
[ ' manager ' ] @'Left ' ' HC.defaultManagerSettings'@
pingParams :: PingParams
pingParams = PingParams
{ pingServer = defaultServer
, pingManager = Left HC.defaultManagerSettings
, pingTimeout = Nothing
}
makeLensesWith
( lensRules
& generateSignatures .~ False
& lensField .~ lookingupNamer
[ ("pingServer", "_server")
, ("pingManager", "_manager")
, ("pingTimeout", "timeout")
]
)
''PingParams
instance HasServer PingParams where
server = _server
> > > let p = pingParams & manager .~ Left HC.defaultManagerSettings
instance HasManager PingParams where
manager = _manager
| The number of seconds to wait before returning a response
> > > let p = pingParams & timeout ? ~ 1
timeout :: Lens' PingParams (Maybe NominalDiffTime)
pingRequest :: PingParams -> HC.Request
pingRequest PingParams {..} = HC.defaultRequest
{ HC.host = TE.encodeUtf8 _host
, HC.port = fromIntegral _port
, HC.secure = _ssl
, HC.method = "GET"
, HC.path = "/ping"
}
where
Server {..} = pingServer
data Pong = Pong
{ _roundtripTime :: !TimeSpec
, _influxdbVersion :: !BS.ByteString
} deriving (Show, Eq, Ord)
makeLensesWith (lensRules & generateSignatures .~ False) ''Pong
roundtripTime :: Lens' Pong TimeSpec
influxdbVersion :: Lens' Pong BS.ByteString
ping :: PingParams -> IO Pong
ping params = do
manager' <- either HC.newManager return $ pingManager params
startTime <- getTimeMonotonic
HC.withResponse request manager' $ \response -> do
endTime <- getTimeMonotonic
case lookup "X-Influxdb-Version" (HC.responseHeaders response) of
Just version ->
return $! Pong (diffTimeSpec endTime startTime) version
Nothing ->
throwIO $ UnexpectedResponse
"The X-Influxdb-Version header was missing in the response."
request
""
`catch` (throwIO . HTTPException)
where
request = (pingRequest params)
{ HC.responseTimeout = case pingTimeout params of
Nothing -> HC.responseTimeoutNone
Just sec -> HC.responseTimeoutMicro $
round $ realToFrac sec / (10**(-6) :: Double)
}
getTimeMonotonic = getTime Monotonic
|
648b1b2f271db8b50b242a698d8b47b519c8a600de3a331779791de21485b5a7 | emqx/emqx | emqx_delayed_api_SUITE.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2020 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_delayed_api_SUITE).
-compile(nowarn_export_all).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
%% Raw base config loaded into emqx_modules_schema for the suite.
%% NOTE(review): the key <<"dealyed">> looks like a typo for
%% <<"delayed">> -- confirm whether the schema silently ignores unknown
%% keys before renaming, since the suite currently passes as-is.
-define(BASE_CONF, #{
<<"dealyed">> => <<"true">>,
<<"max_delayed_messages">> => <<"0">>
}).
-import(emqx_mgmt_api_test_util, [request/2, request/3, uri/1]).
%% Run every t_* testcase discovered in this module.
all() ->
emqx_common_test_helpers:all(?MODULE).
%% Load the suite's raw config, start the management-API test setup
%% with the required apps, and enable the delayed-publish module.
init_per_suite(Config) ->
ok = emqx_common_test_helpers:load_config(emqx_modules_schema, ?BASE_CONF, #{
raw_with_default => true
}),
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_conf, emqx_modules]
),
emqx_delayed:load(),
Config.
end_per_suite(Config) ->
ok = emqx_delayed:unload(),
emqx_mgmt_api_test_util:end_suite([emqx_conf, emqx_modules]),
Config.
%% Each case needs the cluster-RPC process running.
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(),
Config.
%%------------------------------------------------------------------------------
%% Test Cases
%%------------------------------------------------------------------------------
%% PUT/GET of the delayed-publish settings: valid updates round-trip,
%% a negative max_delayed_messages is rejected with 400, and the final
%% GET reflects the last accepted configuration.
t_status(_Config) ->
Path = uri(["mqtt", "delayed"]),
{ok, 200, R1} = request(
put,
Path,
#{enable => false, max_delayed_messages => 10}
),
?assertMatch(#{enable := false, max_delayed_messages := 10}, decode_json(R1)),
{ok, 200, R2} = request(
put,
Path,
#{enable => true, max_delayed_messages => 12}
),
?assertMatch(#{enable := true, max_delayed_messages := 12}, decode_json(R2)),
%% Partial update (enable only) is accepted.
?assertMatch(
{ok, 200, _},
request(
put,
Path,
#{enable => true}
)
),
%% Negative limit must be rejected.
?assertMatch(
{ok, 400, _},
request(
put,
Path,
#{enable => true, max_delayed_messages => -5}
)
),
{ok, 200, ConfJson} = request(get, Path),
ReturnConf = decode_json(ConfJson),
?assertMatch(#{enable := true, max_delayed_messages := 12}, ReturnConf).
%% End-to-end exercise of the delayed-messages REST API: publish five
%% delayed messages, list them, look one up, probe the error branches,
%% then delete it and verify one fewer message remains.
t_messages(_) ->
clear_all_record(),
emqx_delayed:load(),
{ok, C1} = emqtt:start_link([{clean_start, true}]),
{ok, _} = emqtt:connect(C1),
timer:sleep(500),
%% Publish retained messages via the "$delayed/<Secs>/msgs" topic
%% prefix with delays of 61..65 s so they stay queued for the test.
Each = fun(I) ->
Topic = list_to_binary(io_lib:format("$delayed/~B/msgs", [I + 60])),
emqtt:publish(
C1,
Topic,
<<"">>,
[{qos, 0}, {retain, true}]
)
end,
lists:foreach(Each, lists:seq(1, 5)),
timer:sleep(1000),
Msgs = get_messages(5),
[First | _] = Msgs,
%% Every listed entry carries the full delayed-message metadata and
%% the delay prefix is stripped from the topic.
?assertMatch(
#{
delayed_interval := _,
delayed_remaining := _,
expected_at := _,
from_clientid := _,
from_username := _,
msgid := _,
node := _,
publish_at := _,
qos := _,
topic := <<"msgs">>
},
First
),
MsgId = maps:get(msgid, First),
{ok, 200, LookupMsg} = request(
get,
uri(["mqtt", "delayed", "messages", node(), MsgId])
),
?assertEqual(MsgId, maps:get(msgid, decode_json(LookupMsg))),
%% Unknown (but well-formed) message id -> 404.
?assertMatch(
{ok, 404, _},
request(
get,
uri(["mqtt", "delayed", "messages", node(), emqx_guid:to_hexstr(emqx_guid:gen())])
)
),
%% Malformed message id -> 400.
?assertMatch(
{ok, 400, _},
request(
get,
uri(["mqtt", "delayed", "messages", node(), "invalid_msg_id"])
)
),
%% Node names that are not part of the cluster -> 400.
?assertMatch(
{ok, 400, _},
request(
get,
uri(["mqtt", "delayed", "messages", atom_to_list('[email protected]'), MsgId])
)
),
?assertMatch(
{ok, 400, _},
request(
get,
uri(["mqtt", "delayed", "messages", "some_unknown_atom", MsgId])
)
),
%% DELETE of an unknown id -> 404; of a real id -> 204.
?assertMatch(
{ok, 404, _},
request(
delete,
uri(["mqtt", "delayed", "messages", node(), emqx_guid:to_hexstr(emqx_guid:gen())])
)
),
?assertMatch(
{ok, 204, _},
request(
delete,
uri(["mqtt", "delayed", "messages", node(), MsgId])
)
),
_ = get_messages(4),
ok = emqtt:disconnect(C1).
%% A delayed message with an oversized payload (5000 bytes) is listed,
%% but the lookup endpoint replaces the payload with the
%% PAYLOAD_TOO_LARGE marker instead of returning it.
t_large_payload(_) ->
clear_all_record(),
emqx_delayed:load(),
{ok, C1} = emqtt:start_link([{clean_start, true}]),
{ok, _} = emqtt:connect(C1),
timer:sleep(500),
Topic = <<"$delayed/123/msgs">>,
emqtt:publish(
C1,
Topic,
iolist_to_binary([<<"x">> || _ <- lists:seq(1, 5000)]),
[{qos, 0}, {retain, true}]
),
timer:sleep(1000),
[#{msgid := MsgId}] = get_messages(1),
{ok, 200, Msg} = request(
get,
uri(["mqtt", "delayed", "messages", node(), MsgId])
),
?assertMatch(
#{
payload := <<"PAYLOAD_TOO_LARGE">>,
topic := <<"msgs">>
},
decode_json(Msg)
).
%%--------------------------------------------------------------------
%% HTTP Request
%%--------------------------------------------------------------------
%% Decode a JSON body into a map whose keys are converted to atoms.
decode_json(Data) ->
BinJson = emqx_json:decode(Data, [return_maps]),
emqx_map_lib:unsafe_atom_key_map(BinJson).
%% Wipe all queued delayed messages between cases.
clear_all_record() ->
ets:delete_all_objects(emqx_delayed).
%% Fetch the delayed-message list and assert it contains exactly Len
%% entries; the failure message includes the emqx_delayed pid and the
%% hook table to ease debugging flaky runs. Returns the messages.
get_messages(Len) ->
{ok, 200, MsgsJson} = request(get, uri(["mqtt", "delayed", "messages"])),
#{data := Msgs} = decode_json(MsgsJson),
MsgLen = erlang:length(Msgs),
?assertEqual(
Len,
MsgLen,
lists:flatten(
io_lib:format("message length is:~p~nWhere:~p~nHooks:~p~n", [
MsgLen, erlang:whereis(emqx_delayed), ets:tab2list(emqx_hooks)
])
)
),
Msgs.
| null | https://raw.githubusercontent.com/emqx/emqx/a26c05f4f6d332364aa4195818ee0d6d95dadbbe/apps/emqx_modules/test/emqx_delayed_api_SUITE.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
------------------------------------------------------------------------------
Test Cases
------------------------------------------------------------------------------
--------------------------------------------------------------------
HTTP Request
-------------------------------------------------------------------- | Copyright ( c ) 2020 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_delayed_api_SUITE).
-compile(nowarn_export_all).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(BASE_CONF, #{
<<"dealyed">> => <<"true">>,
<<"max_delayed_messages">> => <<"0">>
}).
-import(emqx_mgmt_api_test_util, [request/2, request/3, uri/1]).
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
ok = emqx_common_test_helpers:load_config(emqx_modules_schema, ?BASE_CONF, #{
raw_with_default => true
}),
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_conf, emqx_modules]
),
emqx_delayed:load(),
Config.
end_per_suite(Config) ->
ok = emqx_delayed:unload(),
emqx_mgmt_api_test_util:end_suite([emqx_conf, emqx_modules]),
Config.
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(),
Config.
t_status(_Config) ->
Path = uri(["mqtt", "delayed"]),
{ok, 200, R1} = request(
put,
Path,
#{enable => false, max_delayed_messages => 10}
),
?assertMatch(#{enable := false, max_delayed_messages := 10}, decode_json(R1)),
{ok, 200, R2} = request(
put,
Path,
#{enable => true, max_delayed_messages => 12}
),
?assertMatch(#{enable := true, max_delayed_messages := 12}, decode_json(R2)),
?assertMatch(
{ok, 200, _},
request(
put,
Path,
#{enable => true}
)
),
?assertMatch(
{ok, 400, _},
request(
put,
Path,
#{enable => true, max_delayed_messages => -5}
)
),
{ok, 200, ConfJson} = request(get, Path),
ReturnConf = decode_json(ConfJson),
?assertMatch(#{enable := true, max_delayed_messages := 12}, ReturnConf).
t_messages(_) ->
clear_all_record(),
emqx_delayed:load(),
{ok, C1} = emqtt:start_link([{clean_start, true}]),
{ok, _} = emqtt:connect(C1),
timer:sleep(500),
Each = fun(I) ->
Topic = list_to_binary(io_lib:format("$delayed/~B/msgs", [I + 60])),
emqtt:publish(
C1,
Topic,
<<"">>,
[{qos, 0}, {retain, true}]
)
end,
lists:foreach(Each, lists:seq(1, 5)),
timer:sleep(1000),
Msgs = get_messages(5),
[First | _] = Msgs,
?assertMatch(
#{
delayed_interval := _,
delayed_remaining := _,
expected_at := _,
from_clientid := _,
from_username := _,
msgid := _,
node := _,
publish_at := _,
qos := _,
topic := <<"msgs">>
},
First
),
MsgId = maps:get(msgid, First),
{ok, 200, LookupMsg} = request(
get,
uri(["mqtt", "delayed", "messages", node(), MsgId])
),
?assertEqual(MsgId, maps:get(msgid, decode_json(LookupMsg))),
?assertMatch(
{ok, 404, _},
request(
get,
uri(["mqtt", "delayed", "messages", node(), emqx_guid:to_hexstr(emqx_guid:gen())])
)
),
?assertMatch(
{ok, 400, _},
request(
get,
uri(["mqtt", "delayed", "messages", node(), "invalid_msg_id"])
)
),
?assertMatch(
{ok, 400, _},
request(
get,
uri(["mqtt", "delayed", "messages", atom_to_list('[email protected]'), MsgId])
)
),
?assertMatch(
{ok, 400, _},
request(
get,
uri(["mqtt", "delayed", "messages", "some_unknown_atom", MsgId])
)
),
?assertMatch(
{ok, 404, _},
request(
delete,
uri(["mqtt", "delayed", "messages", node(), emqx_guid:to_hexstr(emqx_guid:gen())])
)
),
?assertMatch(
{ok, 204, _},
request(
delete,
uri(["mqtt", "delayed", "messages", node(), MsgId])
)
),
_ = get_messages(4),
ok = emqtt:disconnect(C1).
t_large_payload(_) ->
clear_all_record(),
emqx_delayed:load(),
{ok, C1} = emqtt:start_link([{clean_start, true}]),
{ok, _} = emqtt:connect(C1),
timer:sleep(500),
Topic = <<"$delayed/123/msgs">>,
emqtt:publish(
C1,
Topic,
iolist_to_binary([<<"x">> || _ <- lists:seq(1, 5000)]),
[{qos, 0}, {retain, true}]
),
timer:sleep(1000),
[#{msgid := MsgId}] = get_messages(1),
{ok, 200, Msg} = request(
get,
uri(["mqtt", "delayed", "messages", node(), MsgId])
),
?assertMatch(
#{
payload := <<"PAYLOAD_TOO_LARGE">>,
topic := <<"msgs">>
},
decode_json(Msg)
).
decode_json(Data) ->
BinJson = emqx_json:decode(Data, [return_maps]),
emqx_map_lib:unsafe_atom_key_map(BinJson).
clear_all_record() ->
ets:delete_all_objects(emqx_delayed).
get_messages(Len) ->
{ok, 200, MsgsJson} = request(get, uri(["mqtt", "delayed", "messages"])),
#{data := Msgs} = decode_json(MsgsJson),
MsgLen = erlang:length(Msgs),
?assertEqual(
Len,
MsgLen,
lists:flatten(
io_lib:format("message length is:~p~nWhere:~p~nHooks:~p~n", [
MsgLen, erlang:whereis(emqx_delayed), ets:tab2list(emqx_hooks)
])
)
),
Msgs.
|
182f6efe809ac07a0fea1babb273db2de13f7c4f949962fd2d5ef61e1891f98b | feeley/define-library | read.scm | (define-library (scheme read)
(namespace "")
(export
read
))
| null | https://raw.githubusercontent.com/feeley/define-library/56a6eda7ef9248751f4cada832edf98f5c6bb469/scheme/read/read.scm | scheme | (define-library (scheme read)
(namespace "")
(export
read
))
|
|
fc667a94219d38f73124773289a2f8ad34128a5d5c3395a2ea715e8d448e4561 | input-output-hk/cardano-addresses | DelegationSpec.hs | # LANGUAGE FlexibleContexts #
module Command.Address.DelegationSpec
( spec
) where
import Prelude
import Test.Hspec
( Spec, SpecWith, it, shouldBe, shouldContain )
import Test.Utils
( cli, describeCmd )
spec :: Spec
-- Golden tests for `cardano-address address delegation`: each case
-- extends a known payment address with delegation material derived
-- from the fixed recovery phrase, and checks the exact bech32 output.
spec = describeCmd [ "address", "delegation" ] $ do
-- Extended stake key (with chain code), mainnet then testnet.
specFromExtendedKey defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrMainnet
"addr1q9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwvxwdrt\
\70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qdqhgvu"
specFromExtendedKey defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrTestnet
"addr_test1qptherz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwv\
\xwdrt70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qwk2gqr"
-- Non-extended key and raw key hash must yield the same addresses.
specFromNonextendedKey defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrMainnet
"addr1q9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwvxwdrt\
\70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qdqhgvu"
specFromNonextendedKey defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrTestnet
"addr_test1qptherz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwv\
\xwdrt70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qwk2gqr"
specFromKeyHash defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrMainnet
"addr1q9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwvxwdrt\
\70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qdqhgvu"
specFromKeyHash defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrTestnet
"addr_test1qptherz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwv\
\xwdrt70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qwk2gqr"
-- Delegation to a script hash.
specFromScript
defaultAddrMainnet
"all [stake_shared_vkh1nqc00hvlc6cq0sfhretk0rmzw8dywmusp8retuqnnxzajtzhjg5]"
"addr1y9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qfe5nnvf2a5vzmvdfhda0yw08qrj32kn4ytx2l7xpd08l7q0xlqfx"
-- Error handling: malformed / unsupported / already-full addresses
-- and bad delegation credentials.
specMalformedAddress "💩"
specMalformedAddress "\NUL"
specMalformedAddress
"Ae2tdPwUPEYz6ExfbWubiXPB6daUuhJxikMEb4eXRp5oKZBKZwrbJ2k7EZe"
specMalformedAddress
"DdzFFzCqrhsf6hiTYkK5gBAhVDwg3SiaHiEL9wZLYU3WqLUpx6DP\
\5ZRJr4rtNRXbVNfk89FCHCDR365647os9AEJ8MKZNvG7UKTpythG"
specInvalidAddress
"addr1qdu5vlrf4xkxv2qpwngf6cjhtw542ayty80v8dyr49rf5ewvxwdrt\
\70qlcpeeagscasafhffqsxy36t90ldv06wqrk2q5ggg4z"
specMalformedXPub "💩"
specInvalidXPub
"stake_xvk1qfqcf4tp4ensj5qypqs640rt06pe5x7v2eul00c7rakzzvsakw3caelfuh6cg6nrkdv9y2ctkeu"
-- Derive an extended (with chain code) stake public key from the
-- phrase at the given path, extend `addr` with it, expect `want`.
specFromExtendedKey :: [String] -> String -> String -> String -> SpecWith ()
specFromExtendedKey phrase path addr want = it ("delegation from key " <> want) $ do
stakeKey <- cli [ "key", "from-recovery-phrase", "shelley" ] (unwords phrase)
>>= cli [ "key", "child", path ]
>>= cli [ "key", "public", "--with-chain-code" ]
out <- cli [ "address", "delegation", stakeKey ] addr
out `shouldBe` want
-- Same, but with a non-extended public key (no chain code).
specFromNonextendedKey :: [String] -> String -> String -> String -> SpecWith ()
specFromNonextendedKey phrase path addr want = it ("delegation from key " <> want) $ do
stakeKey <- cli [ "key", "from-recovery-phrase", "shelley" ] (unwords phrase)
>>= cli [ "key", "child", path ]
>>= cli [ "key", "public", "--without-chain-code" ]
out <- cli [ "address", "delegation", stakeKey ] addr
out `shouldBe` want
-- Same, but passing the hash of the stake key.
specFromKeyHash :: [String] -> String -> String -> String -> SpecWith ()
specFromKeyHash phrase path addr want = it ("delegation from key " <> want) $ do
stakeKeyHash <- cli [ "key", "from-recovery-phrase", "shelley" ] (unwords phrase)
>>= cli [ "key", "child", path ]
>>= cli [ "key", "public", "--with-chain-code" ]
>>= cli [ "key", "hash" ]
out <- cli [ "address", "delegation", stakeKeyHash ] addr
out `shouldBe` want
-- Delegation credential given as a script hash.
specFromScript :: String -> String -> String -> SpecWith ()
specFromScript addr script want = it ("delegation from script " <> want) $ do
scriptHash <- cli [ "script", "hash", script ] ""
out <- cli [ "address", "delegation", scriptHash ] addr
out `shouldBe` want
-- Non-bech32 input must fail with a Bech32 error on stderr.
specMalformedAddress :: String -> SpecWith ()
specMalformedAddress addr = it ("malformed address " <> addr) $ do
(out, err) <- cli [ "address", "delegation", defaultXPub ] addr
out `shouldBe` ""
err `shouldContain` "Bech32 error"
-- Well-formed address of a kind that cannot take a delegation part.
specInvalidAddress :: String -> SpecWith ()
specInvalidAddress addr = it ("invalid address " <> addr) $ do
(out, err) <- cli [ "address", "delegation", defaultXPub ] addr
out `shouldBe` ""
err `shouldContain` "Only payment addresses can be extended"
specMalformedXPub :: String -> SpecWith ()
specMalformedXPub xpub = it ("malformed xpub " <> xpub) $ do
(out, err) <- cli [ "address", "delegation", xpub ] defaultAddrMainnet
out `shouldBe` ""
err `shouldContain` "Couldn't parse delegation credentials."
specInvalidXPub :: String -> SpecWith ()
specInvalidXPub xpub = it ("invalid xpub " <> xpub) $ do
(out, err) <- cli [ "address", "delegation", xpub ] defaultAddrMainnet
out `shouldBe` ""
err `shouldContain` "Couldn't parse delegation credentials."
-- Shared fixtures: recovery phrase, a stake xpub derived from it, and
-- the payment addresses the positive cases extend.
defaultPhrase :: [String]
defaultPhrase =
[ "art", "forum", "devote", "street", "sure"
, "rather", "head", "chuckle", "guard", "poverty"
, "release", "quote", "oak", "craft", "enemy"
]
defaultXPub :: String
defaultXPub =
"stake_xvk1z0lq4d73l4xtk42s3364s2fpn4m5xtuacfkfj4dxxt9uhccvl\
\g6pamdykgvcna3w4jf6zr3yqenuasug3gp22peqm6vduzrzw8uj6asu49xvf"
defaultAddrMainnet :: String
defaultAddrMainnet =
"addr1v9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qgx2curq"
defaultAddrTestnet :: String
defaultAddrTestnet =
"addr_test1vptherz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qgazvqv9"
| null | https://raw.githubusercontent.com/input-output-hk/cardano-addresses/d6dcd277d92c76e45d1024f7d82837fc0907aa12/command-line/test/Command/Address/DelegationSpec.hs | haskell | # LANGUAGE FlexibleContexts #
module Command.Address.DelegationSpec
( spec
) where
import Prelude
import Test.Hspec
( Spec, SpecWith, it, shouldBe, shouldContain )
import Test.Utils
( cli, describeCmd )
spec :: Spec
spec = describeCmd [ "address", "delegation" ] $ do
specFromExtendedKey defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrMainnet
"addr1q9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwvxwdrt\
\70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qdqhgvu"
specFromExtendedKey defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrTestnet
"addr_test1qptherz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwv\
\xwdrt70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qwk2gqr"
specFromNonextendedKey defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrMainnet
"addr1q9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwvxwdrt\
\70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qdqhgvu"
specFromNonextendedKey defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrTestnet
"addr_test1qptherz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwv\
\xwdrt70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qwk2gqr"
specFromKeyHash defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrMainnet
"addr1q9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwvxwdrt\
\70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qdqhgvu"
specFromKeyHash defaultPhrase "1852H/1815H/0H/2/0"
defaultAddrTestnet
"addr_test1qptherz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qwv\
\xwdrt70qlcpeeagscasafhffqsxy36t90ldv06wqrk2qwk2gqr"
specFromScript
defaultAddrMainnet
"all [stake_shared_vkh1nqc00hvlc6cq0sfhretk0rmzw8dywmusp8retuqnnxzajtzhjg5]"
"addr1y9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qfe5nnvf2a5vzmvdfhda0yw08qrj32kn4ytx2l7xpd08l7q0xlqfx"
specMalformedAddress "💩"
specMalformedAddress "\NUL"
specMalformedAddress
"Ae2tdPwUPEYz6ExfbWubiXPB6daUuhJxikMEb4eXRp5oKZBKZwrbJ2k7EZe"
specMalformedAddress
"DdzFFzCqrhsf6hiTYkK5gBAhVDwg3SiaHiEL9wZLYU3WqLUpx6DP\
\5ZRJr4rtNRXbVNfk89FCHCDR365647os9AEJ8MKZNvG7UKTpythG"
specInvalidAddress
"addr1qdu5vlrf4xkxv2qpwngf6cjhtw542ayty80v8dyr49rf5ewvxwdrt\
\70qlcpeeagscasafhffqsxy36t90ldv06wqrk2q5ggg4z"
specMalformedXPub "💩"
specInvalidXPub
"stake_xvk1qfqcf4tp4ensj5qypqs640rt06pe5x7v2eul00c7rakzzvsakw3caelfuh6cg6nrkdv9y2ctkeu"
specFromExtendedKey :: [String] -> String -> String -> String -> SpecWith ()
specFromExtendedKey phrase path addr want = it ("delegation from key " <> want) $ do
stakeKey <- cli [ "key", "from-recovery-phrase", "shelley" ] (unwords phrase)
>>= cli [ "key", "child", path ]
>>= cli [ "key", "public", "--with-chain-code" ]
out <- cli [ "address", "delegation", stakeKey ] addr
out `shouldBe` want
specFromNonextendedKey :: [String] -> String -> String -> String -> SpecWith ()
specFromNonextendedKey phrase path addr want = it ("delegation from key " <> want) $ do
stakeKey <- cli [ "key", "from-recovery-phrase", "shelley" ] (unwords phrase)
>>= cli [ "key", "child", path ]
>>= cli [ "key", "public", "--without-chain-code" ]
out <- cli [ "address", "delegation", stakeKey ] addr
out `shouldBe` want
specFromKeyHash :: [String] -> String -> String -> String -> SpecWith ()
specFromKeyHash phrase path addr want = it ("delegation from key " <> want) $ do
stakeKeyHash <- cli [ "key", "from-recovery-phrase", "shelley" ] (unwords phrase)
>>= cli [ "key", "child", path ]
>>= cli [ "key", "public", "--with-chain-code" ]
>>= cli [ "key", "hash" ]
out <- cli [ "address", "delegation", stakeKeyHash ] addr
out `shouldBe` want
specFromScript :: String -> String -> String -> SpecWith ()
specFromScript addr script want = it ("delegation from script " <> want) $ do
scriptHash <- cli [ "script", "hash", script ] ""
out <- cli [ "address", "delegation", scriptHash ] addr
out `shouldBe` want
specMalformedAddress :: String -> SpecWith ()
specMalformedAddress addr = it ("malformed address " <> addr) $ do
(out, err) <- cli [ "address", "delegation", defaultXPub ] addr
out `shouldBe` ""
err `shouldContain` "Bech32 error"
specInvalidAddress :: String -> SpecWith ()
specInvalidAddress addr = it ("invalid address " <> addr) $ do
(out, err) <- cli [ "address", "delegation", defaultXPub ] addr
out `shouldBe` ""
err `shouldContain` "Only payment addresses can be extended"
specMalformedXPub :: String -> SpecWith ()
specMalformedXPub xpub = it ("malformed xpub " <> xpub) $ do
(out, err) <- cli [ "address", "delegation", xpub ] defaultAddrMainnet
out `shouldBe` ""
err `shouldContain` "Couldn't parse delegation credentials."
specInvalidXPub :: String -> SpecWith ()
specInvalidXPub xpub = it ("invalid xpub " <> xpub) $ do
(out, err) <- cli [ "address", "delegation", xpub ] defaultAddrMainnet
out `shouldBe` ""
err `shouldContain` "Couldn't parse delegation credentials."
defaultPhrase :: [String]
defaultPhrase =
[ "art", "forum", "devote", "street", "sure"
, "rather", "head", "chuckle", "guard", "poverty"
, "release", "quote", "oak", "craft", "enemy"
]
defaultXPub :: String
defaultXPub =
"stake_xvk1z0lq4d73l4xtk42s3364s2fpn4m5xtuacfkfj4dxxt9uhccvl\
\g6pamdykgvcna3w4jf6zr3yqenuasug3gp22peqm6vduzrzw8uj6asu49xvf"
defaultAddrMainnet :: String
defaultAddrMainnet =
"addr1v9therz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qgx2curq"
defaultAddrTestnet :: String
defaultAddrTestnet =
"addr_test1vptherz8fgux9ywdysrcpaclznyyvl23l2zfcery3f4m9qgazvqv9"
|
|
1c7a4b154ebec02d6652afe6ca4aca2d894866d513038992f7f2d8f8abdafc11 | dimitri/pgloader | regress.lisp | ;;;
;;; Regression tests driver.
;;;
;;; We're using SQL EXCEPT to compare what we loaded with what we expected
;;; to load.
;;;
(in-package #:pgloader)
(define-condition regression-test-error (error)
((filename :initarg :filename :reader regression-test-filename))
(:report (lambda (err stream)
(format stream
"Regression test failed: ~s"
(regression-test-filename err)))))
(defun process-regression-test (load-file &key start-logger)
"Run a regression test for given LOAD-FILE."
(unless (probe-file load-file)
(format t "Regression testing ~s: file does not exists." load-file)
#-pgloader-image (values nil +os-code-error-regress+)
#+pgloader-image (uiop:quit +os-code-error-regress+))
;; now do our work
(with-monitor (:start-logger start-logger)
(log-message :log "Regression testing: ~s" load-file)
(process-command-file (list load-file) :flush-summary nil)
;; once we are done running the load-file, compare the loaded data with
;; our expected data file
(bind ((expected-data-source
(regression-test-expected-data-source load-file))
((target-conn target-table-name gucs)
(parse-target-pg-db-uri load-file))
(target-table (create-table target-table-name))
(*pg-settings* (pgloader.pgsql:sanitize-user-gucs gucs))
(*pgsql-reserved-keywords* (list-reserved-keywords target-conn))
;; change target table-name schema
(expected-data-target
(let ((e-d-t (clone-connection target-conn)))
(setf (pgconn-table-name e-d-t)
;;
;; The connection facility still works with cons here,
;; rather than table structure instances, because of
;; depedencies as explained in
;; src/parsers/command-db-uri.lisp
;;
(cons "expected" (table-name target-table)))
e-d-t))
(expected-target-table
(create-table (cons "expected" (table-name target-table)))))
(log-message :log "Comparing loaded data against ~s"
(cdr (pgloader.sources::md-spec expected-data-source)))
;; prepare expected table in "expected" schema
(with-pgsql-connection (target-conn)
(with-schema (unqualified-table-name target-table)
(let* ((tname (apply-identifier-case unqualified-table-name))
(drop (format nil "drop table if exists expected.~a;"
tname))
(create (format nil "create table expected.~a(like ~a);"
tname tname)))
(log-message :notice "~a" drop)
(pomo:query drop)
(log-message :notice "~a" create)
(pomo:query create))))
;; load expected data
(load-data :from expected-data-source
:into expected-data-target
:target-table-name expected-target-table
:options '(:truncate t)
:start-logger nil
:flush-summary t)
;; now compare both
(with-pgsql-connection (target-conn)
(with-schema (unqualified-table-name target-table)
(let* ((tname (apply-identifier-case unqualified-table-name))
(cols (loop :for (name type)
:in (list-columns tname)
;;
;; We can't just use table names here, because
;; PostgreSQL support for the POINT datatype fails
;; to implement EXCEPT support, and the query then
;; fails with:
;;
;; could not identify an equality operator for type point
;;
:collect (if (string= "point" type)
(format nil "~s::text" name)
(format nil "~s" name))))
(sql (format nil
"select count(*) from (select ~{~a~^, ~} from expected.~a except select ~{~a~^, ~} from ~a) ss"
cols
tname
cols
tname))
(diff-count (pomo:query sql :single)))
(log-message :notice "~a" sql)
(log-message :notice "Got a diff of ~a rows" diff-count)
;; signal a regression test error when diff isn't 0
(unless (zerop diff-count)
(error 'regression-test-error :filename load-file))
(log-message :log "Regress pass.")
(values diff-count +os-code-success+)))))))
;;;
;;; TODO: use the catalogs structures and introspection facilities.
;;;
(defun list-columns (table-name &optional schema)
"Returns the list of columns for table TABLE-NAME in schema SCHEMA, and
must be run with an already established PostgreSQL connection."
(pomo:query (format nil "
select attname, t.oid::regtype
from pg_class c
join pg_namespace n on n.oid = c.relnamespace
left join pg_attribute a on c.oid = a.attrelid
join pg_type t on t.oid = a.atttypid
~a.~a~]'::regclass and > 0
order by attnum" schema schema table-name)))
;;;
;;; Helper functions
;;;
(defun regression-test-expected-data-source (load-file)
"Returns the source specification where to read the expected result for
the given LOAD-FILE."
(let* ((load-file-dir (uiop:pathname-directory-pathname
(if (uiop:absolute-pathname-p load-file)
load-file
(uiop:merge-pathnames* load-file
(uiop:getcwd)))))
(expected-subdir (uiop:native-namestring
(uiop:merge-pathnames* "regress/expected/"
load-file-dir)))
(expected-data-file (make-pathname :defaults load-file
:type "out"
:directory expected-subdir))
(expected-data-source (uiop:native-namestring expected-data-file)))
(parse-source-string-for-type :copy expected-data-source)))
| null | https://raw.githubusercontent.com/dimitri/pgloader/3047c9afe141763e9e7ec05b7f2a6aa97cf06801/src/regress/regress.lisp | lisp |
Regression tests driver.
We're using SQL EXCEPT to compare what we loaded with what we expected
to load.
now do our work
once we are done running the load-file, compare the loaded data with
our expected data file
change target table-name schema
The connection facility still works with cons here,
rather than table structure instances, because of
depedencies as explained in
src/parsers/command-db-uri.lisp
prepare expected table in "expected" schema
load expected data
now compare both
We can't just use table names here, because
PostgreSQL support for the POINT datatype fails
to implement EXCEPT support, and the query then
fails with:
could not identify an equality operator for type point
signal a regression test error when diff isn't 0
TODO: use the catalogs structures and introspection facilities.
Helper functions
|
(in-package #:pgloader)
(define-condition regression-test-error (error)
((filename :initarg :filename :reader regression-test-filename))
(:report (lambda (err stream)
(format stream
"Regression test failed: ~s"
(regression-test-filename err)))))
(defun process-regression-test (load-file &key start-logger)
"Run a regression test for given LOAD-FILE."
(unless (probe-file load-file)
(format t "Regression testing ~s: file does not exists." load-file)
#-pgloader-image (values nil +os-code-error-regress+)
#+pgloader-image (uiop:quit +os-code-error-regress+))
(with-monitor (:start-logger start-logger)
(log-message :log "Regression testing: ~s" load-file)
(process-command-file (list load-file) :flush-summary nil)
(bind ((expected-data-source
(regression-test-expected-data-source load-file))
((target-conn target-table-name gucs)
(parse-target-pg-db-uri load-file))
(target-table (create-table target-table-name))
(*pg-settings* (pgloader.pgsql:sanitize-user-gucs gucs))
(*pgsql-reserved-keywords* (list-reserved-keywords target-conn))
(expected-data-target
(let ((e-d-t (clone-connection target-conn)))
(setf (pgconn-table-name e-d-t)
(cons "expected" (table-name target-table)))
e-d-t))
(expected-target-table
(create-table (cons "expected" (table-name target-table)))))
(log-message :log "Comparing loaded data against ~s"
(cdr (pgloader.sources::md-spec expected-data-source)))
(with-pgsql-connection (target-conn)
(with-schema (unqualified-table-name target-table)
(let* ((tname (apply-identifier-case unqualified-table-name))
(drop (format nil "drop table if exists expected.~a;"
tname))
(create (format nil "create table expected.~a(like ~a);"
tname tname)))
(log-message :notice "~a" drop)
(pomo:query drop)
(log-message :notice "~a" create)
(pomo:query create))))
(load-data :from expected-data-source
:into expected-data-target
:target-table-name expected-target-table
:options '(:truncate t)
:start-logger nil
:flush-summary t)
(with-pgsql-connection (target-conn)
(with-schema (unqualified-table-name target-table)
(let* ((tname (apply-identifier-case unqualified-table-name))
(cols (loop :for (name type)
:in (list-columns tname)
:collect (if (string= "point" type)
(format nil "~s::text" name)
(format nil "~s" name))))
(sql (format nil
"select count(*) from (select ~{~a~^, ~} from expected.~a except select ~{~a~^, ~} from ~a) ss"
cols
tname
cols
tname))
(diff-count (pomo:query sql :single)))
(log-message :notice "~a" sql)
(log-message :notice "Got a diff of ~a rows" diff-count)
(unless (zerop diff-count)
(error 'regression-test-error :filename load-file))
(log-message :log "Regress pass.")
(values diff-count +os-code-success+)))))))
(defun list-columns (table-name &optional schema)
"Returns the list of columns for table TABLE-NAME in schema SCHEMA, and
must be run with an already established PostgreSQL connection."
(pomo:query (format nil "
select attname, t.oid::regtype
from pg_class c
join pg_namespace n on n.oid = c.relnamespace
left join pg_attribute a on c.oid = a.attrelid
join pg_type t on t.oid = a.atttypid
~a.~a~]'::regclass and > 0
order by attnum" schema schema table-name)))
(defun regression-test-expected-data-source (load-file)
"Returns the source specification where to read the expected result for
the given LOAD-FILE."
(let* ((load-file-dir (uiop:pathname-directory-pathname
(if (uiop:absolute-pathname-p load-file)
load-file
(uiop:merge-pathnames* load-file
(uiop:getcwd)))))
(expected-subdir (uiop:native-namestring
(uiop:merge-pathnames* "regress/expected/"
load-file-dir)))
(expected-data-file (make-pathname :defaults load-file
:type "out"
:directory expected-subdir))
(expected-data-source (uiop:native-namestring expected-data-file)))
(parse-source-string-for-type :copy expected-data-source)))
|
ee5ebc73024732011eb023786c2b9a3b61f80d6b7a98e51633908cf9d1d9993c | dyne/social-wallet-api | handler.clj | Social Wallet REST API
Copyright ( C ) 2017- Dyne.org foundation
designed , written and maintained by
< >
This file is part of Social Wallet REST API .
Social Wallet REST API is free software ; you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation , either version 3 of the License , or ( at your option ) any later version .
Social Wallet REST API is distributed in the hope that it will be useful , but WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License along with this program . If not , see < / > .
Additional permission under GNU AGPL version 3 section 7 .
If you modify Social Wallet REST API , or any covered work , by linking or combining it with any library ( or a modified version of that library ) , containing parts covered by the terms of EPL v 1.0 , the licensors of this Program grant you additional permission to convey the resulting work . Your modified version must prominently offer all users interacting with it remotely through a computer network ( if your version supports such interaction ) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge , through some standard or customary means of facilitating copying of software . Corresponding Source for a non - source form of such a combination shall include the source code for the parts of the libraries ( dependencies ) covered by the terms of EPL v 1.0 used as well as that of the covered work .
(ns social-wallet-api.test.handler
(:require [midje.sweet :refer :all]
[ring.mock.request :as mock]
[social-wallet-api.handler :as h]
[auxiliary.config :refer [config-read]]
[taoensso.timbre :as log]
[cheshire.core :as cheshire]))
(def test-app-name "social-wallet-api-test")
(def mongo-db-only {:connection "mongo"
:type "db-only"})
(defn parse-body [body]
(cheshire/parse-string (slurp body) true))
(against-background [(before :contents (h/init
(config-read test-app-name)
social-wallet-api.test.handler/test-app-name))
(after :contents (h/destroy))]
(facts "Some basic requests work properly"
(fact "Get the label using the blockchain type as string"
(let [response (h/app
(->
(mock/request :post "/wallet/v1/label")
(mock/content-type "application/json")
(mock/body (cheshire/generate-string mongo-db-only))))
body (parse-body (:body response))]
(:status response) => 200
body => {:currency "Testcoin"}))
(fact "Get the label using the blockchain type as keyword"
(let [response (h/app
(->
(mock/request :post "/wallet/v1/label")
(mock/content-type "application/json")
(mock/body (cheshire/generate-string mongo-db-only))))
body (parse-body (:body response))]
(:status response) => 200
body => {:currency "Testcoin"}))
(fact "Check that the amount returned after the creation of a transanction in mongo is the same as the input one"
(let [response (h/app
(->
(mock/request :post "/wallet/v1/transactions/new")
(mock/content-type "application/json")
(mock/body (cheshire/generate-string (merge
mongo-db-only
{:from-id "test-1"
:to-id "test-2"
:amount "0.1"
:tags ["blabla"]})))))
body (parse-body (:body response))]
(:status response) => 200
(:amount body) => 0.1))))
| null | https://raw.githubusercontent.com/dyne/social-wallet-api/72cc18989382297e1315a0ab4aac50b9882aa374/test/social_wallet_api/test/handler.clj | clojure | you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation , either version 3 of the License , or ( at your option ) any later version .
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for more details . | Social Wallet REST API
Copyright ( C ) 2017- Dyne.org foundation
designed , written and maintained by
< >
This file is part of Social Wallet REST API .
You should have received a copy of the GNU Affero General Public License along with this program . If not , see < / > .
Additional permission under GNU AGPL version 3 section 7 .
If you modify Social Wallet REST API , or any covered work , by linking or combining it with any library ( or a modified version of that library ) , containing parts covered by the terms of EPL v 1.0 , the licensors of this Program grant you additional permission to convey the resulting work . Your modified version must prominently offer all users interacting with it remotely through a computer network ( if your version supports such interaction ) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge , through some standard or customary means of facilitating copying of software . Corresponding Source for a non - source form of such a combination shall include the source code for the parts of the libraries ( dependencies ) covered by the terms of EPL v 1.0 used as well as that of the covered work .
(ns social-wallet-api.test.handler
(:require [midje.sweet :refer :all]
[ring.mock.request :as mock]
[social-wallet-api.handler :as h]
[auxiliary.config :refer [config-read]]
[taoensso.timbre :as log]
[cheshire.core :as cheshire]))
(def test-app-name "social-wallet-api-test")
(def mongo-db-only {:connection "mongo"
:type "db-only"})
(defn parse-body [body]
(cheshire/parse-string (slurp body) true))
(against-background [(before :contents (h/init
(config-read test-app-name)
social-wallet-api.test.handler/test-app-name))
(after :contents (h/destroy))]
(facts "Some basic requests work properly"
(fact "Get the label using the blockchain type as string"
(let [response (h/app
(->
(mock/request :post "/wallet/v1/label")
(mock/content-type "application/json")
(mock/body (cheshire/generate-string mongo-db-only))))
body (parse-body (:body response))]
(:status response) => 200
body => {:currency "Testcoin"}))
(fact "Get the label using the blockchain type as keyword"
(let [response (h/app
(->
(mock/request :post "/wallet/v1/label")
(mock/content-type "application/json")
(mock/body (cheshire/generate-string mongo-db-only))))
body (parse-body (:body response))]
(:status response) => 200
body => {:currency "Testcoin"}))
(fact "Check that the amount returned after the creation of a transanction in mongo is the same as the input one"
(let [response (h/app
(->
(mock/request :post "/wallet/v1/transactions/new")
(mock/content-type "application/json")
(mock/body (cheshire/generate-string (merge
mongo-db-only
{:from-id "test-1"
:to-id "test-2"
:amount "0.1"
:tags ["blabla"]})))))
body (parse-body (:body response))]
(:status response) => 200
(:amount body) => 0.1))))
|
70248e22eec073a592f9124621e133b7b2a27054c88968593261eca0816e955c | elaforge/karya | Z1.hs | Copyright 2013
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
| Korg Z1 keyboard .
module User.Elaforge.Instrument.Z1 where
import qualified Data.Bits as Bits
import Data.Bits ((.|.))
import qualified Data.ByteString as B
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as Char8
import qualified Data.Text as Text
import Data.Word (Word8)
import System.FilePath ((</>))
import qualified App.Config as Config
import qualified App.Path as Path
import qualified Cmd.Instrument.MidiInst as MidiInst
import qualified Cmd.Instrument.MidiInstDb as MidiInstDb
import qualified Derive.ScoreT as ScoreT
import qualified Instrument.Common as Common
import qualified Instrument.InstT as InstT
import qualified Instrument.Sysex as Sysex
import qualified Midi.Encode
import qualified Midi.Midi as Midi
import qualified Perform.Midi.Patch as Patch
import Global
import User.Elaforge.Instrument.Z1Spec
synth_name :: InstT.SynthName
synth_name = "z1"
load :: Path.AppDir -> IO (Maybe MidiInst.Synth)
load = MidiInstDb.load_synth (const mempty) synth_name "Korg Z1"
make_db :: Path.AppDir -> IO ()
make_db app_dir = do
let dir = Path.to_absolute app_dir Config.instrument_dir
</> untxt synth_name
bank_a <- Sysex.parse_builtins 0 program_dump (dir </> "bank_a.syx")
bank_b <- Sysex.parse_builtins 1 program_dump (dir </> "bank_b.syx")
sysex <- Sysex.parse_dir [current_program_dump, program_dump, sysex_manager]
(dir </> "sysex")
MidiInstDb.save_synth app_dir synth_name $
map (override_pb . MidiInst.patch_from_pair) $
concat [bank_a, bank_b, sysex]
where
current_program_dump =
fmap (:[]) . (rmap_to_patch <=< decode_current_program)
program_dump = mapM rmap_to_patch <=< decode_program_dump
-- Each patch has its own pb range, but you can override them in the
-- multiset.
override_pb = MidiInst.patch#Patch.defaults#Patch.pitch_bend_range
#= Just (-24, 24)
synth_controls :: [(Midi.Control, ScoreT.Control)]
synth_controls =
-- The PE controls are the "performance expression" knobs whose effect
-- depends on the instrument.
[ (19, "knob1"), (20, "knob2"), (21, "knob3"), (22, "knob4"), (23, "knob5")
, (16, "pad-x"), (17, "pad-y")
, (65, "port-sw") -- Turn portamento on and off.
, (80, "sw1"), (81, "sw2") -- General purpose on/off switches.
filter 1
, (85, "filter1-cutoff"), (86, "filter1-q"), (87, "filter1-eg")
, (24, "filter1-attack"), (25, "filter1-decay"), (26, "filter1-sustain")
, (27, "filter1-release")
filter 2
, (88, "filter2-cutoff"), (89, "filter2-q"), (90, "filter2-eg")
, (28, "filter2-attack"), (29, "filter2-decay"), (30, "filter2-sustain")
, (31, "filter2-release")
-- amp
, (76, "amp-attack"), (77, "amp-decay"), (78, "amp-sustain")
, (79, "amp-release")
]
-- * decode sysex
decode_current_program :: ByteString -> Either String Sysex.RMap
decode_current_program bytes = do
(header, bytes) <- decode current_program_dump_header bytes
(rmap, _) <- decode patch_spec (dekorg bytes)
return $ header <> rmap
-- | Decode a dump for a program at a certain memory location. This also
-- parses bank dumps, which are just encoded as a bunch of dumps at consecutive
-- memory locations.
decode_program_dump :: ByteString -> Either String [Sysex.RMap]
decode_program_dump bytes = do
-- If there is just one, then the bank and unit fields are valid.
-- Otherwise, they are 0.
(rmap, bytes) <- decode program_dump_header bytes
let syxs = exact_chunks
(spec_bytes patch_spec) (dekorg bytes)
mapM (fmap ((rmap <>) . fst) . decode patch_spec) syxs
sysex_manager :: ByteString -> Either String [(Patch.Patch, Common.Common ())]
sysex_manager bytes = do
bytes <- Sysex.expect_bytes bytes $ Char8.pack "Sysex Manager"
The first sysex is something else .
let sysexes = drop 1 $ Sysex.extract_sysex bytes
patches <- mapM (rmap_to_patch <=< decode_current_program) sysexes
-- Add the initialize here, since 'bytes' isn't actually a valid sysex.
return [(Sysex.initialize_sysex sysex patch, common)
| (sysex, (patch, common)) <- zip sysexes patches]
test_decode = do
let fn = " inst_db / z1 / sysex / / apollo44.syx "
let fn = " inst_db / z1 / sysex / lib1 / z1 o00o00 "
-- let fn = "inst_db/z1/sysex/lib1/z1 o00o05 Composite Synth.syx"
let fn = "inst_db/z1/sysex/lib1/z1 o00o00 .C.H.A.O.S..syx"
decode_current_program <$> B.readFile fn
-- * encode sysex
-- set_pitch_bend fn = do
-- bytes <- B.readFile fn
-- records <- require "parse" $ decode_program_dump bytes
-- records <- require "set" $ mapM set records
set_bank_pitch_bend :: Bank -> FilePath -> IO ()
set_bank_pitch_bend bank fn = do
bytes <- B.readFile fn
records <- require "parse" $ decode_program_dump bytes
records <- require "set" $ mapM set records
bytes <- require "unparse" $ encode_bank_dump All bank records
B.writeFile (fn ++ ".modified") bytes
where
set = Sysex.put_rmap "pitch bend.intensity +" (24 :: Int)
<=< Sysex.put_rmap "pitch bend.intensity -" (-24 :: Int)
require msg = either (errorIO . ((msg <> ": ") <>) . txt) return
encode_current_program :: Sysex.RMap -> Either String ByteString
encode_current_program rmap =
encode_sysex (encode current_program_dump_header rmap)
(encode patch_spec rmap)
encode_program_dump :: Sysex.RMap -> Either String ByteString
encode_program_dump rmap =
encode_sysex (encode program_dump_header rmap)
(encode patch_spec rmap)
data Unit = Program | Bank | All deriving (Show)
data Bank = A | B deriving (Show)
encode_bank_dump :: Unit -> Bank -> [Sysex.RMap] -> Either String ByteString
encode_bank_dump unit bank rmaps = do
header_rmap <- set_bank $ Sysex.spec_to_rmap program_dump_header
encode_sysex (encode program_dump_header header_rmap)
(concatMapM (encode patch_spec) rmaps)
where
set_bank = Sysex.put_rmap "bank" (Text.toLower (showt bank))
<=< Sysex.put_rmap "unit" (Text.toLower (showt unit))
encode_sysex :: Either String ByteString -> Either String ByteString
-> Either String ByteString
encode_sysex encode_header encode_body = do
header <- encode_header
body <- encode_body
return $ header <> enkorg body <> B.singleton Midi.Encode.eox_byte
-- ** record
rmap_to_patch :: Sysex.RMap -> Either String (Patch.Patch, Common.Common ())
rmap_to_patch rmap = do
name <- get "name"
category <- get "category"
pb_range <- (,) <$> get "pitch bend.intensity -"
<*> get "pitch bend.intensity +"
osc1 <- get "osc.0.type"
osc2 <- get "osc.1.type"
let tags = [("category", category), ("z1-osc", osc1), ("z1-osc", osc2)]
let common = Common.tags #= tags $ Common.common ()
return (Patch.patch pb_range name, common)
where
get :: (Sysex.RecordVal a) => String -> Either String a
get = flip Sysex.get_rmap rmap
current_multi_data_dump :: Word8
current_multi_data_dump = 0x69
multi_data_dump :: Word8
multi_data_dump = 0x4d
| Z1 sysexes use a scheme where the eighth bits are packed into a single
byte preceeding its 7 7bit bytes .
dekorg :: ByteString -> ByteString
dekorg = mconcatMap smoosh . chunks 8
where
smoosh bs = case B.uncons bs of
Just (b7, bytes) -> snd $
B.mapAccumL (\i to -> (i+1, copy_bit b7 i to)) 0 bytes
Nothing -> mempty
copy_bit from i to = if Bits.testBit from i
then Bits.setBit to 7 else Bits.clearBit to 7
enkorg :: ByteString -> ByteString
enkorg = mconcatMap expand . chunks 7
where
expand bs = B.cons bits (B.map (`Bits.clearBit` 7) bs)
where bits = B.foldr get_bits 0 bs
get_bits b accum =
Bits.shiftL accum 1 .|. (if Bits.testBit b 7 then 1 else 0)
chunks :: Int -> ByteString -> [ByteString]
chunks size bs
| B.null pre = []
| otherwise = pre : chunks size post
where (pre, post) = B.splitAt size bs
exact_chunks :: Int -> ByteString -> [ByteString]
exact_chunks size bs
| B.length pre < size = []
| otherwise = pre : exact_chunks size post
where (pre, post) = B.splitAt size bs
-- * test
test_multiset = do
bytes <- B.drop 9 <$> B.readFile "inst_db/multi1.syx"
return $ decode multiset_spec (dekorg bytes)
test_dump = do
bytes <- B.readFile "inst_db/z1/bank_b.syx"
return $ decode_program_dump bytes
test_encode = do
bytes <- B.readFile "inst_db/z1/bank_b.syx"
let Right recs = decode_program_dump bytes
return $ encode patch_spec (head recs)
test_patch = do
bytes <- B.readFile
"inst_db/z1/sysex/lib1/z1 o00o00 ANALOG INIT.syx"
return $ decode_current_program bytes
read_patch = do
b <- dekorg . B.drop 6 <$> B.readFile
"inst_db/z1/sysex/lib1/z1 o00o00 ANALOG INIT.syx"
return $ decode patch_spec b
| null | https://raw.githubusercontent.com/elaforge/karya/de1b6e8cb0a17870801cc4dd49de8de62eb6c5fe/User/Elaforge/Instrument/Z1.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt
Each patch has its own pb range, but you can override them in the
multiset.
The PE controls are the "performance expression" knobs whose effect
depends on the instrument.
Turn portamento on and off.
General purpose on/off switches.
amp
* decode sysex
| Decode a dump for a program at a certain memory location. This also
parses bank dumps, which are just encoded as a bunch of dumps at consecutive
memory locations.
If there is just one, then the bank and unit fields are valid.
Otherwise, they are 0.
Add the initialize here, since 'bytes' isn't actually a valid sysex.
let fn = "inst_db/z1/sysex/lib1/z1 o00o05 Composite Synth.syx"
* encode sysex
set_pitch_bend fn = do
bytes <- B.readFile fn
records <- require "parse" $ decode_program_dump bytes
records <- require "set" $ mapM set records
** record
* test | Copyright 2013
| Korg Z1 keyboard .
module User.Elaforge.Instrument.Z1 where
import qualified Data.Bits as Bits
import Data.Bits ((.|.))
import qualified Data.ByteString as B
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as Char8
import qualified Data.Text as Text
import Data.Word (Word8)
import System.FilePath ((</>))
import qualified App.Config as Config
import qualified App.Path as Path
import qualified Cmd.Instrument.MidiInst as MidiInst
import qualified Cmd.Instrument.MidiInstDb as MidiInstDb
import qualified Derive.ScoreT as ScoreT
import qualified Instrument.Common as Common
import qualified Instrument.InstT as InstT
import qualified Instrument.Sysex as Sysex
import qualified Midi.Encode
import qualified Midi.Midi as Midi
import qualified Perform.Midi.Patch as Patch
import Global
import User.Elaforge.Instrument.Z1Spec
synth_name :: InstT.SynthName
synth_name = "z1"
load :: Path.AppDir -> IO (Maybe MidiInst.Synth)
load = MidiInstDb.load_synth (const mempty) synth_name "Korg Z1"
make_db :: Path.AppDir -> IO ()
make_db app_dir = do
let dir = Path.to_absolute app_dir Config.instrument_dir
</> untxt synth_name
bank_a <- Sysex.parse_builtins 0 program_dump (dir </> "bank_a.syx")
bank_b <- Sysex.parse_builtins 1 program_dump (dir </> "bank_b.syx")
sysex <- Sysex.parse_dir [current_program_dump, program_dump, sysex_manager]
(dir </> "sysex")
MidiInstDb.save_synth app_dir synth_name $
map (override_pb . MidiInst.patch_from_pair) $
concat [bank_a, bank_b, sysex]
where
current_program_dump =
fmap (:[]) . (rmap_to_patch <=< decode_current_program)
program_dump = mapM rmap_to_patch <=< decode_program_dump
override_pb = MidiInst.patch#Patch.defaults#Patch.pitch_bend_range
#= Just (-24, 24)
synth_controls :: [(Midi.Control, ScoreT.Control)]
synth_controls =
[ (19, "knob1"), (20, "knob2"), (21, "knob3"), (22, "knob4"), (23, "knob5")
, (16, "pad-x"), (17, "pad-y")
filter 1
, (85, "filter1-cutoff"), (86, "filter1-q"), (87, "filter1-eg")
, (24, "filter1-attack"), (25, "filter1-decay"), (26, "filter1-sustain")
, (27, "filter1-release")
filter 2
, (88, "filter2-cutoff"), (89, "filter2-q"), (90, "filter2-eg")
, (28, "filter2-attack"), (29, "filter2-decay"), (30, "filter2-sustain")
, (31, "filter2-release")
, (76, "amp-attack"), (77, "amp-decay"), (78, "amp-sustain")
, (79, "amp-release")
]
decode_current_program :: ByteString -> Either String Sysex.RMap
decode_current_program bytes = do
(header, bytes) <- decode current_program_dump_header bytes
(rmap, _) <- decode patch_spec (dekorg bytes)
return $ header <> rmap
decode_program_dump :: ByteString -> Either String [Sysex.RMap]
decode_program_dump bytes = do
(rmap, bytes) <- decode program_dump_header bytes
let syxs = exact_chunks
(spec_bytes patch_spec) (dekorg bytes)
mapM (fmap ((rmap <>) . fst) . decode patch_spec) syxs
sysex_manager :: ByteString -> Either String [(Patch.Patch, Common.Common ())]
sysex_manager bytes = do
bytes <- Sysex.expect_bytes bytes $ Char8.pack "Sysex Manager"
The first sysex is something else .
let sysexes = drop 1 $ Sysex.extract_sysex bytes
patches <- mapM (rmap_to_patch <=< decode_current_program) sysexes
return [(Sysex.initialize_sysex sysex patch, common)
| (sysex, (patch, common)) <- zip sysexes patches]
test_decode = do
let fn = " inst_db / z1 / sysex / / apollo44.syx "
let fn = " inst_db / z1 / sysex / lib1 / z1 o00o00 "
let fn = "inst_db/z1/sysex/lib1/z1 o00o00 .C.H.A.O.S..syx"
decode_current_program <$> B.readFile fn
set_bank_pitch_bend :: Bank -> FilePath -> IO ()
set_bank_pitch_bend bank fn = do
bytes <- B.readFile fn
records <- require "parse" $ decode_program_dump bytes
records <- require "set" $ mapM set records
bytes <- require "unparse" $ encode_bank_dump All bank records
B.writeFile (fn ++ ".modified") bytes
where
set = Sysex.put_rmap "pitch bend.intensity +" (24 :: Int)
<=< Sysex.put_rmap "pitch bend.intensity -" (-24 :: Int)
require msg = either (errorIO . ((msg <> ": ") <>) . txt) return
encode_current_program :: Sysex.RMap -> Either String ByteString
encode_current_program rmap =
encode_sysex (encode current_program_dump_header rmap)
(encode patch_spec rmap)
encode_program_dump :: Sysex.RMap -> Either String ByteString
encode_program_dump rmap =
encode_sysex (encode program_dump_header rmap)
(encode patch_spec rmap)
data Unit = Program | Bank | All deriving (Show)
data Bank = A | B deriving (Show)
encode_bank_dump :: Unit -> Bank -> [Sysex.RMap] -> Either String ByteString
encode_bank_dump unit bank rmaps = do
header_rmap <- set_bank $ Sysex.spec_to_rmap program_dump_header
encode_sysex (encode program_dump_header header_rmap)
(concatMapM (encode patch_spec) rmaps)
where
set_bank = Sysex.put_rmap "bank" (Text.toLower (showt bank))
<=< Sysex.put_rmap "unit" (Text.toLower (showt unit))
encode_sysex :: Either String ByteString -> Either String ByteString
-> Either String ByteString
encode_sysex encode_header encode_body = do
header <- encode_header
body <- encode_body
return $ header <> enkorg body <> B.singleton Midi.Encode.eox_byte
rmap_to_patch :: Sysex.RMap -> Either String (Patch.Patch, Common.Common ())
rmap_to_patch rmap = do
name <- get "name"
category <- get "category"
pb_range <- (,) <$> get "pitch bend.intensity -"
<*> get "pitch bend.intensity +"
osc1 <- get "osc.0.type"
osc2 <- get "osc.1.type"
let tags = [("category", category), ("z1-osc", osc1), ("z1-osc", osc2)]
let common = Common.tags #= tags $ Common.common ()
return (Patch.patch pb_range name, common)
where
get :: (Sysex.RecordVal a) => String -> Either String a
get = flip Sysex.get_rmap rmap
current_multi_data_dump :: Word8
current_multi_data_dump = 0x69
multi_data_dump :: Word8
multi_data_dump = 0x4d
| Z1 sysexes use a scheme where the eighth bits are packed into a single
byte preceeding its 7 7bit bytes .
dekorg :: ByteString -> ByteString
dekorg = mconcatMap smoosh . chunks 8
where
smoosh bs = case B.uncons bs of
Just (b7, bytes) -> snd $
B.mapAccumL (\i to -> (i+1, copy_bit b7 i to)) 0 bytes
Nothing -> mempty
copy_bit from i to = if Bits.testBit from i
then Bits.setBit to 7 else Bits.clearBit to 7
enkorg :: ByteString -> ByteString
enkorg = mconcatMap expand . chunks 7
where
expand bs = B.cons bits (B.map (`Bits.clearBit` 7) bs)
where bits = B.foldr get_bits 0 bs
get_bits b accum =
Bits.shiftL accum 1 .|. (if Bits.testBit b 7 then 1 else 0)
chunks :: Int -> ByteString -> [ByteString]
chunks size bs
| B.null pre = []
| otherwise = pre : chunks size post
where (pre, post) = B.splitAt size bs
exact_chunks :: Int -> ByteString -> [ByteString]
exact_chunks size bs
| B.length pre < size = []
| otherwise = pre : exact_chunks size post
where (pre, post) = B.splitAt size bs
test_multiset = do
bytes <- B.drop 9 <$> B.readFile "inst_db/multi1.syx"
return $ decode multiset_spec (dekorg bytes)
test_dump = do
bytes <- B.readFile "inst_db/z1/bank_b.syx"
return $ decode_program_dump bytes
test_encode = do
bytes <- B.readFile "inst_db/z1/bank_b.syx"
let Right recs = decode_program_dump bytes
return $ encode patch_spec (head recs)
test_patch = do
bytes <- B.readFile
"inst_db/z1/sysex/lib1/z1 o00o00 ANALOG INIT.syx"
return $ decode_current_program bytes
read_patch = do
b <- dekorg . B.drop 6 <$> B.readFile
"inst_db/z1/sysex/lib1/z1 o00o00 ANALOG INIT.syx"
return $ decode patch_spec b
|
58f36855149aca06e2d5fa15edc4fcda29b847503a039adb7792cb6baaf3eccd | vmchale/shake-dhall | Dhall.hs | module Development.Shake.Dhall ( needDhall
, needDhallCli
, dhallDeps
) where
import Control.Monad (filterM, (<=<))
import Control.Monad.IO.Class (liftIO)
import Data.Containers.ListUtils (nubOrd)
import Development.Shake (Action, Stdout (Stdout), command,
doesFileExist, need)
import Dhall.Dep
-- | 'need' some @.dhall@ files and imported dependencies
needDhall :: [FilePath] -> Action ()
needDhall fps =
need =<< liftIO (nubOrd . concat . (fps:) <$> traverse getAllFileDeps fps)
-- | Same as 'needDhall' but shells out to the command-line executable
--
-- @since 0.1.1.0
needDhallCli :: [FilePath] -> Action ()
needDhallCli =
need . concat <=< traverse dhallDeps
-- | Uses @dhall resolve --transitive-dependencies@ to work; command-line tool
-- must be installed.
--
-- @since 0.1.1.0
dhallDeps :: FilePath -> Action [FilePath]
dhallDeps inp = do
(Stdout out) <- command [] "dhall" ["resolve", "--transitive-dependencies", "--file", inp]
(inp:) <$> filterM doesFileExist (lines out)
| null | https://raw.githubusercontent.com/vmchale/shake-dhall/3fa3cf72c2fe77c7e3985cf01bcc4f3f53a39666/src/Development/Shake/Dhall.hs | haskell | | 'need' some @.dhall@ files and imported dependencies
| Same as 'needDhall' but shells out to the command-line executable
@since 0.1.1.0
| Uses @dhall resolve --transitive-dependencies@ to work; command-line tool
must be installed.
@since 0.1.1.0 | module Development.Shake.Dhall ( needDhall
, needDhallCli
, dhallDeps
) where
import Control.Monad (filterM, (<=<))
import Control.Monad.IO.Class (liftIO)
import Data.Containers.ListUtils (nubOrd)
import Development.Shake (Action, Stdout (Stdout), command,
doesFileExist, need)
import Dhall.Dep
needDhall :: [FilePath] -> Action ()
needDhall fps =
need =<< liftIO (nubOrd . concat . (fps:) <$> traverse getAllFileDeps fps)
needDhallCli :: [FilePath] -> Action ()
needDhallCli =
need . concat <=< traverse dhallDeps
dhallDeps :: FilePath -> Action [FilePath]
dhallDeps inp = do
(Stdout out) <- command [] "dhall" ["resolve", "--transitive-dependencies", "--file", inp]
(inp:) <$> filterM doesFileExist (lines out)
|
6a97343e47673bfa78a0d8d546eab9a3e67c58a05f5f2d5c04ee4d86369787a5 | wireapp/wire-server | User.hs | {-# LANGUAGE OverloadedStrings #-}
-- This file is part of the Wire Server implementation.
--
Copyright ( C ) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
module Brig.Types.User
( ManagedByUpdate (..),
RichInfoUpdate (..),
PasswordResetPair,
HavePendingInvitations (..),
)
where
import Data.Aeson
import Imports
import Wire.API.User
import Wire.API.User.Password
import Wire.API.User.RichInfo
newtype ManagedByUpdate = ManagedByUpdate {mbuManagedBy :: ManagedBy} deriving (Eq, Show, Generic)
data HavePendingInvitations
= WithPendingInvitations
| NoPendingInvitations
deriving (Eq, Show, Generic)
newtype RichInfoUpdate = RichInfoUpdate {riuRichInfo :: RichInfoAssocList} deriving (Eq, Show, Generic)
instance FromJSON ManagedByUpdate where
parseJSON = withObject "managed-by-update" $ \o ->
ManagedByUpdate <$> o .: "managed_by"
instance ToJSON ManagedByUpdate where
toJSON m = object ["managed_by" .= mbuManagedBy m]
instance FromJSON RichInfoUpdate where
parseJSON = withObject "rich-info-update" $ \o ->
RichInfoUpdate <$> o .: "rich_info"
instance ToJSON RichInfoUpdate where
toJSON (RichInfoUpdate rif) = object ["rich_info" .= rif]
type PasswordResetPair = (PasswordResetKey, PasswordResetCode)
| null | https://raw.githubusercontent.com/wireapp/wire-server/2e1290d79e43685f5fecacd95b7170e3714ad848/libs/brig-types/src/Brig/Types/User.hs | haskell | # LANGUAGE OverloadedStrings #
This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>. |
Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
module Brig.Types.User
( ManagedByUpdate (..),
RichInfoUpdate (..),
PasswordResetPair,
HavePendingInvitations (..),
)
where
import Data.Aeson
import Imports
import Wire.API.User
import Wire.API.User.Password
import Wire.API.User.RichInfo
newtype ManagedByUpdate = ManagedByUpdate {mbuManagedBy :: ManagedBy} deriving (Eq, Show, Generic)
data HavePendingInvitations
= WithPendingInvitations
| NoPendingInvitations
deriving (Eq, Show, Generic)
newtype RichInfoUpdate = RichInfoUpdate {riuRichInfo :: RichInfoAssocList} deriving (Eq, Show, Generic)
instance FromJSON ManagedByUpdate where
parseJSON = withObject "managed-by-update" $ \o ->
ManagedByUpdate <$> o .: "managed_by"
instance ToJSON ManagedByUpdate where
toJSON m = object ["managed_by" .= mbuManagedBy m]
instance FromJSON RichInfoUpdate where
parseJSON = withObject "rich-info-update" $ \o ->
RichInfoUpdate <$> o .: "rich_info"
instance ToJSON RichInfoUpdate where
toJSON (RichInfoUpdate rif) = object ["rich_info" .= rif]
type PasswordResetPair = (PasswordResetKey, PasswordResetCode)
|
eaef69646bcf05ba809d39e66264923501164db12e203134e4c9f9da9e1ea402 | mathematical-systems/clml | swank-loader.lisp | ;;;; -*- indent-tabs-mode: nil -*-
;;;
;;; swank-loader.lisp --- Compile and load the Slime backend.
;;;
Created 2003 , < >
;;;
;;; This code has been placed in the Public Domain. All warranties
;;; are disclaimed.
;;;
;; If you want customize the source- or fasl-directory you can set
;; swank-loader:*source-directory* resp. swank-loader:*fasl-directory*
;; before loading this files. (you also need to create the
;; swank-loader package.)
;; E.g.:
;;
;; (make-package :swank-loader)
( defparameter swank - loader::*fasl - directory * " /tmp / fasl/ " )
;; (load ".../swank-loader.lisp")
(cl:defpackage :swank-loader
(:use :cl)
(:export :init
:dump-image
:*source-directory*
:*fasl-directory*))
(cl:in-package :swank-loader)
(defvar *source-directory*
(make-pathname :name nil :type nil
:defaults (or *load-pathname* *default-pathname-defaults*))
"The directory where to look for the source.")
(defparameter *sysdep-files*
#+cmu '(swank-source-path-parser swank-source-file-cache swank-cmucl)
#+scl '(swank-source-path-parser swank-source-file-cache swank-scl)
#+sbcl '(swank-source-path-parser swank-source-file-cache
swank-sbcl swank-gray)
#+clozure '(metering swank-ccl swank-gray)
#+lispworks '(swank-lispworks swank-gray)
#+allegro '(swank-allegro swank-gray)
#+clisp '(xref metering swank-clisp swank-gray)
#+armedbear '(swank-abcl)
#+cormanlisp '(swank-corman swank-gray)
#+ecl '(swank-source-path-parser swank-source-file-cache swank-ecl swank-gray))
(defparameter *implementation-features*
'(:allegro :lispworks :sbcl :clozure :cmu :clisp :ccl :corman :cormanlisp
:armedbear :gcl :ecl :scl))
(defparameter *os-features*
'(:macosx :linux :windows :mswindows :win32 :solaris :darwin :sunos :hpux
:unix))
(defparameter *architecture-features*
'(:powerpc :ppc :x86 :x86-64 :amd64 :i686 :i586 :i486 :pc386 :iapx386
:sparc64 :sparc :hppa64 :hppa))
(defun lisp-version-string ()
#+(or clozure cmu) (substitute-if #\_ (lambda (x) (find x " /"))
(lisp-implementation-version))
#+(or cormanlisp scl sbcl ecl) (lisp-implementation-version)
#+lispworks (lisp-implementation-version)
#+allegro (format nil "~A~A~A~A"
excl::*common-lisp-version-number*
ANSI vs MoDeRn
(if (member :64bit *features*) "-64bit" "")
(excl:ics-target-case
(:-ics "")
(:+ics "-ics")))
#+clisp (let ((s (lisp-implementation-version)))
(subseq s 0 (position #\space s)))
#+armedbear (lisp-implementation-version))
(defun unique-dir-name ()
"Return a name that can be used as a directory name that is
unique to a Lisp implementation, Lisp implementation version,
operating system, and hardware architecture."
(flet ((first-of (features)
(loop for f in features
when (find f *features*) return it))
(maybe-warn (value fstring &rest args)
(cond (value)
(t (apply #'warn fstring args)
"unknown"))))
(let ((lisp (maybe-warn (first-of *implementation-features*)
"No implementation feature found in ~a."
*implementation-features*))
(os (maybe-warn (first-of *os-features*)
"No os feature found in ~a." *os-features*))
(arch (maybe-warn (first-of *architecture-features*)
"No architecture feature found in ~a."
*architecture-features*))
(version (maybe-warn (lisp-version-string)
"Don't know how to get Lisp ~
implementation version.")))
(format nil "~(~@{~a~^-~}~)" lisp version os arch))))
(defun file-newer-p (new-file old-file)
"Returns true if NEW-FILE is newer than OLD-FILE."
(> (file-write-date new-file) (file-write-date old-file)))
(defun slime-version-string ()
"Return a string identifying the SLIME version.
Return nil if nothing appropriate is available."
(with-open-file (s (merge-pathnames "ChangeLog" *source-directory*)
:if-does-not-exist nil)
(and s (symbol-name (read s)))))
(defun default-fasl-dir ()
(merge-pathnames
(make-pathname
:directory `(:relative ".slime" "fasl"
,@(if (slime-version-string) (list (slime-version-string)))
,(unique-dir-name)))
(user-homedir-pathname)))
(defun binary-pathname (src-pathname binary-dir)
"Return the pathname where SRC-PATHNAME's binary should be compiled."
(let ((cfp (compile-file-pathname src-pathname)))
(merge-pathnames (make-pathname :name (pathname-name cfp)
:type (pathname-type cfp))
binary-dir)))
(defun handle-loadtime-error (condition binary-pathname)
(pprint-logical-block (*error-output* () :per-line-prefix ";; ")
(format *error-output*
"~%Error while loading: ~A~%Condition: ~A~%Aborting.~%"
binary-pathname condition))
(when (equal (directory-namestring binary-pathname)
(directory-namestring (default-fasl-dir)))
(ignore-errors (delete-file binary-pathname)))
(abort))
(defun compile-files (files fasl-dir load)
"Compile each file in FILES if the source is newer than its
corresponding binary, or the file preceding it was recompiled.
If LOAD is true, load the fasl file."
(let ((needs-recompile nil))
(dolist (src files)
(let ((dest (binary-pathname src fasl-dir)))
(handler-case
(progn
(when (or needs-recompile
(not (probe-file dest))
(file-newer-p src dest))
;; need a to recompile src-pathname, so we'll
;; need to recompile everything after this too.
(setq needs-recompile t)
(ensure-directories-exist dest)
(compile-file src :output-file dest :print nil :verbose t))
(when load
(load dest :verbose t)))
;; Fail as early as possible
(serious-condition (c)
(handle-loadtime-error c dest)))))))
#+(or cormanlisp ecl)
(defun compile-files (files fasl-dir load)
"Corman Lisp and ECL have trouble with compiled files."
(declare (ignore fasl-dir))
(when load
(dolist (file files)
(load file :verbose t)
(force-output))))
(defun load-user-init-file ()
"Load the user init file, return NIL if it does not exist."
(load (merge-pathnames (user-homedir-pathname)
(make-pathname :name ".swank" :type "lisp"))
:if-does-not-exist nil))
(defun load-site-init-file (dir)
(load (make-pathname :name "site-init" :type "lisp"
:defaults dir)
:if-does-not-exist nil))
(defun src-files (names src-dir)
(mapcar (lambda (name)
(make-pathname :name (string-downcase name) :type "lisp"
:defaults src-dir))
names))
(defvar *swank-files* `(swank-backend ,@*sysdep-files* swank-match swank))
(defvar *contribs* '(swank-c-p-c swank-arglists swank-fuzzy
swank-fancy-inspector
swank-presentations swank-presentation-streams
#+(or asdf sbcl) swank-asdf
swank-package-fu
swank-sbcl-exts
)
"List of names for contrib modules.")
(defvar *fasl-directory* (default-fasl-dir)
"The directory where fasl files should be placed.")
(defun append-dir (absolute name)
(merge-pathnames
(make-pathname :directory `(:relative ,name) :defaults absolute)
absolute))
(defun contrib-dir (base-dir)
(append-dir base-dir "contrib"))
(defun q (s) (read-from-string s))
(defun load-swank (&key (src-dir *source-directory*)
(fasl-dir *fasl-directory*))
(compile-files (src-files *swank-files* src-dir) fasl-dir t)
(funcall (q "swank::before-init")
(slime-version-string)
(list (contrib-dir fasl-dir)
(contrib-dir src-dir))))
(defun compile-contribs (&key (src-dir (contrib-dir *source-directory*))
(fasl-dir (contrib-dir *fasl-directory*))
load)
(compile-files (src-files *contribs* src-dir) fasl-dir load))
(defun loadup ()
(load-swank)
(compile-contribs :load t))
(defun setup ()
(load-site-init-file *source-directory*)
(load-user-init-file)
(when (#-clisp probe-file
#+clisp ext:probe-directory
(contrib-dir *source-directory*))
(eval `(pushnew 'compile-contribs ,(q "swank::*after-init-hook*"))))
(funcall (q "swank::init")))
(defun init (&key delete reload load-contribs (setup t))
"Load SWANK and initialize some global variables.
If DELETE is true, delete any existing SWANK packages.
If RELOAD is true, reload SWANK, even if the SWANK package already exists.
If LOAD-CONTRIBS is true, load all contribs
If SETUP is true, load user init files and initialize some
global variabes in SWANK."
(when (and delete (find-package :swank))
(mapc #'delete-package '(:swank :swank-io-package :swank-backend)))
(cond ((or (not (find-package :swank)) reload)
(load-swank))
(t
(warn "Not reloading SWANK. Package already exists.")))
(when load-contribs
(compile-contribs :load t))
(when setup
(setup)))
(defun dump-image (filename)
(init :setup nil)
(funcall (q "swank-backend:save-image") filename))
| null | https://raw.githubusercontent.com/mathematical-systems/clml/918e41e67ee2a8102c55a84b4e6e85bbdde933f5/addons/slime/swank-loader.lisp | lisp | -*- indent-tabs-mode: nil -*-
swank-loader.lisp --- Compile and load the Slime backend.
This code has been placed in the Public Domain. All warranties
are disclaimed.
If you want customize the source- or fasl-directory you can set
swank-loader:*source-directory* resp. swank-loader:*fasl-directory*
before loading this files. (you also need to create the
swank-loader package.)
E.g.:
(make-package :swank-loader)
(load ".../swank-loader.lisp")
need a to recompile src-pathname, so we'll
need to recompile everything after this too.
Fail as early as possible | Created 2003 , < >
( defparameter swank - loader::*fasl - directory * " /tmp / fasl/ " )
(cl:defpackage :swank-loader
(:use :cl)
(:export :init
:dump-image
:*source-directory*
:*fasl-directory*))
(cl:in-package :swank-loader)
(defvar *source-directory*
(make-pathname :name nil :type nil
:defaults (or *load-pathname* *default-pathname-defaults*))
"The directory where to look for the source.")
(defparameter *sysdep-files*
#+cmu '(swank-source-path-parser swank-source-file-cache swank-cmucl)
#+scl '(swank-source-path-parser swank-source-file-cache swank-scl)
#+sbcl '(swank-source-path-parser swank-source-file-cache
swank-sbcl swank-gray)
#+clozure '(metering swank-ccl swank-gray)
#+lispworks '(swank-lispworks swank-gray)
#+allegro '(swank-allegro swank-gray)
#+clisp '(xref metering swank-clisp swank-gray)
#+armedbear '(swank-abcl)
#+cormanlisp '(swank-corman swank-gray)
#+ecl '(swank-source-path-parser swank-source-file-cache swank-ecl swank-gray))
(defparameter *implementation-features*
'(:allegro :lispworks :sbcl :clozure :cmu :clisp :ccl :corman :cormanlisp
:armedbear :gcl :ecl :scl))
(defparameter *os-features*
'(:macosx :linux :windows :mswindows :win32 :solaris :darwin :sunos :hpux
:unix))
(defparameter *architecture-features*
'(:powerpc :ppc :x86 :x86-64 :amd64 :i686 :i586 :i486 :pc386 :iapx386
:sparc64 :sparc :hppa64 :hppa))
(defun lisp-version-string ()
#+(or clozure cmu) (substitute-if #\_ (lambda (x) (find x " /"))
(lisp-implementation-version))
#+(or cormanlisp scl sbcl ecl) (lisp-implementation-version)
#+lispworks (lisp-implementation-version)
#+allegro (format nil "~A~A~A~A"
excl::*common-lisp-version-number*
ANSI vs MoDeRn
(if (member :64bit *features*) "-64bit" "")
(excl:ics-target-case
(:-ics "")
(:+ics "-ics")))
#+clisp (let ((s (lisp-implementation-version)))
(subseq s 0 (position #\space s)))
#+armedbear (lisp-implementation-version))
(defun unique-dir-name ()
"Return a name that can be used as a directory name that is
unique to a Lisp implementation, Lisp implementation version,
operating system, and hardware architecture."
(flet ((first-of (features)
(loop for f in features
when (find f *features*) return it))
(maybe-warn (value fstring &rest args)
(cond (value)
(t (apply #'warn fstring args)
"unknown"))))
(let ((lisp (maybe-warn (first-of *implementation-features*)
"No implementation feature found in ~a."
*implementation-features*))
(os (maybe-warn (first-of *os-features*)
"No os feature found in ~a." *os-features*))
(arch (maybe-warn (first-of *architecture-features*)
"No architecture feature found in ~a."
*architecture-features*))
(version (maybe-warn (lisp-version-string)
"Don't know how to get Lisp ~
implementation version.")))
(format nil "~(~@{~a~^-~}~)" lisp version os arch))))
(defun file-newer-p (new-file old-file)
"Returns true if NEW-FILE is newer than OLD-FILE."
(> (file-write-date new-file) (file-write-date old-file)))
(defun slime-version-string ()
"Return a string identifying the SLIME version.
Return nil if nothing appropriate is available."
(with-open-file (s (merge-pathnames "ChangeLog" *source-directory*)
:if-does-not-exist nil)
(and s (symbol-name (read s)))))
(defun default-fasl-dir ()
(merge-pathnames
(make-pathname
:directory `(:relative ".slime" "fasl"
,@(if (slime-version-string) (list (slime-version-string)))
,(unique-dir-name)))
(user-homedir-pathname)))
(defun binary-pathname (src-pathname binary-dir)
"Return the pathname where SRC-PATHNAME's binary should be compiled."
(let ((cfp (compile-file-pathname src-pathname)))
(merge-pathnames (make-pathname :name (pathname-name cfp)
:type (pathname-type cfp))
binary-dir)))
(defun handle-loadtime-error (condition binary-pathname)
(pprint-logical-block (*error-output* () :per-line-prefix ";; ")
(format *error-output*
"~%Error while loading: ~A~%Condition: ~A~%Aborting.~%"
binary-pathname condition))
(when (equal (directory-namestring binary-pathname)
(directory-namestring (default-fasl-dir)))
(ignore-errors (delete-file binary-pathname)))
(abort))
(defun compile-files (files fasl-dir load)
"Compile each file in FILES if the source is newer than its
corresponding binary, or the file preceding it was recompiled.
If LOAD is true, load the fasl file."
(let ((needs-recompile nil))
(dolist (src files)
(let ((dest (binary-pathname src fasl-dir)))
(handler-case
(progn
(when (or needs-recompile
(not (probe-file dest))
(file-newer-p src dest))
(setq needs-recompile t)
(ensure-directories-exist dest)
(compile-file src :output-file dest :print nil :verbose t))
(when load
(load dest :verbose t)))
(serious-condition (c)
(handle-loadtime-error c dest)))))))
#+(or cormanlisp ecl)
(defun compile-files (files fasl-dir load)
"Corman Lisp and ECL have trouble with compiled files."
(declare (ignore fasl-dir))
(when load
(dolist (file files)
(load file :verbose t)
(force-output))))
(defun load-user-init-file ()
"Load the user init file, return NIL if it does not exist."
(load (merge-pathnames (user-homedir-pathname)
(make-pathname :name ".swank" :type "lisp"))
:if-does-not-exist nil))
(defun load-site-init-file (dir)
(load (make-pathname :name "site-init" :type "lisp"
:defaults dir)
:if-does-not-exist nil))
(defun src-files (names src-dir)
(mapcar (lambda (name)
(make-pathname :name (string-downcase name) :type "lisp"
:defaults src-dir))
names))
(defvar *swank-files* `(swank-backend ,@*sysdep-files* swank-match swank))
(defvar *contribs* '(swank-c-p-c swank-arglists swank-fuzzy
swank-fancy-inspector
swank-presentations swank-presentation-streams
#+(or asdf sbcl) swank-asdf
swank-package-fu
swank-sbcl-exts
)
"List of names for contrib modules.")
(defvar *fasl-directory* (default-fasl-dir)
"The directory where fasl files should be placed.")
(defun append-dir (absolute name)
(merge-pathnames
(make-pathname :directory `(:relative ,name) :defaults absolute)
absolute))
(defun contrib-dir (base-dir)
(append-dir base-dir "contrib"))
(defun q (s) (read-from-string s))
(defun load-swank (&key (src-dir *source-directory*)
(fasl-dir *fasl-directory*))
(compile-files (src-files *swank-files* src-dir) fasl-dir t)
(funcall (q "swank::before-init")
(slime-version-string)
(list (contrib-dir fasl-dir)
(contrib-dir src-dir))))
(defun compile-contribs (&key (src-dir (contrib-dir *source-directory*))
(fasl-dir (contrib-dir *fasl-directory*))
load)
(compile-files (src-files *contribs* src-dir) fasl-dir load))
(defun loadup ()
(load-swank)
(compile-contribs :load t))
(defun setup ()
(load-site-init-file *source-directory*)
(load-user-init-file)
(when (#-clisp probe-file
#+clisp ext:probe-directory
(contrib-dir *source-directory*))
(eval `(pushnew 'compile-contribs ,(q "swank::*after-init-hook*"))))
(funcall (q "swank::init")))
(defun init (&key delete reload load-contribs (setup t))
"Load SWANK and initialize some global variables.
If DELETE is true, delete any existing SWANK packages.
If RELOAD is true, reload SWANK, even if the SWANK package already exists.
If LOAD-CONTRIBS is true, load all contribs
If SETUP is true, load user init files and initialize some
global variabes in SWANK."
(when (and delete (find-package :swank))
(mapc #'delete-package '(:swank :swank-io-package :swank-backend)))
(cond ((or (not (find-package :swank)) reload)
(load-swank))
(t
(warn "Not reloading SWANK. Package already exists.")))
(when load-contribs
(compile-contribs :load t))
(when setup
(setup)))
(defun dump-image (filename)
(init :setup nil)
(funcall (q "swank-backend:save-image") filename))
|
9a0bcdcb95f5a5d2b4041853b1167bdffcebbd38a1521a83663781a3dfa901f7 | rowangithub/DOrder | a_init.ml | let rec init (i:int) (n:int) (x:int) (a:int array) =
if (i >= n) then ()
else
(let _ = Array.set a i x
in init (i+1) n x a)
let main l =
let n = Array.length l in
init 0 n 1 l
let vec = [|0;0;0|]
let _ = main vec | null | https://raw.githubusercontent.com/rowangithub/DOrder/e0d5efeb8853d2a51cc4796d7db0f8be3185d7df/tests/array/a_init.ml | ocaml | let rec init (i:int) (n:int) (x:int) (a:int array) =
if (i >= n) then ()
else
(let _ = Array.set a i x
in init (i+1) n x a)
let main l =
let n = Array.length l in
init 0 n 1 l
let vec = [|0;0;0|]
let _ = main vec |
|
400dca31c93a6f3986be4f27b98fe221e9aba1ee4927de3dd92734b38a43a0b1 | rickardlindberg/brainfuck | Brainfuck.hs | module Brainfuck where
import qualified Data.Map as M
import Data.Maybe
import Data.Char
data Op = MLeft
| MRight
| Inc
| Dec
| In
| Out
deriving (Show, Eq)
type Program = [Op]
type Input = [Int]
type Output = [Int]
type Position = Int
type Tape = M.Map Int Int
data Machine = Machine
{ input :: Input
, position :: Position
, tape :: Tape
} deriving (Show, Eq)
parseOp :: Char -> Maybe Op
parseOp '<' = Just MLeft
parseOp '>' = Just MRight
parseOp '+' = Just Inc
parseOp '-' = Just Dec
parseOp '.' = Just Out
parseOp ',' = Just In
parseOp _ = Nothing
parseProgram :: [Char] -> [Op]
parseProgram program = mapMaybe parseOp program
withDefault :: a -> (a -> a) -> Maybe a -> Maybe a
withDefault def f (Just old) = Just (f old)
withDefault def f Nothing = Just (f def)
executeOp :: Op -> Machine -> (Machine, Maybe Int)
executeOp MLeft machine = (machine { position = position machine - 1 }, Nothing)
executeOp MRight machine = (machine { position = position machine + 1 }, Nothing)
executeOp Inc machine =
(machine { tape = M.alter (withDefault 0 (+1)) (position machine) (tape machine) },
Nothing)
executeOp Dec machine =
(machine { tape = M.alter (withDefault 0 (subtract 1)) (position machine) (tape machine) },
Nothing)
executeOp In machine@Machine { input=(x:xs), tape=tape, position=position } =
(machine { tape = M.insert position x tape,
input = xs },
Nothing)
executeOp Out machine@Machine { tape=tape, position=position } =
(machine, Just (M.findWithDefault 0 position tape))
initialMachine :: Input -> Machine
initialMachine input = Machine input 0 M.empty
executeProgram :: Machine -> Program -> Output
executeProgram _ [] = []
executeProgram machine (op:ops) =
let (newMachine,output) = executeOp op machine
in case output of
Just x -> x:(executeProgram newMachine ops)
Nothing -> executeProgram newMachine ops
execute' :: String -> String -> String
execute' program input =
map chr $ executeProgram (initialMachine (map ord input)) (parseProgram program)
execute :: String -> IO ()
execute program =
interact (execute' program) >> putStrLn "done!"
| null | https://raw.githubusercontent.com/rickardlindberg/brainfuck/fa4940f131adb3682b892f05bb5debef9576b27d/versions/raek_levsa/Brainfuck.hs | haskell | module Brainfuck where
import qualified Data.Map as M
import Data.Maybe
import Data.Char
data Op = MLeft
| MRight
| Inc
| Dec
| In
| Out
deriving (Show, Eq)
type Program = [Op]
type Input = [Int]
type Output = [Int]
type Position = Int
type Tape = M.Map Int Int
data Machine = Machine
{ input :: Input
, position :: Position
, tape :: Tape
} deriving (Show, Eq)
parseOp :: Char -> Maybe Op
parseOp '<' = Just MLeft
parseOp '>' = Just MRight
parseOp '+' = Just Inc
parseOp '-' = Just Dec
parseOp '.' = Just Out
parseOp ',' = Just In
parseOp _ = Nothing
parseProgram :: [Char] -> [Op]
parseProgram program = mapMaybe parseOp program
withDefault :: a -> (a -> a) -> Maybe a -> Maybe a
withDefault def f (Just old) = Just (f old)
withDefault def f Nothing = Just (f def)
executeOp :: Op -> Machine -> (Machine, Maybe Int)
executeOp MLeft machine = (machine { position = position machine - 1 }, Nothing)
executeOp MRight machine = (machine { position = position machine + 1 }, Nothing)
executeOp Inc machine =
(machine { tape = M.alter (withDefault 0 (+1)) (position machine) (tape machine) },
Nothing)
executeOp Dec machine =
(machine { tape = M.alter (withDefault 0 (subtract 1)) (position machine) (tape machine) },
Nothing)
executeOp In machine@Machine { input=(x:xs), tape=tape, position=position } =
(machine { tape = M.insert position x tape,
input = xs },
Nothing)
executeOp Out machine@Machine { tape=tape, position=position } =
(machine, Just (M.findWithDefault 0 position tape))
initialMachine :: Input -> Machine
initialMachine input = Machine input 0 M.empty
executeProgram :: Machine -> Program -> Output
executeProgram _ [] = []
executeProgram machine (op:ops) =
let (newMachine,output) = executeOp op machine
in case output of
Just x -> x:(executeProgram newMachine ops)
Nothing -> executeProgram newMachine ops
execute' :: String -> String -> String
execute' program input =
map chr $ executeProgram (initialMachine (map ord input)) (parseProgram program)
execute :: String -> IO ()
execute program =
interact (execute' program) >> putStrLn "done!"
|
|
4e0078934519509eb4256f56b0338073e88d69b093ba2fb5d0f8ed3b6739106e | rainbyte/frag | Command.hs | $ I d : Command.hs , v 1.2 2003/11/10 21:28:58 antony Exp $
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* I N V A D E R S *
* *
* Module : Command *
* Purpose : The Invader command type . *
* Author : *
* *
* Copyright ( c ) Yale University , 2003 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
******************************************************************************
* I N V A D E R S *
* *
* Module: Command *
* Purpose: The Invader command type. *
* Author: Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module Command (
Command(..)
) where
data Command =
CmdQuit -- Quit Invaders.
| CmdNewGame -- Play game.
| CmdFreeze -- Freeze game.
| CmdResume -- Resume game.
-- | CmdUp -- Move Up.
-- | CmdDown -- Move Down.
-- | CmdLeft -- Move Left.
| CmdRight -- Move Right .
| null | https://raw.githubusercontent.com/rainbyte/frag/28893048f093f369c896932ff297150ef8ed2dd0/src/Command.hs | haskell | Quit Invaders.
Play game.
Freeze game.
Resume game.
| CmdUp -- Move Up.
| CmdDown -- Move Down.
| CmdLeft -- Move Left.
Move Right . | $ I d : Command.hs , v 1.2 2003/11/10 21:28:58 antony Exp $
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* I N V A D E R S *
* *
* Module : Command *
* Purpose : The Invader command type . *
* Author : *
* *
* Copyright ( c ) Yale University , 2003 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
******************************************************************************
* I N V A D E R S *
* *
* Module: Command *
* Purpose: The Invader command type. *
* Author: Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module Command (
Command(..)
) where
data Command =
|
21bf5b59abaa89e64267a6a037715cbde0badb47e53de29f8ca39f86f2eed266 | slipstream/SlipStreamServer | authn_info_header_test.clj | (ns com.sixsq.slipstream.ssclj.middleware.authn-info-header-test
(:require
[clojure.test :refer :all]
[com.sixsq.slipstream.auth.cookies :as cookies]
[com.sixsq.slipstream.ssclj.middleware.authn-info-header :refer :all]
[ring.util.codec :as codec]))
(defn serialize-cookie-value
"replaces the map cookie value with a serialized string"
[{:keys [value] :as cookie}]
(assoc cookie :value (codec/form-encode value)))
(def session "session/2ba95fe4-7bf0-495d-9954-251d7417b3ce")
(def session-a "session/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa")
(def cookie-id (serialize-cookie-value (cookies/claims-cookie {:username "uname2"})))
(def cookie-id-roles (serialize-cookie-value
(cookies/claims-cookie {:username "uname2"
:roles "USER alpha-role"
:session session-a})))
(deftest check-is-session?
(are [expected s] (= expected (is-session? s))
nil nil
nil ""
nil "USER"
session session
session-a session-a))
(deftest check-extract-authn-info
(are [expected header] (= expected (extract-authn-info {:headers {authn-info-header header}}))
nil nil
nil ""
["uname" #{}] "uname"
["uname" #{}] " uname"
["uname" #{"r1"}] "uname r1"
["uname" #{"r1"}] " uname r1"
["uname" #{"r1"}] "uname r1 "
["uname" #{"r1" "r2"}] "uname r1 r2"))
(deftest check-extract-info
(are [expected request] (= expected (extract-info request))
nil {}
["uname" #{"r1"}] {:headers {authn-info-header "uname r1"}}
["uname2" #{"USER" "alpha-role" session-a}] {:cookies {authn-cookie cookie-id-roles}}
["uname" #{"r1"}] {:headers {authn-info-header "uname r1"}
:cookies {authn-cookie cookie-id-roles}}))
(deftest check-extract-header-claims
(are [expected header] (= expected (extract-header-claims {:headers {authn-info-header header}}))
nil nil
nil ""
{:username "uname"} "uname"
{:username "uname", :roles #{"r1"}} "uname r1"
{:username "uname", :roles #{"r1" "r2"}} "uname r1 r2"
{:username "uname", :roles #{"r1" "r2"}, :session session} (str "uname r1 r2 " session)))
(deftest check-identity-map
(let [anon-map {:current "ANON"
:authentications {"ANON" {:roles #{"ANON"}}}}]
(are [expected v] (= expected (create-identity-map v))
anon-map nil
anon-map [nil nil]
anon-map [nil []]
{:current "ANON"
:authentications {"ANON" {:roles #{"roles" "ANON"}}}}
[nil ["roles"]]
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"ANON"}}}}
["uname" []]
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "ANON"}}}}
["uname" ["r1"]]
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "r2" "ANON"}}}}
["uname" ["r1" "r2"]])))
(deftest check-handler
(let [handler (wrap-authn-info-header identity)
anon-map {:current "ANON"
:authentications {"ANON" {:roles #{"ANON"}}}}]
(are [expected request] (= expected (:identity (handler request)))
anon-map {}
anon-map {:headers {"header-1" "value"}}
anon-map {:headers {authn-info-header nil}}
anon-map {:headers {authn-info-header ""}}
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"ANON"}}}}
{:headers {authn-info-header "uname"}}
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "ANON"}}}}
{:headers {authn-info-header "uname r1"}}
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "r2" "ANON"}}}}
{:headers {authn-info-header "uname r1 r2"}}
{:current "uname2"
:authentications {"uname2" {:identity "uname2"
:roles #{"ANON"}}}}
{:cookies {authn-cookie cookie-id}}
{:current "uname2"
:authentications {"uname2" {:identity "uname2"
:roles #{"USER" "alpha-role" session-a "ANON"}}}}
{:cookies {authn-cookie cookie-id-roles}}
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "r2" "ANON"}}}}
{:headers {authn-info-header "uname r1 r2"}
:cookies {authn-cookie cookie-id-roles}})))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/cimi/test/com/sixsq/slipstream/ssclj/middleware/authn_info_header_test.clj | clojure | (ns com.sixsq.slipstream.ssclj.middleware.authn-info-header-test
(:require
[clojure.test :refer :all]
[com.sixsq.slipstream.auth.cookies :as cookies]
[com.sixsq.slipstream.ssclj.middleware.authn-info-header :refer :all]
[ring.util.codec :as codec]))
(defn serialize-cookie-value
"replaces the map cookie value with a serialized string"
[{:keys [value] :as cookie}]
(assoc cookie :value (codec/form-encode value)))
(def session "session/2ba95fe4-7bf0-495d-9954-251d7417b3ce")
(def session-a "session/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa")
(def cookie-id (serialize-cookie-value (cookies/claims-cookie {:username "uname2"})))
(def cookie-id-roles (serialize-cookie-value
(cookies/claims-cookie {:username "uname2"
:roles "USER alpha-role"
:session session-a})))
(deftest check-is-session?
(are [expected s] (= expected (is-session? s))
nil nil
nil ""
nil "USER"
session session
session-a session-a))
(deftest check-extract-authn-info
(are [expected header] (= expected (extract-authn-info {:headers {authn-info-header header}}))
nil nil
nil ""
["uname" #{}] "uname"
["uname" #{}] " uname"
["uname" #{"r1"}] "uname r1"
["uname" #{"r1"}] " uname r1"
["uname" #{"r1"}] "uname r1 "
["uname" #{"r1" "r2"}] "uname r1 r2"))
(deftest check-extract-info
(are [expected request] (= expected (extract-info request))
nil {}
["uname" #{"r1"}] {:headers {authn-info-header "uname r1"}}
["uname2" #{"USER" "alpha-role" session-a}] {:cookies {authn-cookie cookie-id-roles}}
["uname" #{"r1"}] {:headers {authn-info-header "uname r1"}
:cookies {authn-cookie cookie-id-roles}}))
(deftest check-extract-header-claims
(are [expected header] (= expected (extract-header-claims {:headers {authn-info-header header}}))
nil nil
nil ""
{:username "uname"} "uname"
{:username "uname", :roles #{"r1"}} "uname r1"
{:username "uname", :roles #{"r1" "r2"}} "uname r1 r2"
{:username "uname", :roles #{"r1" "r2"}, :session session} (str "uname r1 r2 " session)))
(deftest check-identity-map
(let [anon-map {:current "ANON"
:authentications {"ANON" {:roles #{"ANON"}}}}]
(are [expected v] (= expected (create-identity-map v))
anon-map nil
anon-map [nil nil]
anon-map [nil []]
{:current "ANON"
:authentications {"ANON" {:roles #{"roles" "ANON"}}}}
[nil ["roles"]]
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"ANON"}}}}
["uname" []]
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "ANON"}}}}
["uname" ["r1"]]
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "r2" "ANON"}}}}
["uname" ["r1" "r2"]])))
(deftest check-handler
(let [handler (wrap-authn-info-header identity)
anon-map {:current "ANON"
:authentications {"ANON" {:roles #{"ANON"}}}}]
(are [expected request] (= expected (:identity (handler request)))
anon-map {}
anon-map {:headers {"header-1" "value"}}
anon-map {:headers {authn-info-header nil}}
anon-map {:headers {authn-info-header ""}}
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"ANON"}}}}
{:headers {authn-info-header "uname"}}
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "ANON"}}}}
{:headers {authn-info-header "uname r1"}}
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "r2" "ANON"}}}}
{:headers {authn-info-header "uname r1 r2"}}
{:current "uname2"
:authentications {"uname2" {:identity "uname2"
:roles #{"ANON"}}}}
{:cookies {authn-cookie cookie-id}}
{:current "uname2"
:authentications {"uname2" {:identity "uname2"
:roles #{"USER" "alpha-role" session-a "ANON"}}}}
{:cookies {authn-cookie cookie-id-roles}}
{:current "uname"
:authentications {"uname" {:identity "uname"
:roles #{"r1" "r2" "ANON"}}}}
{:headers {authn-info-header "uname r1 r2"}
:cookies {authn-cookie cookie-id-roles}})))
|
|
322cfa1c333a35fb159d6a2a432900b94c9e86e1583f35de21a7d2c357963178 | bos/rwh | actions2.hs | {-- snippet all --}
str2message :: String -> String
str2message input = "Data: " ++ input
str2action :: String -> IO ()
str2action = putStrLn . str2message
numbers :: [Int]
numbers = [1..10]
main = do str2action "Start of the program"
mapM_ (str2action . show) numbers
str2action "Done!"
{-- /snippet all --}
| null | https://raw.githubusercontent.com/bos/rwh/7fd1e467d54aef832f5476ebf5f4f6a898a895d1/examples/ch07/actions2.hs | haskell | - snippet all -
- /snippet all - | str2message :: String -> String
str2message input = "Data: " ++ input
str2action :: String -> IO ()
str2action = putStrLn . str2message
numbers :: [Int]
numbers = [1..10]
main = do str2action "Start of the program"
mapM_ (str2action . show) numbers
str2action "Done!"
|
8a700856e2ad59fef06fc04b9daa8c47b6c115f5095b3c5bd9f94db7800dc922 | reiddraper/sumo | client.clj | (ns sumo.test.client
(:require [sumo.client :as client]
[sumo.mr-helpers :as mr-helpers])
(:use midje.sweet ))
(def c (client/connect))
(fact "can ping the client"
(client/ping c) => true)
(fact "get of non-existant key returns empty result"
(client/get c "does-not-exist" "does-not-exist") => [])
(defn- put-then-get [obj]
(client/put c "test-bucket" "test-key" obj)
(client/get c "test-bucket" "test-key"))
(against-background [(before :facts (client/put c "test-bucket" "get-head"
{ :content-type "text/plain"
:value "get-head test"}))]
(fact "get-head"
(client/get c "test-bucket" "get-head" {:head true}) => (one-of (contains {:value ""})) ))
(fact "put-get-json"
(put-then-get {:content-type "application/json"
:value [1 "2" '(3)]}) => (one-of (contains {:value [1 "2" '(3)]})))
(fact "can save and retrieve, with JSON as the default"
(put-then-get {:value [1 "2" '(3)]}) => (one-of (contains {:value [1 "2" '(3)]})))
(fact "put-get-indexes"
(let [indexes {:a #{1 "binary"}
:b #{2}}]
(put-then-get {:content-type "application/json"
:value "Hello"
:indexes indexes}) => (one-of (contains {:indexes indexes}))))
(fact "summing the keys in an empty bucket through
map-reduce results in zero keys being summed"
(let [query {"inputs" "non-existent-bucket"
"query" [(mr-helpers/map-js "function(v) {return [1]}")
(mr-helpers/reduce-erlang "riak_kv_mapreduce" "reduce_sum")]}]
(client/map-reduce c query) => [0]))
| null | https://raw.githubusercontent.com/reiddraper/sumo/bd330f14483bfdc7ccb9dbdc3f60266b8e1d95f4/test/sumo/test/client.clj | clojure | (ns sumo.test.client
(:require [sumo.client :as client]
[sumo.mr-helpers :as mr-helpers])
(:use midje.sweet ))
(def c (client/connect))
(fact "can ping the client"
(client/ping c) => true)
(fact "get of non-existant key returns empty result"
(client/get c "does-not-exist" "does-not-exist") => [])
(defn- put-then-get [obj]
(client/put c "test-bucket" "test-key" obj)
(client/get c "test-bucket" "test-key"))
(against-background [(before :facts (client/put c "test-bucket" "get-head"
{ :content-type "text/plain"
:value "get-head test"}))]
(fact "get-head"
(client/get c "test-bucket" "get-head" {:head true}) => (one-of (contains {:value ""})) ))
(fact "put-get-json"
(put-then-get {:content-type "application/json"
:value [1 "2" '(3)]}) => (one-of (contains {:value [1 "2" '(3)]})))
(fact "can save and retrieve, with JSON as the default"
(put-then-get {:value [1 "2" '(3)]}) => (one-of (contains {:value [1 "2" '(3)]})))
(fact "put-get-indexes"
(let [indexes {:a #{1 "binary"}
:b #{2}}]
(put-then-get {:content-type "application/json"
:value "Hello"
:indexes indexes}) => (one-of (contains {:indexes indexes}))))
(fact "summing the keys in an empty bucket through
map-reduce results in zero keys being summed"
(let [query {"inputs" "non-existent-bucket"
"query" [(mr-helpers/map-js "function(v) {return [1]}")
(mr-helpers/reduce-erlang "riak_kv_mapreduce" "reduce_sum")]}]
(client/map-reduce c query) => [0]))
|
|
10e36411a9ca3c295f773541eb09532d6aeb04918b530031e27d7bb31daa4ee5 | pariyatti/kosa | db.clj | (ns kosa.mobile.today.stacked-inspiration.db
(:refer-clojure :exclude [list get])
(:require [kuti.record :as record]
[kuti.record.query :as query]
[kuti.storage.nested :refer [expand-all]]
[kuti.record.nested :as nested]))
(defn list []
(map expand-all (record/list :stacked-inspiration)))
(defn find-all [attr param]
(query/find-all :stacked-inspiration attr param))
(defn save! [e]
(-> e
(assoc :kuti/type :stacked-inspiration)
(nested/collapse-one :stacked-inspiration/image-attachment)
record/timestamp
record/publish
record/save!))
(defn get [id]
(expand-all (record/get id)))
| null | https://raw.githubusercontent.com/pariyatti/kosa/42bbbae367d3ee4e028bdb812c2def1181228c93/src/kosa/mobile/today/stacked_inspiration/db.clj | clojure | (ns kosa.mobile.today.stacked-inspiration.db
(:refer-clojure :exclude [list get])
(:require [kuti.record :as record]
[kuti.record.query :as query]
[kuti.storage.nested :refer [expand-all]]
[kuti.record.nested :as nested]))
(defn list []
(map expand-all (record/list :stacked-inspiration)))
(defn find-all [attr param]
(query/find-all :stacked-inspiration attr param))
(defn save! [e]
(-> e
(assoc :kuti/type :stacked-inspiration)
(nested/collapse-one :stacked-inspiration/image-attachment)
record/timestamp
record/publish
record/save!))
(defn get [id]
(expand-all (record/get id)))
|
|
01a2e51d87a23d1ec1207c995dd9a452c7d2517bfd94eb9917c22d9be8570cce | pa-ba/compdata-param | SmartConstructors.hs | # LANGUAGE TemplateHaskell , CPP #
--------------------------------------------------------------------------------
-- |
Module : Data . Comp . . Multi . Derive . SmartConstructors
Copyright : ( c ) 2011 ,
-- License : BSD3
Maintainer : < >
-- Stability : experimental
Portability : non - portable ( GHC Extensions )
--
-- Automatically derive smart constructors for higher-order difunctors.
--
--------------------------------------------------------------------------------
module Data.Comp.Param.Multi.Derive.SmartConstructors
(
smartConstructors
) where
import Language.Haskell.TH hiding (Cxt)
import Data.Comp.Derive.Utils
import Data.Comp.Param.Multi.Sum
import Data.Comp.Param.Multi.Term
import Data.Comp.Param.Multi.HDifunctor
import Control.Arrow ((&&&))
import Control.Monad
| Derive smart constructors for a higher - order difunctor . The smart
constructors are similar to the ordinary constructors , but a
' inject . is automatically inserted .
constructors are similar to the ordinary constructors, but a
'inject . hdimap Var id' is automatically inserted. -}
smartConstructors :: Name -> Q [Dec]
smartConstructors fname = do
Just (DataInfo _cxt tname targs constrs _deriving) <- abstractNewtypeQ $ reify fname
let iVar = tyVarBndrName $ last targs
let cons = map (abstractConType &&& iTp iVar) constrs
liftM concat $ mapM (genSmartConstr (map tyVarBndrName targs) tname) cons
where iTp iVar (ForallC _ cxt constr) =
Check if the GADT phantom type is constrained
case [y | Just (x, y) <- map isEqualP cxt, x == VarT iVar] of
[] -> case constr of
#if __GLASGOW_HASKELL__ >= 800
GadtC _ _ (AppT _ tp) -> Just tp
#endif
_ -> Nothing
tp:_ -> Just tp
iTp _ _ = Nothing
genSmartConstr targs tname ((name, args), miTp) = do
let bname = nameBase name
genSmartConstr' targs tname (mkName $ 'i' : bname) name args miTp
genSmartConstr' targs tname sname name args miTp = do
varNs <- newNames args "x"
let pats = map varP varNs
vars = map varE varNs
val = foldl appE (conE name) vars
sig = genSig targs tname sname args miTp
function = [funD sname [clause pats (normalB [|inject (hdimap Var id $val)|]) []]]
sequence $ sig ++ function
isVar (VarT n) = [n]
isVar _ = []
genSig targs tname sname 0 miTp = (:[]) $ do
hvar <- newName "h"
fvar <- newName "f"
avar <- newName "a"
bvar <- newName "b"
ivar <- newName "i"
let targs' = init $ init $ init targs
vars = hvar:fvar:avar:bvar:maybe [ivar] isVar miTp++targs'
h = varT hvar
f = varT fvar
a = varT avar
b = varT bvar
i = varT ivar
ftype = foldl appT (conT tname) (map varT targs')
constr = (conT ''(:<:) `appT` ftype) `appT` f
typ = foldl appT (conT ''Cxt) [h, f, a, b,maybe i return miTp]
typeSig = forallT (map PlainTV vars) (sequence [constr]) typ
sigD sname typeSig
genSig _ _ _ _ _ = []
| null | https://raw.githubusercontent.com/pa-ba/compdata-param/5d6b0afa95a27fd3233f86e5efc6e6a6080f4236/src/Data/Comp/Param/Multi/Derive/SmartConstructors.hs | haskell | ------------------------------------------------------------------------------
|
License : BSD3
Stability : experimental
Automatically derive smart constructors for higher-order difunctors.
------------------------------------------------------------------------------ | # LANGUAGE TemplateHaskell , CPP #
Module : Data . Comp . . Multi . Derive . SmartConstructors
Copyright : ( c ) 2011 ,
Maintainer : < >
Portability : non - portable ( GHC Extensions )
module Data.Comp.Param.Multi.Derive.SmartConstructors
(
smartConstructors
) where
import Language.Haskell.TH hiding (Cxt)
import Data.Comp.Derive.Utils
import Data.Comp.Param.Multi.Sum
import Data.Comp.Param.Multi.Term
import Data.Comp.Param.Multi.HDifunctor
import Control.Arrow ((&&&))
import Control.Monad
| Derive smart constructors for a higher - order difunctor . The smart
constructors are similar to the ordinary constructors , but a
' inject . is automatically inserted .
constructors are similar to the ordinary constructors, but a
'inject . hdimap Var id' is automatically inserted. -}
smartConstructors :: Name -> Q [Dec]
smartConstructors fname = do
Just (DataInfo _cxt tname targs constrs _deriving) <- abstractNewtypeQ $ reify fname
let iVar = tyVarBndrName $ last targs
let cons = map (abstractConType &&& iTp iVar) constrs
liftM concat $ mapM (genSmartConstr (map tyVarBndrName targs) tname) cons
where iTp iVar (ForallC _ cxt constr) =
Check if the GADT phantom type is constrained
case [y | Just (x, y) <- map isEqualP cxt, x == VarT iVar] of
[] -> case constr of
#if __GLASGOW_HASKELL__ >= 800
GadtC _ _ (AppT _ tp) -> Just tp
#endif
_ -> Nothing
tp:_ -> Just tp
iTp _ _ = Nothing
genSmartConstr targs tname ((name, args), miTp) = do
let bname = nameBase name
genSmartConstr' targs tname (mkName $ 'i' : bname) name args miTp
genSmartConstr' targs tname sname name args miTp = do
varNs <- newNames args "x"
let pats = map varP varNs
vars = map varE varNs
val = foldl appE (conE name) vars
sig = genSig targs tname sname args miTp
function = [funD sname [clause pats (normalB [|inject (hdimap Var id $val)|]) []]]
sequence $ sig ++ function
isVar (VarT n) = [n]
isVar _ = []
genSig targs tname sname 0 miTp = (:[]) $ do
hvar <- newName "h"
fvar <- newName "f"
avar <- newName "a"
bvar <- newName "b"
ivar <- newName "i"
let targs' = init $ init $ init targs
vars = hvar:fvar:avar:bvar:maybe [ivar] isVar miTp++targs'
h = varT hvar
f = varT fvar
a = varT avar
b = varT bvar
i = varT ivar
ftype = foldl appT (conT tname) (map varT targs')
constr = (conT ''(:<:) `appT` ftype) `appT` f
typ = foldl appT (conT ''Cxt) [h, f, a, b,maybe i return miTp]
typeSig = forallT (map PlainTV vars) (sequence [constr]) typ
sigD sname typeSig
genSig _ _ _ _ _ = []
|
701ccac331a32ab0585735a98ae808757ee117970092cdf477c0fc8eb6ca2ad8 | flavioc/cl-hurd | msg-server.lisp |
(in-package :mach)
(defcfun ("mach_msg_server_timeout" %mach-msg-server-timeout)
err
(demuxer :pointer)
(max-size msg-size)
(port-set port)
(options msg-option)
(timeout msg-timeout))
(defmacro msg-server-timeout (demuxer port-set &optional timeout max-size)
"Receive RPC request messages on port-set and pass them to function demuxer with a timeout."
(with-gensyms (callback-name timeout-val)
`(progn
(defcallback ,callback-name :boolean ((in :pointer) (out :pointer))
(funcall ,demuxer in out))
(let ((,timeout-val ,(if (null timeout) 0 timeout)))
(%mach-msg-server-timeout (callback ,callback-name)
,(if (null max-size) 0 max-size)
,port-set
(if (plusp ,timeout-val) '(:rcv-timeout) '())
,timeout-val)))))
(defcfun ("mach_msg_server" %mach-msg-server)
err
(demuxer :pointer)
(max-size msg-size)
(rcv-name port))
(defmacro msg-server (demuxer port-set &optional max-size)
"Receive RPC request messages on port-set and pass them to function demuxer."
(with-gensyms (callback-name)
`(progn
(defcallback ,callback-name :boolean ((in :pointer) (out :pointer))
(funcall ,demuxer in out))
(%mach-msg-server (callback ,callback-name)
,(if (null max-size)
0
max-size)
,port-set))))
| null | https://raw.githubusercontent.com/flavioc/cl-hurd/982232f47d1a0ff4df5fde2edad03b9df871470a/mach/msg-server.lisp | lisp |
(in-package :mach)
(defcfun ("mach_msg_server_timeout" %mach-msg-server-timeout)
err
(demuxer :pointer)
(max-size msg-size)
(port-set port)
(options msg-option)
(timeout msg-timeout))
(defmacro msg-server-timeout (demuxer port-set &optional timeout max-size)
"Receive RPC request messages on port-set and pass them to function demuxer with a timeout."
(with-gensyms (callback-name timeout-val)
`(progn
(defcallback ,callback-name :boolean ((in :pointer) (out :pointer))
(funcall ,demuxer in out))
(let ((,timeout-val ,(if (null timeout) 0 timeout)))
(%mach-msg-server-timeout (callback ,callback-name)
,(if (null max-size) 0 max-size)
,port-set
(if (plusp ,timeout-val) '(:rcv-timeout) '())
,timeout-val)))))
(defcfun ("mach_msg_server" %mach-msg-server)
err
(demuxer :pointer)
(max-size msg-size)
(rcv-name port))
(defmacro msg-server (demuxer port-set &optional max-size)
"Receive RPC request messages on port-set and pass them to function demuxer."
(with-gensyms (callback-name)
`(progn
(defcallback ,callback-name :boolean ((in :pointer) (out :pointer))
(funcall ,demuxer in out))
(%mach-msg-server (callback ,callback-name)
,(if (null max-size)
0
max-size)
,port-set))))
|
|
583f566fa165f8dd29b9a9c139bcb873ed955a541bff6fe5717d895dd841f017 | typelead/eta | GivenTypeSynonym.hs | # LANGUAGE TypeFamilies #
module Main where
data A a
type T a = A a
f :: (A a ~ T Int) => a -> Int
f x = x
main :: IO ()
main = return ()
| null | https://raw.githubusercontent.com/typelead/eta/97ee2251bbc52294efbf60fa4342ce6f52c0d25c/tests/suite/typecheck/compile/GivenTypeSynonym.hs | haskell | # LANGUAGE TypeFamilies #
module Main where
data A a
type T a = A a
f :: (A a ~ T Int) => a -> Int
f x = x
main :: IO ()
main = return ()
|
|
6c349ef336a5732bea3c0e097cb8a6ab3e392d2cdf718e46f1fc81685fab80d4 | deadcode/Learning-CL--David-Touretzky | 8.29.lisp | (defun my-member (e x)
(cond ((null x) nil)
((equal e (first x)) x)
(t (my-member e (rest x)))))
(let ((test1 '(my-member 'c '(a b c d e f)))
(test2 '(my-member 'f '(a b c d e f)))
(test3 '(my-member 'g '(a b c d e f)))
(test4 '(my-member 'g '())))
(format t "~s = ~s~%" test1 (eval test1))
(format t "~s = ~s~%" test2 (eval test2))
(format t "~s = ~s~%" test3 (eval test3))
(format t "~s = ~s~%" test4 (eval test4)))
| null | https://raw.githubusercontent.com/deadcode/Learning-CL--David-Touretzky/b4557c33f58e382f765369971e6a4747c27ca692/Chapter%208/8.29.lisp | lisp | (defun my-member (e x)
(cond ((null x) nil)
((equal e (first x)) x)
(t (my-member e (rest x)))))
(let ((test1 '(my-member 'c '(a b c d e f)))
(test2 '(my-member 'f '(a b c d e f)))
(test3 '(my-member 'g '(a b c d e f)))
(test4 '(my-member 'g '())))
(format t "~s = ~s~%" test1 (eval test1))
(format t "~s = ~s~%" test2 (eval test2))
(format t "~s = ~s~%" test3 (eval test3))
(format t "~s = ~s~%" test4 (eval test4)))
|
|
a2c550311159e23b4b5244b172288f57f67b36fda3812ac0b2d52086ad834e59 | larcenists/larceny | cpstak.scm | CPSTAK -- A continuation - passing version of the TAK benchmark .
A good test of first class procedures and tail recursion .
(define (cpstak x y z)
(define (tak x y z k)
(if (not (< y x))
(k z)
(tak (- x 1)
y
z
(lambda (v1)
(tak (- y 1)
z
x
(lambda (v2)
(tak (- z 1)
x
y
(lambda (v3)
(tak v1 v2 v3 k)))))))))
(tak x y z (lambda (a) a)))
(define (main . args)
(run-benchmark
"cpstak"
cpstak-iters
(lambda () (cpstak 18 12 6))
(lambda (result) (equal? result 7))))
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/test/Stress/src/cpstak.scm | scheme | CPSTAK -- A continuation - passing version of the TAK benchmark .
A good test of first class procedures and tail recursion .
(define (cpstak x y z)
(define (tak x y z k)
(if (not (< y x))
(k z)
(tak (- x 1)
y
z
(lambda (v1)
(tak (- y 1)
z
x
(lambda (v2)
(tak (- z 1)
x
y
(lambda (v3)
(tak v1 v2 v3 k)))))))))
(tak x y z (lambda (a) a)))
(define (main . args)
(run-benchmark
"cpstak"
cpstak-iters
(lambda () (cpstak 18 12 6))
(lambda (result) (equal? result 7))))
|
|
40051250a2d34941505967f3abe953b85a22ef6287f68724c97c4df832b0f1fd | freckle/stackctl | Options.hs | module Stackctl.Options
( Options
, envParser
, optionsParser
) where
import Stackctl.Prelude
import Data.Semigroup.Generic
import qualified Env
import Options.Applicative
import Stackctl.ColorOption
import Stackctl.DirectoryOption
import Stackctl.FilterOption
import Stackctl.VerboseOption
data Options = Options
{ oDirectory :: Maybe DirectoryOption
, oFilter :: Maybe FilterOption
, oColor :: Maybe ColorOption
, oVerbose :: Verbosity
}
deriving stock Generic
deriving Semigroup via GenericSemigroupMonoid Options
directoryL :: Lens' Options (Maybe DirectoryOption)
directoryL = lens oDirectory $ \x y -> x { oDirectory = y }
filterL :: Lens' Options (Maybe FilterOption)
filterL = lens oFilter $ \x y -> x { oFilter = y }
instance HasDirectoryOption Options where
directoryOptionL = directoryL . maybeLens defaultDirectoryOption
instance HasFilterOption Options where
filterOptionL = filterL . maybeLens defaultFilterOption
instance HasColorOption Options where
colorOptionL = lens oColor $ \x y -> x { oColor = y }
instance HasVerboseOption Options where
verboseOptionL = lens oVerbose $ \x y -> x { oVerbose = y }
-- brittany-disable-next-binding
envParser :: Env.Parser Env.Error Options
envParser = Env.prefixed "STACKCTL_" $ Options
<$> optional envDirectoryOption
<*> optional (envFilterOption "specifications")
use
use LOG_LEVEL
-- brittany-disable-next-binding
optionsParser :: Parser Options
optionsParser = Options
<$> optional directoryOption
<*> optional (filterOption "specifications")
<*> optional colorOption
<*> verboseOption
| null | https://raw.githubusercontent.com/freckle/stackctl/b04e1790dc523cea39e07c868b4fa328f4e453cb/src/Stackctl/Options.hs | haskell | brittany-disable-next-binding
brittany-disable-next-binding | module Stackctl.Options
( Options
, envParser
, optionsParser
) where
import Stackctl.Prelude
import Data.Semigroup.Generic
import qualified Env
import Options.Applicative
import Stackctl.ColorOption
import Stackctl.DirectoryOption
import Stackctl.FilterOption
import Stackctl.VerboseOption
data Options = Options
{ oDirectory :: Maybe DirectoryOption
, oFilter :: Maybe FilterOption
, oColor :: Maybe ColorOption
, oVerbose :: Verbosity
}
deriving stock Generic
deriving Semigroup via GenericSemigroupMonoid Options
directoryL :: Lens' Options (Maybe DirectoryOption)
directoryL = lens oDirectory $ \x y -> x { oDirectory = y }
filterL :: Lens' Options (Maybe FilterOption)
filterL = lens oFilter $ \x y -> x { oFilter = y }
instance HasDirectoryOption Options where
directoryOptionL = directoryL . maybeLens defaultDirectoryOption
instance HasFilterOption Options where
filterOptionL = filterL . maybeLens defaultFilterOption
instance HasColorOption Options where
colorOptionL = lens oColor $ \x y -> x { oColor = y }
instance HasVerboseOption Options where
verboseOptionL = lens oVerbose $ \x y -> x { oVerbose = y }
envParser :: Env.Parser Env.Error Options
envParser = Env.prefixed "STACKCTL_" $ Options
<$> optional envDirectoryOption
<*> optional (envFilterOption "specifications")
use
use LOG_LEVEL
optionsParser :: Parser Options
optionsParser = Options
<$> optional directoryOption
<*> optional (filterOption "specifications")
<*> optional colorOption
<*> verboseOption
|
224b8818826822037085b3a7ec0c8a14bd603f3e48150cd9fa6684f14635a3fb | mrkgnao/pebble | Simplify.hs | module Simplify where
import Data.List
import Data.Maybe
import Expr
import qualified Functions as F
-- | Cleans up nonsense like X :^ X :* (X :* ((Const 1.0 :/ X) :* Const 1.0) :+
-- | Const 1.0 :* Apply "log" X) into (hopefully) nicer expressions like
-- | X :^ X ((Const 1.0) :+ Apply "log" X),
simplify :: Expr -> Expr
simplify (Const a :+ Const b) = Const (a + b)
simplify (a :+ Const 0) = simplify a
simplify (Const 0 :+ a) = simplify a
simplify (Const a :* Const b) = Const (a * b)
simplify (a :* Const 1) = simplify a
simplify (Const 1 :* a) = simplify a
simplify (a :* Const 0) = Const 0
simplify (Const 0 :* a) = Const 0
simplify (Const a :^ Const b) = Const (a ** b)
simplify (a :^ Const 1) = simplify a
simplify (a :^ Const 0) = Const 1
simplify ((c :^ Const b) :^ Const a) =
c :^ (Const (a * b))
-- | Multiplication
-- m * (n * f) = (m * n) * f
simplify (Const a :* (Const b :* expr)) =
(Const $ a * b) :* (simplify expr)
-- mfn = mnf
simplify (Const a :* expr :* Const b) =
(Const $ a * b) :* (simplify expr)
-- fmn = mnf
simplify (expr :* Const a :* Const b) =
(Const $ a * b) :* (simplify expr)
-- m(f+g) = mf+mg
simplify (Const a :* (b :+ c)) =
(Const a :* (simplify b)) :+ (Const a :* (simplify c))
simplify (Const 0 :/ a) = Const 0
simplify (Const a :/ Const 0) =
error "Division by zero!"
simplify (Const a :/ Const b) = Const (a / b)
simplify (a :/ Const 1) = simplify a
simplify (a :/ b) | a == b = Const 1
simplify (a :* (Const b :/ c)) = Const b :* simplify (a :/ c)
-- | Trigonometric inverses
simplify (k@(Const _) :/ (Apply b e))
| isJust lk = k :* val (simplify e)
where lk = lookup b F.invsList
(Just val) = lk
simplify ((Apply f e1) :* (Apply g e2))
| e1 == e2 && isJust lk =
fg $ simplify e1
where lk = lookup (f,g) F.prodList
(Just fg) = lk
simplify ((Apply f e1) :/ (Apply g e2))
| e1 == e2 && isJust lk =
fg $ simplify e1
where lk = lookup (f,g) F.quotList
(Just fg) = lk
simplify ((Apply f x) :* (Apply g y))
| f == g && x == y = ((Apply f x) :^ (Const 2))
simplify (a :/ b) = (simplify a) :/ (simplify b)
simplify (a :^ b) = (simplify a) :^ (simplify b)
simplify (a :* b) = (simplify a) :* (simplify b)
simplify (a :+ b) = (simplify a) :+ (simplify b)
simplify x = x
fullSimplify expr =
fullSimplify' expr
(Const 0) -- placeholder
where fullSimplify' cur last
| cur == last = cur
| otherwise =
let cur' = simplify cur
in fullSimplify' cur' cur
| null | https://raw.githubusercontent.com/mrkgnao/pebble/b6f9e8220f76b1f07f419e6815e946328afb9244/Simplify.hs | haskell | | Cleans up nonsense like X :^ X :* (X :* ((Const 1.0 :/ X) :* Const 1.0) :+
| Const 1.0 :* Apply "log" X) into (hopefully) nicer expressions like
| X :^ X ((Const 1.0) :+ Apply "log" X),
| Multiplication
m * (n * f) = (m * n) * f
mfn = mnf
fmn = mnf
m(f+g) = mf+mg
| Trigonometric inverses
placeholder | module Simplify where
import Data.List
import Data.Maybe
import Expr
import qualified Functions as F
simplify :: Expr -> Expr
simplify (Const a :+ Const b) = Const (a + b)
simplify (a :+ Const 0) = simplify a
simplify (Const 0 :+ a) = simplify a
simplify (Const a :* Const b) = Const (a * b)
simplify (a :* Const 1) = simplify a
simplify (Const 1 :* a) = simplify a
simplify (a :* Const 0) = Const 0
simplify (Const 0 :* a) = Const 0
simplify (Const a :^ Const b) = Const (a ** b)
simplify (a :^ Const 1) = simplify a
simplify (a :^ Const 0) = Const 1
simplify ((c :^ Const b) :^ Const a) =
c :^ (Const (a * b))
simplify (Const a :* (Const b :* expr)) =
(Const $ a * b) :* (simplify expr)
simplify (Const a :* expr :* Const b) =
(Const $ a * b) :* (simplify expr)
simplify (expr :* Const a :* Const b) =
(Const $ a * b) :* (simplify expr)
simplify (Const a :* (b :+ c)) =
(Const a :* (simplify b)) :+ (Const a :* (simplify c))
simplify (Const 0 :/ a) = Const 0
simplify (Const a :/ Const 0) =
error "Division by zero!"
simplify (Const a :/ Const b) = Const (a / b)
simplify (a :/ Const 1) = simplify a
simplify (a :/ b) | a == b = Const 1
simplify (a :* (Const b :/ c)) = Const b :* simplify (a :/ c)
simplify (k@(Const _) :/ (Apply b e))
| isJust lk = k :* val (simplify e)
where lk = lookup b F.invsList
(Just val) = lk
simplify ((Apply f e1) :* (Apply g e2))
| e1 == e2 && isJust lk =
fg $ simplify e1
where lk = lookup (f,g) F.prodList
(Just fg) = lk
simplify ((Apply f e1) :/ (Apply g e2))
| e1 == e2 && isJust lk =
fg $ simplify e1
where lk = lookup (f,g) F.quotList
(Just fg) = lk
simplify ((Apply f x) :* (Apply g y))
| f == g && x == y = ((Apply f x) :^ (Const 2))
simplify (a :/ b) = (simplify a) :/ (simplify b)
simplify (a :^ b) = (simplify a) :^ (simplify b)
simplify (a :* b) = (simplify a) :* (simplify b)
simplify (a :+ b) = (simplify a) :+ (simplify b)
simplify x = x
fullSimplify expr =
fullSimplify' expr
where fullSimplify' cur last
| cur == last = cur
| otherwise =
let cur' = simplify cur
in fullSimplify' cur' cur
|
aa6748e8c9b78cce6ca3a8558c680678e52d87bdc1046520f77b1f7fa8833d27 | tek/ribosome | Main.hs | module Main where
import Polysemy.Test (unitTest)
import Ribosome.Menu.Test.FilterTest (test_filterFuzzy)
import Ribosome.Menu.Test.MenuTest (test_menu)
import Ribosome.Menu.Test.NvimMenuTest (test_nvimMenu)
import Test.Tasty (TestTree, defaultMain, testGroup)
tests :: TestTree
tests =
testGroup "menu" [
test_menu,
test_nvimMenu,
unitTest "fuzzy filter" test_filterFuzzy
]
main :: IO ()
main =
defaultMain tests
| null | https://raw.githubusercontent.com/tek/ribosome/ec3dd63ad47322e7fec66043dd7e6ade2f547ac1/packages/menu/test/Main.hs | haskell | module Main where
import Polysemy.Test (unitTest)
import Ribosome.Menu.Test.FilterTest (test_filterFuzzy)
import Ribosome.Menu.Test.MenuTest (test_menu)
import Ribosome.Menu.Test.NvimMenuTest (test_nvimMenu)
import Test.Tasty (TestTree, defaultMain, testGroup)
tests :: TestTree
tests =
testGroup "menu" [
test_menu,
test_nvimMenu,
unitTest "fuzzy filter" test_filterFuzzy
]
main :: IO ()
main =
defaultMain tests
|
|
8ffbf9ecd567c7023a384b906f840773a3bcb89c4a371c3f24dedeab8a129ea3 | shonfeder/um-abt | abt.ml | Copyright ( c ) 2021 Shon Feder
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. *)
module Log = Logs
module type Operator = sig
(** An operator *)
type 'a t [@@deriving sexp]
val map : ('a -> 'b) -> 'a t -> 'b t
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val fold : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a
val to_string : string t -> string
end
module Var = struct
module Binding = struct
(* A private table of the number of times a name has been bound *)
let bnd_names : (string, int) Hashtbl.t = Hashtbl.create 100
let name_count n = Hashtbl.find_opt bnd_names n |> Option.value ~default:0
let add_name n =
let count = name_count n + 1 in
Hashtbl.add bnd_names n count;
count
open Sexplib.Std
type t = (string * int) ref [@@deriving sexp]
let v s = ref (s, add_name s)
(** Just the string component of the name *)
let name bnd = !bnd |> fst
(** Representation of name that includes the unique id *)
let name_debug bnd =
let n, c = !bnd in
n ^ Int.to_string c
let compare a b =
(* Physical equality of references *)
if a == b then
0
else
let a_name, a_count = !a in
let b_name, b_count = !b in
let name_cmp = String.compare a_name b_name in
if name_cmp = 0 then
Int.compare a_count b_count
else
name_cmp
let equal a b = Int.equal (compare a b) 0
end
module T = struct
open Sexplib.Std
type t =
| Free of string
| Bound of Binding.t
[@@deriving sexp]
let compare a b =
match (a, b) with
| Bound a, Bound b -> Binding.compare a b
| Free a, Free b -> String.compare a b
| Free _, Bound _ -> 1 (* Free vars are greater than bound vars *)
| Bound _, Free _ -> -1
end
module Set = Set.Make (T)
module Map = Map.Make (T)
include T
let equal a b = Int.equal (compare a b) 0
let is_free = function
| Free _ -> true
| _ -> false
let is_bound t = not (is_free t)
let name = function
| Free s -> s
| Bound b -> Binding.name b
let to_string = name
let to_string_debug = function
| Free s -> s
| Bound b -> Binding.name_debug b
let v s = Free s
let bind v b =
match v with
| Bound _ -> None
| Free name ->
if String.equal name (Binding.name b) then
Some (Bound b)
else
None
let of_binding b = Bound b
let to_binding = function
| Bound b -> Some b
| Free _ -> None
let is_bound_to v bnd =
match v with
| Free _ -> false
| Bound b -> b == bnd
end
module Operator_aux (O : Operator) = struct
(* Adds auxiliary functions over an operator module*)
(** [same o o'] is [true] if the operators are operators are the same without respect to their
arguments *)
let same : 'a O.t -> 'a O.t -> bool =
fun o o' ->
let to_unit = O.map (Fun.const ()) in
O.equal Unit.equal (to_unit o) (to_unit o')
TODO : Construct a lazy / incremental seq instead
let to_list : 'a O.t -> 'a List.t =
fun o -> O.fold (Fun.flip List.cons) [] o |> List.rev
* Derives a fold2 implementation from the required fold
let fold2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b O.t -> 'c O.t -> 'a =
fun f init o o' ->
let app (list_o', acc) o =
match list_o' with
| [] ->
raise
(Invalid_argument "Operator_aux.fold2 on operators of unequal size")
| o' :: res -> (res, f acc o o')
in
O.fold app (to_list o', init) o |> snd
include O
end
module Bndmap : sig
type t
val empty : t
val add : left:Var.Binding.t -> right:Var.Binding.t -> t -> t
type lookup = Var.Binding.t -> t -> Var.Binding.t option
[ find bnd m ] is the binding corresponding to [ bnd ] , regardless of which
side it was entered from
side it was entered from *)
(* val find : lookup *)
[ find_left bnd m ] is [ bnd ] if [ bnd ] was entered from the left , otherwise it
is the left - side binding corresponding to the one entered on the right
is the left-side binding corresponding to the one entered on the right *)
val find_left : lookup
[ find_left bnd m ] is [ bnd ] if [ bnd ] was entered from the right , otherwise it
is the left - side binding corresponding to the one entered on the left
is the left-side binding corresponding to the one entered on the left *)
val find_right : lookup
end = struct
module M = Map.Make (Var.Binding)
type t =
{ left : Var.Binding.t M.t
; right : Var.Binding.t M.t
}
let empty = { left = M.empty; right = M.empty }
let add ~left ~right m =
{ left = M.add left right m.left; right = M.add right left m.right }
type lookup = Var.Binding.t -> t -> Var.Binding.t option
Var . Binding.t are unique ( because identified by pointer location reference )
so we do n't need safety constraints on lookup etc .
so we don't need safety constraints on lookup etc. *)
(* let find k m =
* match M.find_opt k m.left with
* | None -> M.find_opt k m.right
* | Some v -> Some v *)
let find_left k m =
if M.mem k m.left then
Some k
else
M.find_opt k m.right
let find_right k m =
if M.mem k m.right then
Some k
else
M.find_opt k m.left
end
module type Syntax = sig
module Op : Operator
* The type of ABT 's constructed from the operators defind in [ O ]
type t = private
| Var of Var.t (** Variables *)
| Bnd of Var.Binding.t * t (** Scoped variable binding *)
| Opr of t Op.t (** Operators specified in {!Op} *)
[@@deriving sexp]
val bind : Var.Binding.t -> t -> t
* [ bind bnd t ] is a branch of the ABT , in which any free variables in [ t ]
matching the name of [ bnd ] are bound to [ bnd ] .
matching the name of [bnd] are bound to [bnd]. *)
val of_var : Var.t -> t
* [ of_var v ] is a leaf in the ABT consisting of the variable [ v ]
val v : string -> t
* [ v x ] is a leaf in the ABT consisting of a variable named [ x ]
val op : t Op.t -> t
* [ op o ] is a branch in the ABT consisting of the operator [ o ]
val ( #. ) : string -> t -> t
(** [x #. t] is a new abt obtained by binding all {i free} variables named
[x] in [t]
Note that this does {b not} substitute variables for a {i value}, (for
which, see {!subst}). This only binds the free variables within the scope
of an abstraction that ranges over the given (sub) abt [t]. *)
val subst : Var.Binding.t -> value:t -> t -> t
* [ subst bnd ~value t ] is a new ABT obtained by substituting [ value ] for
all variables bound to [ bnd ] .
all variables bound to [bnd]. *)
val subst_var : string -> value:t -> t -> t
* [ subst_var name ~value t ] is a new abt obtained by substituting [ value ] for
the outermost scope of variables bound to [ name ] in [ t ]
the outermost scope of variables bound to [name] in [t] *)
val to_sexp : t -> Sexplib.Sexp.t
(** [to_sexp t] is the representation of [t] as an s-expression *)
val of_sexp : Sexplib.Sexp.t -> t
(** [of_sexp s] is Abt represented by the s-expression [s] *)
val to_string : t -> string
(** [to_string t] is the representation of [t] as a string *)
val equal : t -> t -> bool
(** [equal t t'] is [true] when [t] and [t'] are alpha equivalent and [false] otherwise *)
val case :
var:(Var.t -> 'a)
-> bnd:(Var.Binding.t * t -> 'a)
-> opr:(t Op.t -> 'a)
-> t
-> 'a
* Case analysis for eleminating ABTs
This is an alternative to using pattern - based elimination .
@param var function to apply to variables
@param bnd function to apply to bindings
@param opr function to apply to operators
This is an alternative to using pattern-based elimination.
@param var function to apply to variables
@param bnd function to apply to bindings
@param opr function to apply to operators *)
val subterms : t -> t list
(** [subterms t] is a list of all the subterms in [t], including [t] itself *)
val free_vars : t -> Var.Set.t
(** [free_vars t] is the set of variables that are free in in [t] *)
val is_closed : t -> bool
(** [is_closed t] if [true] if there are no free variables in [t], otherwise false *)
module Unification : sig
module Subst : sig
type term = t
* An alias for the type of the ABT for reference in the context of the substitution
type t
(** Substitutions mapping free variables to terms *)
val find : Var.t -> t -> term option
(** [find v s] is [Some term] if [v] is bound to [term] in the
substitution [s], otherwise it is [None]*)
val bindings : t -> (Var.t * term) list
(** [bindings s] is a list of all the bindings in [s] *)
val to_string : t -> string
end
type error =
[ `Unification of Var.t option * t * t
| `Occurs of Var.t * t
| `Cycle of Subst.t
]
(** Errors returned when unification fails *)
val unify : t -> t -> (t * Subst.t, error) Result.t
(** [unify a b] is [Ok (union, substitution)] when [a] and [b] can be
unified into the term [union] and [substitution] is the most general
unifier. Otherwise it is [Error err)], for which, see {!type:error} *)
val ( =.= ) : t -> t -> (t, error) Result.t
* [ a = .= b ] is [ unify a b |
val ( =?= ) : t -> t -> bool
(** [a =?= b] is [true] iff [a =.= b] is an [Ok _] value *)
end
end
module Make (Op : Operator) = struct
module Op = Op
type t =
| Var of Var.t
| Bnd of Var.Binding.t * t
| Opr of t Op.t
[@@deriving sexp]
let to_sexp = sexp_of_t
let of_sexp = t_of_sexp
let rec to_string t =
t |> function
| Var v -> Var.to_string v
| Bnd (b, abt) -> Var.(name @@ of_binding b) ^ "." ^ to_string abt
| Opr op -> Op.map to_string op |> Op.to_string
Alpha - equivalence is derived by checking that the ABTs are identical
modulo the pointer structure of any bound variables .
- For operators , this just amounts to checking the equality supplied by the
given { ! : Operator } , [ O ] .
- For variable , we check that the pointer { i structure } is equivalent , and
do take no account of names , since alpha equivalence is fundamentally
concerned with the ( anonymous ) binding structure of ABTs .
modulo the pointer structure of any bound variables.
- For operators, this just amounts to checking the equality supplied by the
given {!modtype:Operator}, [O].
- For variable, we check that the pointer {i structure} is equivalent, and
do take no account of names, since alpha equivalence is fundamentally
concerned with the (anonymous) binding structure of ABTs. *)
let equal : t -> t -> bool =
let bindings_correlated bndmap bnd bnd' =
match Bndmap.find_right bnd bndmap with
| Some bnd'' -> Var.Binding.equal bnd' bnd''
| None -> false
in
let rec equal : Bndmap.t -> t -> t -> bool =
fun bndmap t t' ->
[%log debug "check ɑ-equality of %s %s" (to_string t) (to_string t')];
match (t, t') with
| Opr o, Opr o' -> Op.equal (equal bndmap) o o'
| Bnd (left, t), Bnd (right, t') ->
Associate corresponding bindings in the bindmap
equal (Bndmap.add ~left ~right bndmap) t t'
| Var (Bound bnd), Var (Bound bnd') -> bindings_correlated bndmap bnd bnd'
| Var v, Var v' -> Var.equal v v'
| _ -> false
in
fun a b -> equal Bndmap.empty a b
let of_var : Var.t -> t = fun v -> Var v
let bind : Var.Binding.t -> t -> t =
fun bnd t ->
let rec scope = function
| Opr op -> Opr (Op.map scope op)
| Bnd (b, t) -> Bnd (b, scope t)
| Var v ->
match Var.bind v bnd with
| None -> Var v
| Some v' -> Var v'
in
Bnd (bnd, scope t)
let ( #. ) : string -> t -> t =
fun name abt ->
let binding : Var.Binding.t = Var.Binding.v name in
bind binding abt
let rec subst : Var.Binding.t -> value:t -> t -> t =
fun bnd ~value -> function
| Opr op -> Opr (Op.map (subst bnd ~value) op)
| Bnd (b, t) ->
(* As an optimization, we don't go any deeper if the variable is shadowed.
* We could, safely, but there's no point. *)
if String.equal (Var.Binding.name b) (Var.Binding.name bnd) then
Bnd (b, t)
else
Bnd (b, subst bnd ~value t)
| Var v ->
if Var.is_bound_to v bnd then
value
else
Var v
let rec subst_var : string -> value:t -> t -> t =
fun name ~value -> function
| Var v -> Var v
| Opr op -> Opr (Op.map (subst_var name ~value) op)
| Bnd (b, t) ->
if Var.Binding.name b = name then
subst b ~value t
else
Bnd (b, subst_var name ~value t)
let op a = Opr a
let v : string -> t = fun s -> Var (Var.v s)
let rec subterms : t -> t list =
fun t ->
match t with
| Var _ -> [ t ]
| Bnd (_, t') -> t :: subterms t'
| Opr o -> t :: Op.fold (fun ts t' -> subterms t' @ ts) [] o
let case ~var ~bnd ~opr = function
| Var v -> var v
| Bnd (b, t) -> bnd (b, t)
| Opr o -> opr o
let is_free_var : t -> bool =
fun t ->
match t with
| Var (Free _) -> true
| _ -> false
let free_vars : t -> Var.Set.t =
fun t ->
let rec free fv = function
| Var (Free _ as v) -> Var.Set.add v fv
| Var (Bound _) -> fv
| Bnd (_, t') -> free fv t'
| Opr o -> Op.fold free fv o
in
free Var.Set.empty t
let is_closed : t -> bool = fun t -> Var.Set.is_empty (free_vars t)
module Unification = struct
Initial , naive approach :
* 1 . get all free vars of a and b
* 1 . build mgu and substitute for all vars
* 3 . then check for alpha - equiv
*
* Will take 3n complexity
*
* TODO To optimize : need to unify on a single pass , which will require way of identifying if two
* operators have the same head . Perhaps via an operator function ` sort : O.t - > ( string * int ) ` ?
* 1. get all free vars of a and b
* 1. build mgu and substitute for all vars
* 3. then check for alpha-equiv
*
* Will take 3n complexity
*
* TODO To optimize: need to unify on a single pass, which will require way of identifying if two
* operators have the same head. Perhaps via an operator function `sort : O.t -> (string * int)`? *)
let fail ?v t t' =
[%log debug "unification failure: %s <> %s " (to_string t) (to_string t')];
`Unification (v, t, t')
let occurs_err v t =
[%log debug "fail: %s ocurrs in %s" (Var.to_string v) (to_string t)];
`Occurs (v, t)
(* Error when a substitution is added for a variable already assigned to an incompatible value *)
module Subst = struct
type term = t
type t =
{ bnds : Bndmap.t (* Correspondences between bindings *)
; vars : term ref Var.Map.t
(* Substitution mappings from free vars to terms *)
}
Substitution maps free variables to mutable refs .
When two free variables are assigned to be aliases , they simply share the same ref .
Therefore , assigning one variable , sufficies to assign all of its aliases .
When two free variables are assigned to be aliases, they simply share the same ref.
Therefore, assigning one variable, sufficies to assign all of its aliases. *)
let empty : t = { bnds = Bndmap.empty; vars = Var.Map.empty }
TODO Work out coherent scheme for dealing with binder transitions !
let ( let* ) = Option.bind
(* Find is left-biased ito alpha equivalent variables *)
let find v ({ bnds; vars } : t) =
let* { contents = term } = Var.Map.find_opt v vars in
match term with
| Var (Bound bnd) ->
Bndmap.find_left bnd bnds
|> Option.map (fun b -> Var.of_binding b |> of_var)
| _ -> Some term
let bindings { vars; _ } =
Var.Map.bindings vars |> List.map (fun (v, t) -> (v, !t))
let term_to_string = to_string
let to_string s =
s
|> bindings
|> List.map (fun (v, term) ->
Printf.sprintf "%s -> %s" (Var.to_string v) (to_string term))
|> String.concat ", "
|> Printf.sprintf "[ %s ]"
let cycle_err s =
[%log debug "fail: cycle between variables %s" (to_string s)];
`Cycle s
let add s v term =
[%log
debug
"add substitution: %s -> %s"
(Var.to_string v)
(term_to_string term)];
if not (Var.is_free v) then
failwith "Invalid argument: Subst.add with non free var ";
(* TODO Remove exponential occurs check *)
if (not (is_free_var term)) && Var.Set.mem v (free_vars term) then
Error (occurs_err v term)
else
let vars = s.vars in
match term with
| Bnd (_, _)
| Opr _ -> (
Var.Map.find_opt v vars |> function
| None -> Ok { s with vars = Var.Map.add v (ref term) vars }
| Some ref_term when equal !ref_term term -> Ok s
| Some ref_var when is_free_var !ref_var ->
ref_var := term;
Ok s
| Some clash_term -> Error (fail ~v term !clash_term))
| Var v' ->
match (Var.Map.find_opt v vars, Var.Map.find_opt v' vars) with
| Some term_ref, None ->
Ok { s with vars = Var.Map.add v' term_ref vars }
| None, Some term_ref' ->
Ok { s with vars = Var.Map.add v term_ref' vars }
| Some term_ref, Some term_ref' ->
TODO Should this be a structural equality check ?
if term_ref == term_ref' then
Ok s
else
Error (fail ~v !term_ref !term_ref')
| None, None ->
let ref_var = ref (of_var v) in
Ok
{ s with
vars = Var.Map.add v ref_var vars |> Var.Map.add v' ref_var
}
let log_substitution s term =
[%log
debug
"applying substitution: %s %s"
(term_to_string term)
(to_string s)]
(* Find the corresponding binding for substitution of a *)
let lookup_binding lookup bnd s =
let ( let* ) = Option.bind in
let default = bnd |> Var.of_binding |> of_var in
Option.value ~default
@@ let* f = lookup in
let* bnd' = f bnd s.bnds in
Some (Var.of_binding bnd' |> of_var)
exception Cycle_in_apply of t
(* Effect the substitution of free variables in a term, according to the subtitution s
- unassigned free var -> free var
- assigned free var -> assigned value
- compound term -> substitute into each of it's compounds
- bound var -> bound var
When [lookup] is provided, it tells us how to find binding
correlates for the apprpriate side of a unification *)
let apply : ?lookup:Bndmap.lookup -> t -> term -> term =
fun ?lookup s term ->
[%log debug "apply invoked for %s" (term_to_string term)];
let lookup = lookup_binding lookup in
(* cyc_vars are the vars we're already tring to substitute for
lets us detect cycles *)
let rec aux cyc_vars s term =
log_substitution s term;
match term with
| Bnd (b, t') -> Bnd (b, aux cyc_vars s t')
| Opr o -> Op.map (aux cyc_vars s) o |> op
| Var (Bound bnd) -> lookup bnd s
| Var (Free _ as v) ->
match Var.Map.find_opt v s.vars with
| None -> term
| Some { contents = substitute } -> (
if Var.Set.mem v cyc_vars then raise (Cycle_in_apply s);
let cyc_vars = Var.Set.add v cyc_vars in
match substitute with
| Var (Bound bnd) -> lookup bnd s
| Var (Free _) -> substitute
| _ ->
(* TODO Shouldn't need to recurse down except to replace bindings for a side *)
aux cyc_vars s substitute)
in
aux Var.Set.empty s term
let ( let* ) = Result.bind
module Op = Operator_aux (Op)
(* Caution: Here be mutability! Never allow a mutable substitution to
escape the abstract type! *)
let build a b =
[%log
debug
"building substitution for %s %s"
(term_to_string a)
(term_to_string b)];
let rec aux s_res a b =
let* s = s_res in
match (a, b) with
| Opr ao, Opr bo when Op.same ao bo -> Op.fold2 aux (Ok s) ao bo
| Bnd (left, a'), Bnd (right, b') ->
(* Correlate the bindings *)
let s = { s with bnds = Bndmap.add ~left ~right s.bnds } in
aux (Ok s) a' b'
| Var (Free _ as v), _ -> add s v b
| _, Var (Free _ as v) -> add s v a
| Var (Bound _), Var (Bound _) ->
(* We can't decide anything about bound variables at this point, assume they are ok *)
Ok s
| _ -> Error (fail a b)
in
let* subst = aux (Ok empty) a b in
try
Var.Map.iter (fun _ cell -> cell := apply subst !cell) subst.vars;
[%log
debug
"substution for %s %s built: %s"
(term_to_string a)
(term_to_string b)
(to_string subst)];
Ok subst
with
| Cycle_in_apply s -> Error (cycle_err s)
end
let ( let* ) = Result.bind
type error =
[ `Unification of Var.t option * t * t
| `Occurs of Var.t * t
| `Cycle of Subst.t
]
let unify a b =
let result =
[%log debug "unification start: %s =.= %s" (to_string a) (to_string b)];
let* subst = Subst.build a b in
let a' = Subst.apply ~lookup:Bndmap.find_left subst a in
let b' = Subst.apply ~lookup:Bndmap.find_right subst b in
[%log
debug
"checking for alpha equivalence: %s = %s"
(to_string a')
(to_string b')];
if equal a' b' then
Ok (a', subst)
else
Error (fail a' b')
in
match result with
| Ok (u, _) ->
[%log
debug
"unification success: %s =.= %s => %s"
(to_string a)
(to_string b)
(to_string u)];
result
| Error _ ->
[%log
debug "unification failure: %s =/= %s" (to_string a) (to_string b)];
result
let ( =.= ) a b = unify a b |> Result.map fst
let ( =?= ) a b = unify a b |> Result.is_ok
end
end
| null | https://raw.githubusercontent.com/shonfeder/um-abt/2b3860b8f9217b04e7cb0645ede7726988c3735b/lib/abt.ml | ocaml | * An operator
A private table of the number of times a name has been bound
* Just the string component of the name
* Representation of name that includes the unique id
Physical equality of references
Free vars are greater than bound vars
Adds auxiliary functions over an operator module
* [same o o'] is [true] if the operators are operators are the same without respect to their
arguments
val find : lookup
let find k m =
* match M.find_opt k m.left with
* | None -> M.find_opt k m.right
* | Some v -> Some v
* Variables
* Scoped variable binding
* Operators specified in {!Op}
* [x #. t] is a new abt obtained by binding all {i free} variables named
[x] in [t]
Note that this does {b not} substitute variables for a {i value}, (for
which, see {!subst}). This only binds the free variables within the scope
of an abstraction that ranges over the given (sub) abt [t].
* [to_sexp t] is the representation of [t] as an s-expression
* [of_sexp s] is Abt represented by the s-expression [s]
* [to_string t] is the representation of [t] as a string
* [equal t t'] is [true] when [t] and [t'] are alpha equivalent and [false] otherwise
* [subterms t] is a list of all the subterms in [t], including [t] itself
* [free_vars t] is the set of variables that are free in in [t]
* [is_closed t] if [true] if there are no free variables in [t], otherwise false
* Substitutions mapping free variables to terms
* [find v s] is [Some term] if [v] is bound to [term] in the
substitution [s], otherwise it is [None]
* [bindings s] is a list of all the bindings in [s]
* Errors returned when unification fails
* [unify a b] is [Ok (union, substitution)] when [a] and [b] can be
unified into the term [union] and [substitution] is the most general
unifier. Otherwise it is [Error err)], for which, see {!type:error}
* [a =?= b] is [true] iff [a =.= b] is an [Ok _] value
As an optimization, we don't go any deeper if the variable is shadowed.
* We could, safely, but there's no point.
Error when a substitution is added for a variable already assigned to an incompatible value
Correspondences between bindings
Substitution mappings from free vars to terms
Find is left-biased ito alpha equivalent variables
TODO Remove exponential occurs check
Find the corresponding binding for substitution of a
Effect the substitution of free variables in a term, according to the subtitution s
- unassigned free var -> free var
- assigned free var -> assigned value
- compound term -> substitute into each of it's compounds
- bound var -> bound var
When [lookup] is provided, it tells us how to find binding
correlates for the apprpriate side of a unification
cyc_vars are the vars we're already tring to substitute for
lets us detect cycles
TODO Shouldn't need to recurse down except to replace bindings for a side
Caution: Here be mutability! Never allow a mutable substitution to
escape the abstract type!
Correlate the bindings
We can't decide anything about bound variables at this point, assume they are ok | Copyright ( c ) 2021 Shon Feder
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. *)
module Log = Logs
module type Operator = sig
type 'a t [@@deriving sexp]
val map : ('a -> 'b) -> 'a t -> 'b t
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val fold : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a
val to_string : string t -> string
end
module Var = struct
module Binding = struct
let bnd_names : (string, int) Hashtbl.t = Hashtbl.create 100
let name_count n = Hashtbl.find_opt bnd_names n |> Option.value ~default:0
let add_name n =
let count = name_count n + 1 in
Hashtbl.add bnd_names n count;
count
open Sexplib.Std
type t = (string * int) ref [@@deriving sexp]
let v s = ref (s, add_name s)
let name bnd = !bnd |> fst
let name_debug bnd =
let n, c = !bnd in
n ^ Int.to_string c
let compare a b =
if a == b then
0
else
let a_name, a_count = !a in
let b_name, b_count = !b in
let name_cmp = String.compare a_name b_name in
if name_cmp = 0 then
Int.compare a_count b_count
else
name_cmp
let equal a b = Int.equal (compare a b) 0
end
module T = struct
open Sexplib.Std
type t =
| Free of string
| Bound of Binding.t
[@@deriving sexp]
let compare a b =
match (a, b) with
| Bound a, Bound b -> Binding.compare a b
| Free a, Free b -> String.compare a b
| Bound _, Free _ -> -1
end
module Set = Set.Make (T)
module Map = Map.Make (T)
include T
let equal a b = Int.equal (compare a b) 0
let is_free = function
| Free _ -> true
| _ -> false
let is_bound t = not (is_free t)
let name = function
| Free s -> s
| Bound b -> Binding.name b
let to_string = name
let to_string_debug = function
| Free s -> s
| Bound b -> Binding.name_debug b
let v s = Free s
let bind v b =
match v with
| Bound _ -> None
| Free name ->
if String.equal name (Binding.name b) then
Some (Bound b)
else
None
let of_binding b = Bound b
let to_binding = function
| Bound b -> Some b
| Free _ -> None
let is_bound_to v bnd =
match v with
| Free _ -> false
| Bound b -> b == bnd
end
module Operator_aux (O : Operator) = struct
let same : 'a O.t -> 'a O.t -> bool =
fun o o' ->
let to_unit = O.map (Fun.const ()) in
O.equal Unit.equal (to_unit o) (to_unit o')
TODO : Construct a lazy / incremental seq instead
let to_list : 'a O.t -> 'a List.t =
fun o -> O.fold (Fun.flip List.cons) [] o |> List.rev
* Derives a fold2 implementation from the required fold
let fold2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b O.t -> 'c O.t -> 'a =
fun f init o o' ->
let app (list_o', acc) o =
match list_o' with
| [] ->
raise
(Invalid_argument "Operator_aux.fold2 on operators of unequal size")
| o' :: res -> (res, f acc o o')
in
O.fold app (to_list o', init) o |> snd
include O
end
module Bndmap : sig
type t
val empty : t
val add : left:Var.Binding.t -> right:Var.Binding.t -> t -> t
type lookup = Var.Binding.t -> t -> Var.Binding.t option
[ find bnd m ] is the binding corresponding to [ bnd ] , regardless of which
side it was entered from
side it was entered from *)
[ find_left bnd m ] is [ bnd ] if [ bnd ] was entered from the left , otherwise it
is the left - side binding corresponding to the one entered on the right
is the left-side binding corresponding to the one entered on the right *)
val find_left : lookup
[ find_left bnd m ] is [ bnd ] if [ bnd ] was entered from the right , otherwise it
is the left - side binding corresponding to the one entered on the left
is the left-side binding corresponding to the one entered on the left *)
val find_right : lookup
end = struct
module M = Map.Make (Var.Binding)
type t =
{ left : Var.Binding.t M.t
; right : Var.Binding.t M.t
}
let empty = { left = M.empty; right = M.empty }
let add ~left ~right m =
{ left = M.add left right m.left; right = M.add right left m.right }
type lookup = Var.Binding.t -> t -> Var.Binding.t option
Var . Binding.t are unique ( because identified by pointer location reference )
so we do n't need safety constraints on lookup etc .
so we don't need safety constraints on lookup etc. *)
let find_left k m =
if M.mem k m.left then
Some k
else
M.find_opt k m.right
let find_right k m =
if M.mem k m.right then
Some k
else
M.find_opt k m.left
end
module type Syntax = sig
module Op : Operator
* The type of ABT 's constructed from the operators defind in [ O ]
type t = private
[@@deriving sexp]
val bind : Var.Binding.t -> t -> t
* [ bind bnd t ] is a branch of the ABT , in which any free variables in [ t ]
matching the name of [ bnd ] are bound to [ bnd ] .
matching the name of [bnd] are bound to [bnd]. *)
val of_var : Var.t -> t
* [ of_var v ] is a leaf in the ABT consisting of the variable [ v ]
val v : string -> t
* [ v x ] is a leaf in the ABT consisting of a variable named [ x ]
val op : t Op.t -> t
* [ op o ] is a branch in the ABT consisting of the operator [ o ]
val ( #. ) : string -> t -> t
val subst : Var.Binding.t -> value:t -> t -> t
* [ subst bnd ~value t ] is a new ABT obtained by substituting [ value ] for
all variables bound to [ bnd ] .
all variables bound to [bnd]. *)
val subst_var : string -> value:t -> t -> t
* [ subst_var name ~value t ] is a new abt obtained by substituting [ value ] for
the outermost scope of variables bound to [ name ] in [ t ]
the outermost scope of variables bound to [name] in [t] *)
val to_sexp : t -> Sexplib.Sexp.t
val of_sexp : Sexplib.Sexp.t -> t
val to_string : t -> string
val equal : t -> t -> bool
val case :
var:(Var.t -> 'a)
-> bnd:(Var.Binding.t * t -> 'a)
-> opr:(t Op.t -> 'a)
-> t
-> 'a
* Case analysis for eleminating ABTs
This is an alternative to using pattern - based elimination .
@param var function to apply to variables
@param bnd function to apply to bindings
@param opr function to apply to operators
This is an alternative to using pattern-based elimination.
@param var function to apply to variables
@param bnd function to apply to bindings
@param opr function to apply to operators *)
val subterms : t -> t list
val free_vars : t -> Var.Set.t
val is_closed : t -> bool
module Unification : sig
module Subst : sig
type term = t
* An alias for the type of the ABT for reference in the context of the substitution
type t
val find : Var.t -> t -> term option
val bindings : t -> (Var.t * term) list
val to_string : t -> string
end
type error =
[ `Unification of Var.t option * t * t
| `Occurs of Var.t * t
| `Cycle of Subst.t
]
val unify : t -> t -> (t * Subst.t, error) Result.t
val ( =.= ) : t -> t -> (t, error) Result.t
* [ a = .= b ] is [ unify a b |
val ( =?= ) : t -> t -> bool
end
end
module Make (Op : Operator) = struct
module Op = Op
type t =
| Var of Var.t
| Bnd of Var.Binding.t * t
| Opr of t Op.t
[@@deriving sexp]
let to_sexp = sexp_of_t
let of_sexp = t_of_sexp
let rec to_string t =
t |> function
| Var v -> Var.to_string v
| Bnd (b, abt) -> Var.(name @@ of_binding b) ^ "." ^ to_string abt
| Opr op -> Op.map to_string op |> Op.to_string
Alpha - equivalence is derived by checking that the ABTs are identical
modulo the pointer structure of any bound variables .
- For operators , this just amounts to checking the equality supplied by the
given { ! : Operator } , [ O ] .
- For variable , we check that the pointer { i structure } is equivalent , and
do take no account of names , since alpha equivalence is fundamentally
concerned with the ( anonymous ) binding structure of ABTs .
modulo the pointer structure of any bound variables.
- For operators, this just amounts to checking the equality supplied by the
given {!modtype:Operator}, [O].
- For variable, we check that the pointer {i structure} is equivalent, and
do take no account of names, since alpha equivalence is fundamentally
concerned with the (anonymous) binding structure of ABTs. *)
let equal : t -> t -> bool =
let bindings_correlated bndmap bnd bnd' =
match Bndmap.find_right bnd bndmap with
| Some bnd'' -> Var.Binding.equal bnd' bnd''
| None -> false
in
let rec equal : Bndmap.t -> t -> t -> bool =
fun bndmap t t' ->
[%log debug "check ɑ-equality of %s %s" (to_string t) (to_string t')];
match (t, t') with
| Opr o, Opr o' -> Op.equal (equal bndmap) o o'
| Bnd (left, t), Bnd (right, t') ->
Associate corresponding bindings in the bindmap
equal (Bndmap.add ~left ~right bndmap) t t'
| Var (Bound bnd), Var (Bound bnd') -> bindings_correlated bndmap bnd bnd'
| Var v, Var v' -> Var.equal v v'
| _ -> false
in
fun a b -> equal Bndmap.empty a b
let of_var : Var.t -> t = fun v -> Var v
let bind : Var.Binding.t -> t -> t =
fun bnd t ->
let rec scope = function
| Opr op -> Opr (Op.map scope op)
| Bnd (b, t) -> Bnd (b, scope t)
| Var v ->
match Var.bind v bnd with
| None -> Var v
| Some v' -> Var v'
in
Bnd (bnd, scope t)
let ( #. ) : string -> t -> t =
fun name abt ->
let binding : Var.Binding.t = Var.Binding.v name in
bind binding abt
let rec subst : Var.Binding.t -> value:t -> t -> t =
fun bnd ~value -> function
| Opr op -> Opr (Op.map (subst bnd ~value) op)
| Bnd (b, t) ->
if String.equal (Var.Binding.name b) (Var.Binding.name bnd) then
Bnd (b, t)
else
Bnd (b, subst bnd ~value t)
| Var v ->
if Var.is_bound_to v bnd then
value
else
Var v
let rec subst_var : string -> value:t -> t -> t =
fun name ~value -> function
| Var v -> Var v
| Opr op -> Opr (Op.map (subst_var name ~value) op)
| Bnd (b, t) ->
if Var.Binding.name b = name then
subst b ~value t
else
Bnd (b, subst_var name ~value t)
let op a = Opr a
let v : string -> t = fun s -> Var (Var.v s)
let rec subterms : t -> t list =
fun t ->
match t with
| Var _ -> [ t ]
| Bnd (_, t') -> t :: subterms t'
| Opr o -> t :: Op.fold (fun ts t' -> subterms t' @ ts) [] o
let case ~var ~bnd ~opr = function
| Var v -> var v
| Bnd (b, t) -> bnd (b, t)
| Opr o -> opr o
let is_free_var : t -> bool =
fun t ->
match t with
| Var (Free _) -> true
| _ -> false
let free_vars : t -> Var.Set.t =
fun t ->
let rec free fv = function
| Var (Free _ as v) -> Var.Set.add v fv
| Var (Bound _) -> fv
| Bnd (_, t') -> free fv t'
| Opr o -> Op.fold free fv o
in
free Var.Set.empty t
let is_closed : t -> bool = fun t -> Var.Set.is_empty (free_vars t)
module Unification = struct
Initial , naive approach :
* 1 . get all free vars of a and b
* 1 . build mgu and substitute for all vars
* 3 . then check for alpha - equiv
*
* Will take 3n complexity
*
* TODO To optimize : need to unify on a single pass , which will require way of identifying if two
* operators have the same head . Perhaps via an operator function ` sort : O.t - > ( string * int ) ` ?
* 1. get all free vars of a and b
* 1. build mgu and substitute for all vars
* 3. then check for alpha-equiv
*
* Will take 3n complexity
*
* TODO To optimize: need to unify on a single pass, which will require way of identifying if two
* operators have the same head. Perhaps via an operator function `sort : O.t -> (string * int)`? *)
let fail ?v t t' =
[%log debug "unification failure: %s <> %s " (to_string t) (to_string t')];
`Unification (v, t, t')
let occurs_err v t =
[%log debug "fail: %s ocurrs in %s" (Var.to_string v) (to_string t)];
`Occurs (v, t)
module Subst = struct
type term = t
type t =
; vars : term ref Var.Map.t
}
Substitution maps free variables to mutable refs .
When two free variables are assigned to be aliases , they simply share the same ref .
Therefore , assigning one variable , sufficies to assign all of its aliases .
When two free variables are assigned to be aliases, they simply share the same ref.
Therefore, assigning one variable, sufficies to assign all of its aliases. *)
let empty : t = { bnds = Bndmap.empty; vars = Var.Map.empty }
TODO Work out coherent scheme for dealing with binder transitions !
let ( let* ) = Option.bind
let find v ({ bnds; vars } : t) =
let* { contents = term } = Var.Map.find_opt v vars in
match term with
| Var (Bound bnd) ->
Bndmap.find_left bnd bnds
|> Option.map (fun b -> Var.of_binding b |> of_var)
| _ -> Some term
let bindings { vars; _ } =
Var.Map.bindings vars |> List.map (fun (v, t) -> (v, !t))
let term_to_string = to_string
let to_string s =
s
|> bindings
|> List.map (fun (v, term) ->
Printf.sprintf "%s -> %s" (Var.to_string v) (to_string term))
|> String.concat ", "
|> Printf.sprintf "[ %s ]"
let cycle_err s =
[%log debug "fail: cycle between variables %s" (to_string s)];
`Cycle s
let add s v term =
[%log
debug
"add substitution: %s -> %s"
(Var.to_string v)
(term_to_string term)];
if not (Var.is_free v) then
failwith "Invalid argument: Subst.add with non free var ";
if (not (is_free_var term)) && Var.Set.mem v (free_vars term) then
Error (occurs_err v term)
else
let vars = s.vars in
match term with
| Bnd (_, _)
| Opr _ -> (
Var.Map.find_opt v vars |> function
| None -> Ok { s with vars = Var.Map.add v (ref term) vars }
| Some ref_term when equal !ref_term term -> Ok s
| Some ref_var when is_free_var !ref_var ->
ref_var := term;
Ok s
| Some clash_term -> Error (fail ~v term !clash_term))
| Var v' ->
match (Var.Map.find_opt v vars, Var.Map.find_opt v' vars) with
| Some term_ref, None ->
Ok { s with vars = Var.Map.add v' term_ref vars }
| None, Some term_ref' ->
Ok { s with vars = Var.Map.add v term_ref' vars }
| Some term_ref, Some term_ref' ->
TODO Should this be a structural equality check ?
if term_ref == term_ref' then
Ok s
else
Error (fail ~v !term_ref !term_ref')
| None, None ->
let ref_var = ref (of_var v) in
Ok
{ s with
vars = Var.Map.add v ref_var vars |> Var.Map.add v' ref_var
}
let log_substitution s term =
[%log
debug
"applying substitution: %s %s"
(term_to_string term)
(to_string s)]
let lookup_binding lookup bnd s =
let ( let* ) = Option.bind in
let default = bnd |> Var.of_binding |> of_var in
Option.value ~default
@@ let* f = lookup in
let* bnd' = f bnd s.bnds in
Some (Var.of_binding bnd' |> of_var)
exception Cycle_in_apply of t
let apply : ?lookup:Bndmap.lookup -> t -> term -> term =
fun ?lookup s term ->
[%log debug "apply invoked for %s" (term_to_string term)];
let lookup = lookup_binding lookup in
let rec aux cyc_vars s term =
log_substitution s term;
match term with
| Bnd (b, t') -> Bnd (b, aux cyc_vars s t')
| Opr o -> Op.map (aux cyc_vars s) o |> op
| Var (Bound bnd) -> lookup bnd s
| Var (Free _ as v) ->
match Var.Map.find_opt v s.vars with
| None -> term
| Some { contents = substitute } -> (
if Var.Set.mem v cyc_vars then raise (Cycle_in_apply s);
let cyc_vars = Var.Set.add v cyc_vars in
match substitute with
| Var (Bound bnd) -> lookup bnd s
| Var (Free _) -> substitute
| _ ->
aux cyc_vars s substitute)
in
aux Var.Set.empty s term
let ( let* ) = Result.bind
module Op = Operator_aux (Op)
let build a b =
[%log
debug
"building substitution for %s %s"
(term_to_string a)
(term_to_string b)];
let rec aux s_res a b =
let* s = s_res in
match (a, b) with
| Opr ao, Opr bo when Op.same ao bo -> Op.fold2 aux (Ok s) ao bo
| Bnd (left, a'), Bnd (right, b') ->
let s = { s with bnds = Bndmap.add ~left ~right s.bnds } in
aux (Ok s) a' b'
| Var (Free _ as v), _ -> add s v b
| _, Var (Free _ as v) -> add s v a
| Var (Bound _), Var (Bound _) ->
Ok s
| _ -> Error (fail a b)
in
let* subst = aux (Ok empty) a b in
try
Var.Map.iter (fun _ cell -> cell := apply subst !cell) subst.vars;
[%log
debug
"substution for %s %s built: %s"
(term_to_string a)
(term_to_string b)
(to_string subst)];
Ok subst
with
| Cycle_in_apply s -> Error (cycle_err s)
end
let ( let* ) = Result.bind
type error =
[ `Unification of Var.t option * t * t
| `Occurs of Var.t * t
| `Cycle of Subst.t
]
let unify a b =
let result =
[%log debug "unification start: %s =.= %s" (to_string a) (to_string b)];
let* subst = Subst.build a b in
let a' = Subst.apply ~lookup:Bndmap.find_left subst a in
let b' = Subst.apply ~lookup:Bndmap.find_right subst b in
[%log
debug
"checking for alpha equivalence: %s = %s"
(to_string a')
(to_string b')];
if equal a' b' then
Ok (a', subst)
else
Error (fail a' b')
in
match result with
| Ok (u, _) ->
[%log
debug
"unification success: %s =.= %s => %s"
(to_string a)
(to_string b)
(to_string u)];
result
| Error _ ->
[%log
debug "unification failure: %s =/= %s" (to_string a) (to_string b)];
result
let ( =.= ) a b = unify a b |> Result.map fst
let ( =?= ) a b = unify a b |> Result.is_ok
end
end
|
07b4756710f1148df7a985ed5f875858860defa6490acf30d3391072e96d3320 | chenyukang/eopl | checker.scm | (module checker (lib "eopl.ss" "eopl")
(require "drscheme-init.scm")
(require "lang.scm")
(require "static-classes.scm")
(require "static-data-structures.scm")
(provide type-to-external-form type-of type-of-program)
;; type-of-program : Program -> Type
Page : 358
(define type-of-program
(lambda (pgm)
(cases program pgm
(a-program (class-decls exp1)
(initialize-static-class-env! class-decls)
(for-each check-class-decl! class-decls)
(type-of exp1 (init-tenv))))))
;; type-of : Exp -> Tenv
Page : 360 and 364
(define type-of
(lambda (exp tenv)
(cases expression exp
(const-exp (num) (int-type))
(var-exp (var) (apply-tenv tenv var))
(diff-exp (exp1 exp2)
(let ((type1 (type-of exp1 tenv))
(type2 (type-of exp2 tenv)))
(check-equal-type! type1 (int-type) exp1)
(check-equal-type! type2 (int-type) exp2)
(int-type)))
(sum-exp (exp1 exp2)
(let ((type1 (type-of exp1 tenv))
(type2 (type-of exp2 tenv)))
(check-equal-type! type1 (int-type) exp1)
(check-equal-type! type2 (int-type) exp2)
(int-type)))
(zero?-exp (exp1)
(let ((type1 (type-of exp1 tenv)))
(check-equal-type! type1 (int-type) exp1)
(bool-type)))
(if-exp (test-exp true-exp false-exp)
(let
((test-type (type-of test-exp tenv))
(true-type (type-of true-exp tenv))
(false-type (type-of false-exp tenv)))
;; these tests either succeed or raise an error
(check-equal-type! test-type (bool-type) test-exp)
(check-equal-type! true-type false-type exp)
true-type))
(let-exp (ids rands body)
(let ((new-tenv
(extend-tenv
ids
(types-of-exps rands tenv)
tenv)))
(type-of body new-tenv)))
(proc-exp (bvars bvar-types body)
(let ((result-type
(type-of body
(extend-tenv bvars bvar-types tenv))))
(proc-type bvar-types result-type)))
(call-exp (rator rands)
(let ((rator-type (type-of rator tenv))
(rand-types (types-of-exps rands tenv)))
(type-of-call rator-type rand-types rands exp)))
(letrec-exp (proc-result-types proc-names
bvarss bvar-typess proc-bodies
letrec-body)
(let ((tenv-for-letrec-body
(extend-tenv
proc-names
(map proc-type bvar-typess proc-result-types)
tenv)))
(for-each
(lambda (proc-result-type bvar-types bvars proc-body)
(let ((proc-body-type
(type-of proc-body
(extend-tenv
bvars
bvar-types
tenv-for-letrec-body)))) ;; !!
(check-equal-type!
proc-body-type proc-result-type proc-body)))
proc-result-types bvar-typess bvarss proc-bodies)
(type-of letrec-body tenv-for-letrec-body)))
(begin-exp (exp1 exps)
(letrec
((type-of-begins
(lambda (e1 es)
(let ((v1 (type-of e1 tenv)))
(if (null? es)
v1
(type-of-begins (car es) (cdr es)))))))
(type-of-begins exp1 exps)))
(assign-exp (id rhs)
(check-is-subtype!
(type-of rhs tenv)
(apply-tenv tenv id)
exp)
(void-type))
(list-exp (exp1 exps)
(let ((type-of-car (type-of exp1 tenv)))
(for-each
(lambda (exp)
(check-equal-type!
(type-of exp tenv)
type-of-car
exp))
exps)
(list-type type-of-car)))
;; object stuff begins here
(new-object-exp (class-name rands)
(let ((arg-types (types-of-exps rands tenv))
(c (lookup-static-class class-name)))
(cases static-class c
(an-interface (method-tenv)
(report-cant-instantiate-interface class-name))
(a-static-class (super-name i-names
field-names field-types method-tenv)
;; check the call to initialize
(type-of-call
(find-method-type
class-name
'initialize)
arg-types
rands
exp)
;; and return the class name as a type
(class-type class-name)))))
(self-exp ()
(apply-tenv tenv '%self))
(method-call-exp (obj-exp method-name rands)
(let ((arg-types (types-of-exps rands tenv))
(obj-type (type-of obj-exp tenv)))
(type-of-call
(find-method-type
(type->class-name obj-type)
method-name)
arg-types
rands
exp)))
(super-call-exp (method-name rands)
(let ((arg-types (types-of-exps rands tenv))
(obj-type (apply-tenv tenv '%self)))
(type-of-call
(find-method-type
(apply-tenv tenv '%super)
method-name)
arg-types
rands
exp)))
;; this matches interp.scm: interp.scm calls
;; object->class-name, which fails on a non-object, so we need
;; to make sure that obj-type is in fact a class type.
;; interp.scm calls is-subclass?, which never raises an error,
;; so we don't need to do anything with class-name here.
(cast-exp (exp class-name)
(let ((obj-type (type-of exp tenv)))
(if (class-type? obj-type)
(class-type class-name)
(report-bad-type-to-cast obj-type exp))))
;; instanceof in interp.scm behaves the same way as cast: it
;; calls object->class-name on its argument, so we need to
;; check that the argument is some kind of object, but we
;; don't need to look at class-name at all.
(instanceof-exp (exp class-name)
(let ((obj-type (type-of exp tenv)))
(if (class-type? obj-type)
(bool-type)
(report-bad-type-to-instanceof obj-type exp))))
)))
(define report-cant-instantiate-interface
(lambda (class-name)
(eopl:error 'type-of-new-obj-exp
"Can't instantiate interface ~s"
class-name)))
(define types-of-exps
(lambda (rands tenv)
(map (lambda (exp) (type-of exp tenv)) rands)))
;; type-of-call : Type * Listof(Type) * Listof(Exp) -> Type
Page : 360
(define type-of-call
(lambda (rator-type rand-types rands exp)
(cases type rator-type
(proc-type (arg-types result-type)
(if (not (= (length arg-types) (length rand-types)))
(report-wrong-number-of-arguments arg-types rand-types
exp))
(for-each check-is-subtype! rand-types arg-types rands)
result-type)
(else
(report-rator-not-of-proc-type
(type-to-external-form rator-type)
exp)))))
(define report-rator-not-of-proc-type
(lambda (external-form-rator-type exp)
(eopl:error 'type-of-call
"rator ~s is not of proc-type ~s"
exp external-form-rator-type)))
(define report-wrong-number-of-arguments
(lambda (arg-types rand-types exp)
(eopl:error 'type-of-call
"These are not the same: ~s and ~s in ~s"
(map type-to-external-form arg-types)
(map type-to-external-form rand-types)
exp)))
;; check-class-decl! : ClassDecl -> Unspecified
Page : 367
(define check-class-decl!
(lambda (c-decl)
(cases class-decl c-decl
(an-interface-decl (i-name abs-method-decls)
#t)
(a-class-decl (class-name super-name i-names
field-types field-names method-decls)
(let ((sc (lookup-static-class class-name)))
(for-each
(lambda (method-decl)
(check-method-decl! method-decl
class-name super-name
(static-class->field-names sc)
(static-class->field-types sc)))
method-decls))
(for-each
(lambda (i-name)
(check-if-implements! class-name i-name))
i-names)
))))
;; check-method-decl! :
;; MethodDecl * ClassName * ClassName * Listof(FieldName) * \Listof(Type)
;; -> Unspecified
Page : 368
(define check-method-decl!
(lambda (m-decl self-name s-name f-names f-types)
(cases method-decl m-decl
(a-method-decl (res-type m-name vars var-types body)
(let ((tenv
(extend-tenv
vars var-types
(extend-tenv-with-self-and-super
(class-type self-name)
s-name
(extend-tenv f-names f-types
(init-tenv))))))
(let ((body-type (type-of body tenv)))
(check-is-subtype! body-type res-type m-decl)
(if (eqv? m-name 'initialize) #t
(let ((maybe-super-type
(maybe-find-method-type
(static-class->method-tenv
(lookup-static-class s-name))
m-name)))
(if maybe-super-type
(check-is-subtype!
(proc-type var-types res-type)
maybe-super-type body)
#t)))))))))
;; check-if-implements! : ClassName * InterfaceName -> Bool
Page : 369
(define check-if-implements!
(lambda (c-name i-name)
(cases static-class (lookup-static-class i-name)
(a-static-class (s-name i-names f-names f-types
m-tenv)
(report-cant-implement-non-interface
c-name i-name))
(an-interface (method-tenv)
(let ((class-method-tenv
(static-class->method-tenv
(lookup-static-class c-name))))
(for-each
(lambda (method-binding)
(let ((m-name (car method-binding))
(m-type (cadr method-binding)))
(let ((c-method-type
(maybe-find-method-type
class-method-tenv
m-name)))
(if c-method-type
(check-is-subtype!
c-method-type m-type c-name)
(report-missing-method
c-name i-name m-name)))))
method-tenv))))))
(define report-cant-implement-non-interface
(lambda (c-name i-name)
(eopl:error 'check-if-implements
"class ~s claims to implement non-interface ~s"
c-name i-name)))
(define report-missing-method
(lambda (c-name i-name i-m-name)
(eopl:error 'check-if-implements
"class ~s claims to implement ~s, missing method ~s"
c-name i-name i-m-name)))
;;;;;;;;;;;;;;;; types ;;;;;;;;;;;;;;;;
(define check-equal-type!
(lambda (t1 t2 exp)
(if (equal? t1 t2)
#t
(eopl:error 'type-of
"Types didn't match: ~s != ~s in~%~s"
(type-to-external-form t1)
(type-to-external-form t2)
exp))))
;; check-is-subtype! : Type * Type * Exp -> Unspecified
Page : 363
(define check-is-subtype!
(lambda (ty1 ty2 exp)
(if (is-subtype? ty1 ty2)
#t
(report-subtype-failure
(type-to-external-form ty1)
(type-to-external-form ty2)
exp))))
(define report-subtype-failure
(lambda (external-form-ty1 external-form-ty2 exp)
(eopl:error 'check-is-subtype!
"~s is not a subtype of ~s in ~%~s"
external-form-ty1
external-form-ty2
exp)))
;; need this for typing cast expressions
;; is-subtype? : Type * Type -> Bool
Page : 363
(define is-subtype?
(lambda (ty1 ty2)
(cases type ty1
(class-type (name1)
(cases type ty2
(class-type (name2)
(statically-is-subclass? name1 name2))
(else #f)))
(proc-type (args1 res1)
(cases type ty2
(proc-type (args2 res2)
(and
(every2? is-subtype? args2 args1)
(is-subtype? res1 res2)))
(else #f)))
(else (equal? ty1 ty2)))))
(define andmap
(lambda (pred lst1 lst2)
(cond
((and (null? lst1) (null? lst2)) #t)
((or (null? lst1) (null? lst2)) #f) ; or maybe throw error
((pred (car lst1) (car lst2))
(andmap pred (cdr lst1) (cdr lst2)))
(else #f))))
(define every2? andmap)
;; statically-is-subclass? : ClassName * ClassName -> Bool
Page : 363
(define statically-is-subclass?
(lambda (name1 name2)
(or
(eqv? name1 name2)
(let ((super-name
(static-class->super-name
(lookup-static-class name1))))
(if super-name
(statically-is-subclass? super-name name2)
#f))
(let ((interface-names
(static-class->interface-names
(lookup-static-class name1))))
(memv name2 interface-names)))))
(define report-bad-type-to-cast
(lambda (type exp)
(eopl:error 'bad-type-to-case
"can't cast non-object; ~s had type ~s"
exp
(type-to-external-form type))))
(define report-bad-type-to-instanceof
(lambda (type exp)
(eopl:error 'bad-type-to-case
"can't apply instanceof to non-object; ~s had type ~s"
exp
(type-to-external-form type))))
) | null | https://raw.githubusercontent.com/chenyukang/eopl/0406ff23b993bfe020294fa70d2597b1ce4f9b78/base/chapter9/typed-oo/checker.scm | scheme | type-of-program : Program -> Type
type-of : Exp -> Tenv
these tests either succeed or raise an error
!!
object stuff begins here
check the call to initialize
and return the class name as a type
this matches interp.scm: interp.scm calls
object->class-name, which fails on a non-object, so we need
to make sure that obj-type is in fact a class type.
interp.scm calls is-subclass?, which never raises an error,
so we don't need to do anything with class-name here.
instanceof in interp.scm behaves the same way as cast: it
calls object->class-name on its argument, so we need to
check that the argument is some kind of object, but we
don't need to look at class-name at all.
type-of-call : Type * Listof(Type) * Listof(Exp) -> Type
check-class-decl! : ClassDecl -> Unspecified
check-method-decl! :
MethodDecl * ClassName * ClassName * Listof(FieldName) * \Listof(Type)
-> Unspecified
check-if-implements! : ClassName * InterfaceName -> Bool
types ;;;;;;;;;;;;;;;;
check-is-subtype! : Type * Type * Exp -> Unspecified
need this for typing cast expressions
is-subtype? : Type * Type -> Bool
or maybe throw error
statically-is-subclass? : ClassName * ClassName -> Bool | (module checker (lib "eopl.ss" "eopl")
(require "drscheme-init.scm")
(require "lang.scm")
(require "static-classes.scm")
(require "static-data-structures.scm")
(provide type-to-external-form type-of type-of-program)
Page : 358
(define type-of-program
(lambda (pgm)
(cases program pgm
(a-program (class-decls exp1)
(initialize-static-class-env! class-decls)
(for-each check-class-decl! class-decls)
(type-of exp1 (init-tenv))))))
Page : 360 and 364
(define type-of
(lambda (exp tenv)
(cases expression exp
(const-exp (num) (int-type))
(var-exp (var) (apply-tenv tenv var))
(diff-exp (exp1 exp2)
(let ((type1 (type-of exp1 tenv))
(type2 (type-of exp2 tenv)))
(check-equal-type! type1 (int-type) exp1)
(check-equal-type! type2 (int-type) exp2)
(int-type)))
(sum-exp (exp1 exp2)
(let ((type1 (type-of exp1 tenv))
(type2 (type-of exp2 tenv)))
(check-equal-type! type1 (int-type) exp1)
(check-equal-type! type2 (int-type) exp2)
(int-type)))
(zero?-exp (exp1)
(let ((type1 (type-of exp1 tenv)))
(check-equal-type! type1 (int-type) exp1)
(bool-type)))
(if-exp (test-exp true-exp false-exp)
(let
((test-type (type-of test-exp tenv))
(true-type (type-of true-exp tenv))
(false-type (type-of false-exp tenv)))
(check-equal-type! test-type (bool-type) test-exp)
(check-equal-type! true-type false-type exp)
true-type))
(let-exp (ids rands body)
(let ((new-tenv
(extend-tenv
ids
(types-of-exps rands tenv)
tenv)))
(type-of body new-tenv)))
(proc-exp (bvars bvar-types body)
(let ((result-type
(type-of body
(extend-tenv bvars bvar-types tenv))))
(proc-type bvar-types result-type)))
(call-exp (rator rands)
(let ((rator-type (type-of rator tenv))
(rand-types (types-of-exps rands tenv)))
(type-of-call rator-type rand-types rands exp)))
(letrec-exp (proc-result-types proc-names
bvarss bvar-typess proc-bodies
letrec-body)
(let ((tenv-for-letrec-body
(extend-tenv
proc-names
(map proc-type bvar-typess proc-result-types)
tenv)))
(for-each
(lambda (proc-result-type bvar-types bvars proc-body)
(let ((proc-body-type
(type-of proc-body
(extend-tenv
bvars
bvar-types
(check-equal-type!
proc-body-type proc-result-type proc-body)))
proc-result-types bvar-typess bvarss proc-bodies)
(type-of letrec-body tenv-for-letrec-body)))
(begin-exp (exp1 exps)
(letrec
((type-of-begins
(lambda (e1 es)
(let ((v1 (type-of e1 tenv)))
(if (null? es)
v1
(type-of-begins (car es) (cdr es)))))))
(type-of-begins exp1 exps)))
(assign-exp (id rhs)
(check-is-subtype!
(type-of rhs tenv)
(apply-tenv tenv id)
exp)
(void-type))
(list-exp (exp1 exps)
(let ((type-of-car (type-of exp1 tenv)))
(for-each
(lambda (exp)
(check-equal-type!
(type-of exp tenv)
type-of-car
exp))
exps)
(list-type type-of-car)))
(new-object-exp (class-name rands)
(let ((arg-types (types-of-exps rands tenv))
(c (lookup-static-class class-name)))
(cases static-class c
(an-interface (method-tenv)
(report-cant-instantiate-interface class-name))
(a-static-class (super-name i-names
field-names field-types method-tenv)
(type-of-call
(find-method-type
class-name
'initialize)
arg-types
rands
exp)
(class-type class-name)))))
(self-exp ()
(apply-tenv tenv '%self))
(method-call-exp (obj-exp method-name rands)
(let ((arg-types (types-of-exps rands tenv))
(obj-type (type-of obj-exp tenv)))
(type-of-call
(find-method-type
(type->class-name obj-type)
method-name)
arg-types
rands
exp)))
(super-call-exp (method-name rands)
(let ((arg-types (types-of-exps rands tenv))
(obj-type (apply-tenv tenv '%self)))
(type-of-call
(find-method-type
(apply-tenv tenv '%super)
method-name)
arg-types
rands
exp)))
(cast-exp (exp class-name)
(let ((obj-type (type-of exp tenv)))
(if (class-type? obj-type)
(class-type class-name)
(report-bad-type-to-cast obj-type exp))))
(instanceof-exp (exp class-name)
(let ((obj-type (type-of exp tenv)))
(if (class-type? obj-type)
(bool-type)
(report-bad-type-to-instanceof obj-type exp))))
)))
(define report-cant-instantiate-interface
(lambda (class-name)
(eopl:error 'type-of-new-obj-exp
"Can't instantiate interface ~s"
class-name)))
(define types-of-exps
(lambda (rands tenv)
(map (lambda (exp) (type-of exp tenv)) rands)))
Page : 360
(define type-of-call
(lambda (rator-type rand-types rands exp)
(cases type rator-type
(proc-type (arg-types result-type)
(if (not (= (length arg-types) (length rand-types)))
(report-wrong-number-of-arguments arg-types rand-types
exp))
(for-each check-is-subtype! rand-types arg-types rands)
result-type)
(else
(report-rator-not-of-proc-type
(type-to-external-form rator-type)
exp)))))
(define report-rator-not-of-proc-type
(lambda (external-form-rator-type exp)
(eopl:error 'type-of-call
"rator ~s is not of proc-type ~s"
exp external-form-rator-type)))
(define report-wrong-number-of-arguments
(lambda (arg-types rand-types exp)
(eopl:error 'type-of-call
"These are not the same: ~s and ~s in ~s"
(map type-to-external-form arg-types)
(map type-to-external-form rand-types)
exp)))
Page : 367
;; Check one class declaration.  Interfaces require no checking here.
;; For a class: check every method declaration against the class's
;; fields and superclass, then verify that the class really implements
;; each interface it claims to.
(define check-class-decl!
  (lambda (c-decl)
    (cases class-decl c-decl
      (an-interface-decl (i-name abs-method-decls)
        #t)
      (a-class-decl (class-name super-name i-names
                      field-types field-names method-decls)
        (let ((sc (lookup-static-class class-name)))
          (for-each
            (lambda (method-decl)
              (check-method-decl! method-decl
                class-name super-name
                (static-class->field-names sc)
                (static-class->field-types sc)))
            method-decls))
        (for-each
          (lambda (i-name)
            (check-if-implements! class-name i-name))
          i-names)
        ))))
;; Page 368
;; Check one method declaration of class `self-name` (superclass
;; `s-name`, fields `f-names` : `f-types`).  The body is typed in an
;; environment binding the parameters, %self, %super, and the fields;
;; the body's type must be a subtype of the declared result type.
;; Except for 'initialize, an overriding method's proc-type must also
;; be a subtype of the type inherited from the superclass (if any).
(define check-method-decl!
  (lambda (m-decl self-name s-name f-names f-types)
    (cases method-decl m-decl
      (a-method-decl (res-type m-name vars var-types body)
        (let ((tenv
                (extend-tenv
                  vars var-types
                  (extend-tenv-with-self-and-super
                    (class-type self-name)
                    s-name
                    (extend-tenv f-names f-types
                      (init-tenv))))))
          (let ((body-type (type-of body tenv)))
            (check-is-subtype! body-type res-type m-decl)
            (if (eqv? m-name 'initialize) #t
              (let ((maybe-super-type
                      (maybe-find-method-type
                        (static-class->method-tenv
                          (lookup-static-class s-name))
                        m-name)))
                (if maybe-super-type
                  (check-is-subtype!
                    (proc-type var-types res-type)
                    maybe-super-type body)
                  #t)))))))))
;; Page 369
;; Verify that class `c-name` provides every method promised by
;; interface `i-name`, each at a subtype of the interface's declared
;; method type.  It is an error if `i-name` names a class instead of
;; an interface.
(define check-if-implements!
  (lambda (c-name i-name)
    (cases static-class (lookup-static-class i-name)
      (a-static-class (s-name i-names f-names f-types
                        m-tenv)
        (report-cant-implement-non-interface
          c-name i-name))
      (an-interface (method-tenv)
        (let ((class-method-tenv
                (static-class->method-tenv
                  (lookup-static-class c-name))))
          (for-each
            (lambda (method-binding)
              ;; each binding is (name type) from the interface's tenv
              (let ((m-name (car method-binding))
                    (m-type (cadr method-binding)))
                (let ((c-method-type
                        (maybe-find-method-type
                          class-method-tenv
                          m-name)))
                  (if c-method-type
                    (check-is-subtype!
                      c-method-type m-type c-name)
                    (report-missing-method
                      c-name i-name m-name)))))
            method-tenv))))))
;; Signal a type error: a class's `implements` list names something
;; that is not an interface.
(define report-cant-implement-non-interface
  (lambda (class-name claimed-iface)
    (eopl:error 'check-if-implements
                "class ~s claims to implement non-interface ~s"
                class-name
                claimed-iface)))
;; Signal a type error: a class claims an interface but lacks one of
;; the interface's methods.
(define report-missing-method
  (lambda (class-name iface-name missing-method)
    (eopl:error 'check-if-implements
                "class ~s claims to implement ~s, missing method ~s"
                class-name
                iface-name
                missing-method)))
;; Require `ty-a` and `ty-b` to be exactly equal types; on mismatch
;; report both types (in external form) together with the offending
;; expression.  Returns #t on success.
(define check-equal-type!
  (lambda (ty-a ty-b exp)
    (cond
      ((equal? ty-a ty-b) #t)
      (else
       (eopl:error 'type-of
                   "Types didn't match: ~s != ~s in~%~s"
                   (type-to-external-form ty-a)
                   (type-to-external-form ty-b)
                   exp)))))
;; Page 363
;; Require `ty1` <: `ty2`; failure reporting is delegated to
;; report-subtype-failure.  Returns #t on success.
(define check-is-subtype!
  (lambda (ty1 ty2 exp)
    (cond
      ((is-subtype? ty1 ty2) #t)
      (else
       (report-subtype-failure
        (type-to-external-form ty1)
        (type-to-external-form ty2)
        exp)))))
;; Raise a subtype-violation error; both types arrive already rendered
;; in external (printable) form.
(define report-subtype-failure
  (lambda (shown-sub shown-super exp)
    (eopl:error 'check-is-subtype!
                "~s is not a subtype of ~s in ~%~s"
                shown-sub
                shown-super
                exp)))
;; Page 363
;; Structural subtyping:
;;  - class types: name1 must be statically a subclass of name2 (or
;;    name2 an interface it implements; see statically-is-subclass?)
;;  - proc types: contravariant in the arguments (note the swapped
;;    args2/args1 order below), covariant in the result
;;  - all other types: plain structural equality
(define is-subtype?
  (lambda (ty1 ty2)
    (cases type ty1
      (class-type (name1)
        (cases type ty2
          (class-type (name2)
            (statically-is-subclass? name1 name2))
          (else #f)))
      (proc-type (args1 res1)
        (cases type ty2
          (proc-type (args2 res2)
            (and
              (every2? is-subtype? args2 args1) ; arguments: contravariant
              (is-subtype? res1 res2)))         ; result: covariant
          (else #f)))
      (else (equal? ty1 ty2)))))
;; Pairwise universal quantifier: #t iff (pred x y) holds for every
;; corresponding pair of lst1 and lst2.
;; Fix: lists of different lengths now yield #f instead of crashing on
;; (car '()).  This is reachable: every2? is applied to two proc-type
;; argument lists in is-subtype? without any prior arity check, and
;; proc types of different arities are correctly not subtypes.
(define andmap
  (lambda (pred lst1 lst2)
    (cond
      ((and (null? lst1) (null? lst2)) #t)
      ((or (null? lst1) (null? lst2)) #f) ; length mismatch
      ((pred (car lst1) (car lst2))
       (andmap pred (cdr lst1) (cdr lst2)))
      (else #f))))
(define every2? andmap)
;; Page 363
;; True iff name1 = name2, or name2 is reachable via name1's
;; superclass chain, or name2 appears among name1's declared
;; interfaces.  Returns a truthy value (memv yields a tail of the
;; list), not necessarily #t.
(define statically-is-subclass?
  (lambda (name1 name2)
    (or
      (eqv? name1 name2)
      (let ((super-name
              (static-class->super-name
                (lookup-static-class name1))))
        (if super-name
          (statically-is-subclass? super-name name2)
          #f))
      (let ((interface-names
              (static-class->interface-names
                (lookup-static-class name1))))
        (memv name2 interface-names)))))
;; Signal a type error: `cast` was applied to a non-object expression.
;; Fix: the error tag was the typo 'bad-type-to-case; it now matches
;; the operation being reported.
(define report-bad-type-to-cast
  (lambda (type exp)
    (eopl:error 'bad-type-to-cast
      "can't cast non-object; ~s had type ~s"
      exp
      (type-to-external-form type))))
;; Signal a type error: `instanceof` was applied to a non-object
;; expression.  Fix: the error tag was copy-pasted from the cast
;; reporter ('bad-type-to-case); it now names this operation.
(define report-bad-type-to-instanceof
  (lambda (type exp)
    (eopl:error 'bad-type-to-instanceof
      "can't apply instanceof to non-object; ~s had type ~s"
      exp
      (type-to-external-form type))))
) |
3c24de386ca3838aa49acd19abd02f0788474706ed20ad41e9baee26b53ec4ce | waddlaw/TAPL | Parser.hs | module Language.SystemF.Parser (runSystemFParser) where
-- λs:Bool.λz:Bool.s (s z)
-- λf:Bool.(λx:Bool.f ((x x) y)) (λx:Bool.f ((x x) y))
-- import qualified RIO.Map as Map
import Control.Monad.Trans.State
import Language.Core.Parser hiding (Parser, symbol)
import Language.SystemF.Types
import RIO hiding (try)
import qualified RIO.List.Partial as L.Partial
import Text.Parser.Token.Highlight
import Text.Trifecta
-- | Parse a System F term from a string, resolving identifiers against
-- the supplied initial naming context.
runSystemFParser :: Context -> String -> Either String Term
runSystemFParser ctx input = runParserString (evalStateT exprP ctx) input
-- | Parse an application chain: one factor followed by zero or more
-- space-separated factors, folded left with TmApp (application is
-- left-associative).  The current context is threaded into both
-- sub-parsers explicitly.
exprP :: StateT Context Parser Term
exprP = do
  ctx <- get
  r1 <- lift $ evalStateT factorP ctx
  r2 <- lift $ evalStateT termsP ctx
  pure $ lefty r1 r2
  where
    lefty x xs = L.Partial.foldl1 TmApp (x : xs)
    termsP = many (space *> factorP)
-- | A single term factor: parenthesised expression, conditional,
-- λ-abstraction, or a boolean constant.
factorP :: StateT Context Parser Term
-- factorP = (char '(' *> (exprP <* char ')')) <|> ifP <|> varP <|> lambdaP
factorP =
  (char '(' *> (exprP <* char ')'))
    <|> ifP
    <|> lambdaP
    <|> token constP
    -- <|> varP
-- | λ-abstraction: @λ ident : type . body@.  The bound identifier is
-- recorded in the context by identP before the body is parsed.
lambdaP :: StateT Context Parser Term
lambdaP = do
  _ <- lift (symbol "λ")
  boundVar <- identP
  _ <- lift (symbol ":")
  varTy <- typeP
  _ <- dot
  body <- token exprP
  pure (TmLam boundVar varTy body)
-- | Parse a type: one or more type factors separated by @->@.
-- Fix: function arrows associate to the RIGHT, i.e.
-- @Bool -> Bool -> Bool@ means @Bool -> (Bool -> Bool)@.  The original
-- foldl1 built the left-associated @(Bool -> Bool) -> Bool@.
typeP :: StateT Context Parser Ty
typeP = righty <$> typeFactorP <*> termsP
  where
    righty x xs = L.Partial.foldr1 TyArr (x : xs)
    termsP = many (spaces *> string "->" *> spaces *> typeFactorP)
-- | A type factor: a parenthesised type or the Bool base type.
typeFactorP :: StateT Context Parser Ty
typeFactorP = parenthesised <|> typeBoolP
  where
    parenthesised = char '(' *> (typeP <* char ')')
-- | The base type: the literal keyword @Bool@.
typeBoolP :: StateT Context Parser Ty
typeBoolP = string "Bool" *> pure TyBool
-- FIXME
-- constP :: Parser Term
-- constP = c . fromMaybe 0 <$ char 'c'
-- <*> some digit
-- | Boolean literals: @true@ and @false@.
constP :: StateT Context Parser Term
constP =
  (string "true" *> pure TmTrue)
    <|> (string "false" *> pure TmFalse)
-- | Conditional: @if c then t else e@, where each branch may be
-- parenthesised or a bare token expression.
ifP :: StateT Context Parser Term
ifP = do
  _ <- symbol "if"
  c <- branch
  _ <- symbol "then"
  t <- branch
  _ <- symbol "else"
  e <- branch
  pure (TmIf c t e)
  where
    branch = parens exprP <|> token exprP
-- varP :: StateT Context Parser Term
-- varP = do
--   ctx <- get
--   var <- lift $ toTerm <$> oneOf ['a' .. 'z'] <*> many alphaNum
--   pure $ TmVar $ fromMaybe (error $ Text.unpack var <> " is not found in Contexts") $ L.findIndex ((== var) . fst) $ unCtx
--   where
--     toTerm x xs = Text.pack (x : xs)
--     toTerm x xs = 0
--   (TmVar var) var (Map.fromList []) -- FIXME: prelude
-- | Parse an identifier, record it in the naming context as a
-- NameBind, and return it wrapped as a VarName.
identP :: StateT Context Parser VarName
identP = do
  v <- lift $ ident defaultIdentStyle
  modify ( addContext ( v , NameBind ) )
  return (VarName v)
-- | Token style for identifiers: must start with a lowercase letter,
-- continue alphanumerically; no reserved words are defined.
defaultIdentStyle :: IdentifierStyle Parser
defaultIdentStyle =
  IdentifierStyle
    { _styleName = "SystemF",
      _styleStart = oneOf ['a' .. 'z'],
      _styleLetter = alphaNum,
      _styleReserved = mempty,
      _styleHighlight = Identifier,
      _styleReservedHighlight = ReservedIdentifier
    }
| null | https://raw.githubusercontent.com/waddlaw/TAPL/94576e46821aaf7abce6d1d828fc3ce6d05a40b8/subs/systemf/src/Language/SystemF/Parser.hs | haskell | λs:Bool.λz:Bool.s (s z)
import qualified RIO.Map as Map
lefty <$> evalStateT factorP env <*> evalStateT termsP env
<|> varP
<*> some digit
varP :: StateT Context Parser Term
varP = do
ctx <- get
where
toTerm x xs = Text.pack (x : xs)
FIXME : prelude | module Language.SystemF.Parser (runSystemFParser) where
λf : Bool.(λx : Bool.f ( λy : . ( x x ) y ) ) ( λx : Bool . f ( λy : . ( x x ) y ) )
import Control.Monad.Trans.State
import Language.Core.Parser hiding (Parser, symbol)
import Language.SystemF.Types
import RIO hiding (try)
import qualified RIO.List.Partial as L.Partial
import Text.Parser.Token.Highlight
import Text.Trifecta
runSystemFParser :: Context -> String -> Either String Term
runSystemFParser ctx = runParserString (evalStateT exprP ctx)
exprP :: StateT Context Parser Term
exprP = do
ctx <- get
r1 <- lift $ evalStateT factorP ctx
r2 <- lift $ evalStateT termsP ctx
pure $ lefty r1 r2
where
lefty x xs = L.Partial.foldl1 TmApp (x : xs)
termsP = many (space *> factorP)
factorP :: StateT Context Parser Term
factorP = ( char ' ( ' * > ( exprP < * char ' ) ' ) ) < | > try < | > varP < | > lambdaP
factorP =
(char '(' *> (exprP <* char ')'))
<|> ifP
<|> lambdaP
<|> token constP
lambdaP :: StateT Context Parser Term
lambdaP =
TmLam <$ lift (symbol "λ")
<*> identP
<* lift (symbol ":")
<*> typeP
<* dot
<*> token exprP
typeP :: StateT Context Parser Ty
typeP = lefty <$> typeFactorP <*> termsP
where
lefty x xs = L.Partial.foldl1 TyArr (x : xs)
termsP = many (spaces *> string "->" *> spaces *> typeFactorP)
typeFactorP :: StateT Context Parser Ty
typeFactorP = (char '(' *> (typeP <* char ')')) <|> typeBoolP
typeBoolP :: StateT Context Parser Ty
typeBoolP = TyBool <$ string "Bool"
FIXME
: : Parser Term
= c . fromMaybe 0 . < $ char ' c '
constP :: StateT Context Parser Term
constP =
TmTrue <$ string "true"
<|> TmFalse
<$ string "false"
ifP :: StateT Context Parser Term
ifP =
TmIf <$ symbol "if"
<*> (parens exprP <|> token exprP)
<* symbol "then"
<*> (parens exprP <|> token exprP)
<* symbol "else"
<*> (parens exprP <|> token exprP)
var < - lift $ toTerm < $ > oneOf [ ' a ' .. ' z ' ] < * > many alphaNum
pure $ TmVar $ fromMaybe ( error $ Text.unpack var < > " is not found in Contexts " ) $ L.findIndex ( (= = var ) . fst ) $ unCtx
toTerm x xs = 0
identP :: StateT Context Parser VarName
identP = do
v <- lift $ ident defaultIdentStyle
modify ( addContext ( v , NameBind ) )
return (VarName v)
defaultIdentStyle :: IdentifierStyle Parser
defaultIdentStyle =
IdentifierStyle
{ _styleName = "SystemF",
_styleStart = oneOf ['a' .. 'z'],
_styleLetter = alphaNum,
_styleReserved = mempty,
_styleHighlight = Identifier,
_styleReservedHighlight = ReservedIdentifier
}
|
4fbaf4d0335c54971b05d3a0835308bebbdb39fcbaf432c610c252d546b0446a | substratic/engine-for-gambit | node.test.scm | Copyright ( c ) 2020 by , All Rights Reserved .
Substratic Engine -
;;
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(import (_test)
(substratic engine node)
(substratic engine alist)
(substratic engine state)
(substratic engine events)
(substratic engine components component))
;; A minimal component named `test` with two state fields and a dummy
;; updater entry; used only to exercise node accessors below.
(define (test-component a b)
  (make-component test
    (a a)
    (b b)
    (updaters (add-method '(foo . foo)))))
;; Event handler: on a/change, emit b/change and set component-a's
;; field `a` to 2.  All other event types leave the state untouched.
(define (a-handler event state event-sink)
  (case (event-type event)
    ((a/change)
     (event-sink (make-event 'b/change))
     (update-state state (component-a (> (a 2)))))))
;; Event handler: on b/change, increment component-b's field `b` by 2.
(define (b-handler event state event-sink)
  (case (event-type event)
    ((b/change)
     (update-state state (component-b (> (b (lambda (b) (+ b 2)))))))))
;; Per-update function for component-b: emits a/change and resets
;; field `b` to 4 (before any handlers run on the emitted events).
(define (b-updater state time-step event-sink)
  (event-sink (make-event 'a/change))
  (update-state state (component-b (> (b 4)))))
;; A node with two components wired so that updating component-b fires
;; a/change -> (a-handler) -> b/change -> (b-handler), letting the
;; tests below observe events circulating within one node.
(define (make-test-node)
  (make-node 'test
    (make-component component-a
      (a 1)
      (handlers (add-method '(a . a-handler))))
    (make-component component-b
      (b 2)
      (updaters (add-method '(b . b-updater)))
      (handlers (add-method '(b . b-handler))))))
;; Basic accessors on a freshly constructed node.
;; NOTE(review): node-id is expected to be 1, which presumes ids are
;; assigned sequentially from a fresh counter -- confirm if these tests
;; are ever reordered.
(test-group "Node"
  (let ((node (make-node 'thing (test-component 6 9))))
    (test-group "retrieves id"
      (test-equal 1 (node-id node)))
    (test-group "retrieves type"
      (test-equal 'thing (node-type node)))
    (test-group "contains component state"
      (test-equal 6 (state-ref (state-ref node 'test) 'a)))
    (test-group "lists components"
      (test-equal '(test) (map (lambda (c) (car c)) (node-components node)))))
;; One update drives the cascade wired in make-test-node:
;; b-updater sets b=4 and emits a/change; a-handler sets a=2 and emits
;; b/change; b-handler adds 2, so b ends at 6.
(test-group "events circulate within a node's components during update"
  (let ((event-sink (make-event-sink))
        (test-node (make-test-node)))
    (set! test-node (update-node test-node 0.5 event-sink))
    (test-equal 2 (state-ref (state-ref test-node 'component-a) 'a))
    (test-equal 6 (state-ref (state-ref test-node 'component-b) 'b)))))
| null | https://raw.githubusercontent.com/substratic/engine-for-gambit/b19fccfaa0e27ccec915597897eef24e8fcaa81e/node.test.scm | scheme | Copyright ( c ) 2020 by , All Rights Reserved .
Substratic Engine -
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(import (_test)
(substratic engine node)
(substratic engine alist)
(substratic engine state)
(substratic engine events)
(substratic engine components component))
(define (test-component a b)
(make-component test
(a a)
(b b)
(updaters (add-method '(foo . foo)))))
(define (a-handler event state event-sink)
(case (event-type event)
((a/change)
(event-sink (make-event 'b/change))
(update-state state (component-a (> (a 2)))))))
(define (b-handler event state event-sink)
(case (event-type event)
((b/change)
(update-state state (component-b (> (b (lambda (b) (+ b 2)))))))))
(define (b-updater state time-step event-sink)
(event-sink (make-event 'a/change))
(update-state state (component-b (> (b 4)))))
(define (make-test-node)
(make-node 'test
(make-component component-a
(a 1)
(handlers (add-method '(a . a-handler))))
(make-component component-b
(b 2)
(updaters (add-method '(b . b-updater)))
(handlers (add-method '(b . b-handler))))))
(test-group "Node"
(let ((node (make-node 'thing (test-component 6 9))))
(test-group "retrieves id"
(test-equal 1 (node-id node)))
(test-group "retrieves type"
(test-equal 'thing (node-type node)))
(test-group "contains component state"
(test-equal 6 (state-ref (state-ref node 'test) 'a)))
(test-group "lists components"
(test-equal '(test) (map (lambda (c) (car c)) (node-components node)))))
(test-group "events circulate within a node's components during update"
(let ((event-sink (make-event-sink))
(test-node (make-test-node)))
(set! test-node (update-node test-node 0.5 event-sink))
(test-equal 2 (state-ref (state-ref test-node 'component-a) 'a))
(test-equal 6 (state-ref (state-ref test-node 'component-b) 'b)))))
|
|
5a6bce86a343611b6dcc634a8040e218efd6d68e08863ac7b18e3010fbb56938 | ocaml-multicore/tezos | test_storage.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2020 Metastate AG < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
(** Testing
-------
Component: Context Storage
Invocation: dune exec src/proto_alpha/lib_protocol/test/main.exe -- test storage
Subject: Test the correctnesss of debug message from storage_functor
*)
open Protocol
open Storage_functors
open Storage_sigs
(* An int32 value type plus the [Index] module the storage functors
   require: [int] keys rendered as a single path segment. *)
module Int32 = struct
  type t = int32

  let encoding = Data_encoding.int32

  module Index = struct
    type t = int

    (* every key occupies exactly one segment of the storage path *)
    let path_length = 1

    let to_path c l = string_of_int c :: l

    let of_path = function
      | [] | _ :: _ :: _ -> None
      | [c] -> int_of_string_opt c

    type 'a ipath = 'a * t

    let args =
      Storage_description.One
        {
          rpc_arg = Environment.RPC_arg.int;
          encoding = Data_encoding.int31;
          compare = Compare.Int.compare;
        }
  end
end
(* Same shape as [Int32] above but indexed by [int64] keys, so the two
   indexed subcontexts below have *different* argument types -- which is
   exactly what test_register_indexed_subcontext_2 relies on. *)
module Int64 = struct
  type t = int64

  let encoding = Data_encoding.int64

  module Index = struct
    type t = int64

    let path_length = 1

    (* [Int64] here resolves to the stdlib module, not this one *)
    let to_path c l = Int64.to_string c :: l

    let of_path = function
      | [] | _ :: _ :: _ -> None
      | [c] -> Int64.of_string_opt c

    type 'a ipath = 'a * t

    let args =
      Storage_description.One
        {
          rpc_arg = Environment.RPC_arg.int64;
          encoding = Data_encoding.int64;
          compare = Compare.Int64.compare;
        }
  end
end
(* Register a root subcontext of [Raw_context] under the single path
   segment [name]. *)
let create_context name : (module Raw_context.T with type t = Raw_context.t) =
  (module Make_subcontext (Registered) (Raw_context)
            (struct
              let name = [name]
            end))
(* Register a named subcontext [name] nested under an existing
   [Context]. *)
let create_subcontext name
    (module Context : Raw_context.T with type t = Raw_context.t) :
    (module Raw_context.T with type t = Raw_context.t) =
  (module Make_subcontext (Registered) (Context)
            (struct
              let name = [name]
            end))
(* Register a single [Int32] value stored at [name] under [Context]. *)
let create_single_data_storage name
    (module Context : Raw_context.T with type t = Raw_context.t) :
    (module Single_data_storage with type t = Context.t and type value = Int32.t)
    =
  (module Make_single_data_storage (Registered) (Context)
            (struct
              let name = [name]
            end)
            (Int32))
(* Register a data-set storage indexed by [int] keys (Int32.Index). *)
let create_indexed_subcontext_int32
    (module Context : Raw_context.T with type t = Raw_context.t) :
    (module Data_set_storage with type t = Raw_context.t) =
  (module Make_data_set_storage (Context) (Int32.Index))
(* Register a data-set storage indexed by [int64] keys (Int64.Index). *)
let create_indexed_subcontext_int64
    (module Context : Raw_context.T with type t = Raw_context.t) :
    (module Data_set_storage with type t = Raw_context.t) =
  (module Make_data_set_storage (Context) (Int64.Index))
(* Run [f_prog] and require that it raises exactly [error]
   (structural comparison).
   Fix: the original called [Alcotest.fail "Unexpected successful
   result"] inside the [try] body, so that failure exception was itself
   caught by the handler and re-reported as "Unexpected error result".
   Matching with an [exception] case keeps the success-path failure
   outside the handler, so each message now reports the right cause. *)
let must_failwith f_prog error =
  match f_prog () with
  | _ -> Alcotest.fail "Unexpected successful result"
  | exception exc ->
      if exc = error then Lwt.return_unit
      else Alcotest.fail "Unexpected error result"
(** Test:
    Checks that registering a value at a path where a value already
    exists fails with an [Invalid_argument] naming the existing node. *)
let test_register_single_data () =
  let f_prog () =
    let context = create_context "context1" in
    let _single_data = create_single_data_storage "single_data" context in
    (* second registration at the same path must raise *)
    create_single_data_storage "single_data" context
  in
  let error =
    Invalid_argument
      "Could not register a value at [context1 / single_data] because of an \
       existing Value."
  in
  must_failwith f_prog error
(** Test:
    Checks that registering a named subcontext at a path where a value
    already exists fails. *)
let test_register_named_subcontext () =
  let f_prog () =
    let context = create_context "context2" in
    let subcontext = create_subcontext "sub_context" context in
    let _single_data = create_single_data_storage "error_register" subcontext in
    (* "error_register" is already a Value; nesting under it must raise *)
    let subcontext = create_subcontext "error_register" subcontext in
    create_single_data_storage "single_data2" subcontext
  in
  let error =
    Invalid_argument
      "Could not register a named subcontext at [context2 / sub_context / \
       error_register] because of an existing Value."
  in
  must_failwith f_prog error
(** Test:
    Checks that registering an indexed subcontext at a path where a
    value already exists fails. *)
let test_register_indexed_subcontext () =
  let f_prog () =
    let context = create_context "context3" in
    let _ = create_single_data_storage "single_value" context in
    create_indexed_subcontext_int32 context
  in
  let error =
    Invalid_argument
      "Could not register an indexed subcontext at [context3] because of an \
       existing \n\
       single_value Value."
  in
  must_failwith f_prog error
(** Test:
    Checks that registering an indexed subcontext where an indexed
    subcontext with a *different* key type ([int] vs [int64]) already
    exists fails. *)
let test_register_indexed_subcontext_2 () =
  let f_prog () =
    let context = create_context "context4" in
    let _ = create_indexed_subcontext_int32 context in
    create_indexed_subcontext_int64 context
  in
  let error =
    Invalid_argument
      "An indexed subcontext at [context4] already exists but has a different \
       argument: `int64` <> `int`."
  in
  must_failwith f_prog error
(* Alcotest cases covering the storage-functor registration guards
   exercised above. *)
let tests =
  [
    Alcotest_lwt.test_case
      "register single data in existing path"
      `Quick
      (fun _ -> test_register_single_data);
    Alcotest_lwt.test_case
      "register named subcontext in existing path"
      `Quick
      (fun _ -> test_register_named_subcontext);
    Alcotest_lwt.test_case
      "register indexed subcontext in existing path"
      `Quick
      (fun _ -> test_register_indexed_subcontext);
    Alcotest_lwt.test_case
      "register indexed subcontext with existing indexed subcontext"
      `Quick
      (fun _ -> test_register_indexed_subcontext_2);
  ]
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/proto_012_Psithaca/lib_protocol/test/test_storage.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* Testing
-------
Component: Context Storage
Invocation: dune exec src/proto_alpha/lib_protocol/test/main.exe -- test storage
Subject: Test the correctnesss of debug message from storage_functor
* Test:
This test check that creating value where value already exists
fails
* Test:
This test check that creating a subcontext where a value already exists
fails
* Test:
This test check that creating a indexed subcontext where a value already
exists fails
* Test:
This test check that creating a indexed subcontext where an indexed
subcontext already exists fails | Copyright ( c ) 2020 Metastate AG < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Protocol
open Storage_functors
open Storage_sigs
module Int32 = struct
type t = int32
let encoding = Data_encoding.int32
module Index = struct
type t = int
let path_length = 1
let to_path c l = string_of_int c :: l
let of_path = function
| [] | _ :: _ :: _ -> None
| [c] -> int_of_string_opt c
type 'a ipath = 'a * t
let args =
Storage_description.One
{
rpc_arg = Environment.RPC_arg.int;
encoding = Data_encoding.int31;
compare = Compare.Int.compare;
}
end
end
module Int64 = struct
type t = int64
let encoding = Data_encoding.int64
module Index = struct
type t = int64
let path_length = 1
let to_path c l = Int64.to_string c :: l
let of_path = function
| [] | _ :: _ :: _ -> None
| [c] -> Int64.of_string_opt c
type 'a ipath = 'a * t
let args =
Storage_description.One
{
rpc_arg = Environment.RPC_arg.int64;
encoding = Data_encoding.int64;
compare = Compare.Int64.compare;
}
end
end
let create_context name : (module Raw_context.T with type t = Raw_context.t) =
(module Make_subcontext (Registered) (Raw_context)
(struct
let name = [name]
end))
let create_subcontext name
(module Context : Raw_context.T with type t = Raw_context.t) :
(module Raw_context.T with type t = Raw_context.t) =
(module Make_subcontext (Registered) (Context)
(struct
let name = [name]
end))
let create_single_data_storage name
(module Context : Raw_context.T with type t = Raw_context.t) :
(module Single_data_storage with type t = Context.t and type value = Int32.t)
=
(module Make_single_data_storage (Registered) (Context)
(struct
let name = [name]
end)
(Int32))
let create_indexed_subcontext_int32
(module Context : Raw_context.T with type t = Raw_context.t) :
(module Data_set_storage with type t = Raw_context.t) =
(module Make_data_set_storage (Context) (Int32.Index))
let create_indexed_subcontext_int64
(module Context : Raw_context.T with type t = Raw_context.t) :
(module Data_set_storage with type t = Raw_context.t) =
(module Make_data_set_storage (Context) (Int64.Index))
let must_failwith f_prog error =
try
let _ = f_prog () in
Alcotest.fail "Unexpected successful result"
with exc ->
if exc = error then Lwt.return_unit
else Alcotest.fail "Unexpected error result"
let test_register_single_data () =
let f_prog () =
let context = create_context "context1" in
let _single_data = create_single_data_storage "single_data" context in
create_single_data_storage "single_data" context
in
let error =
Invalid_argument
"Could not register a value at [context1 / single_data] because of an \
existing Value."
in
must_failwith f_prog error
let test_register_named_subcontext () =
let f_prog () =
let context = create_context "context2" in
let subcontext = create_subcontext "sub_context" context in
let _single_data = create_single_data_storage "error_register" subcontext in
let subcontext = create_subcontext "error_register" subcontext in
create_single_data_storage "single_data2" subcontext
in
let error =
Invalid_argument
"Could not register a named subcontext at [context2 / sub_context / \
error_register] because of an existing Value."
in
must_failwith f_prog error
let test_register_indexed_subcontext () =
let f_prog () =
let context = create_context "context3" in
let _ = create_single_data_storage "single_value" context in
create_indexed_subcontext_int32 context
in
let error =
Invalid_argument
"Could not register an indexed subcontext at [context3] because of an \
existing \n\
single_value Value."
in
must_failwith f_prog error
let test_register_indexed_subcontext_2 () =
let f_prog () =
let context = create_context "context4" in
let _ = create_indexed_subcontext_int32 context in
create_indexed_subcontext_int64 context
in
let error =
Invalid_argument
"An indexed subcontext at [context4] already exists but has a different \
argument: `int64` <> `int`."
in
must_failwith f_prog error
let tests =
[
Alcotest_lwt.test_case
"register single data in existing path"
`Quick
(fun _ -> test_register_single_data);
Alcotest_lwt.test_case
"register named subcontext in existing path"
`Quick
(fun _ -> test_register_named_subcontext);
Alcotest_lwt.test_case
"register indexed subcontext in existing path"
`Quick
(fun _ -> test_register_indexed_subcontext);
Alcotest_lwt.test_case
"register indexed subcontext with existing indexed subcontext"
`Quick
(fun _ -> test_register_indexed_subcontext_2);
]
|
9593221c7dd2f3736249fb81c260d8de760a574b098e8b267c2451865cc09f89 | Workiva/eva | integration_test.clj | Copyright 2015 - 2019 Workiva Inc.
;;
;; Licensed under the Eclipse Public License 1.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -1.0.php
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns eva.v2.system.integration-test
(:require [clojure.test :refer :all]
[eva.api :refer :all]
[barometer.core :as m]
[eva.v2.messaging.address :as address]
[eva.v2.system.peer-connection.core :as peer]
[eva.v2.system.transactor.core :as transactor]
[eva.v2.system.indexing.core :as indexing]
[eva.v2.database.core :as database]
[eva.v2.messaging.jms.alpha.local-broker :as broker]
[eva.v2.messaging.node.manager.alpha :as node]
[eva.v2.messaging.node.local-simulation :as local-msg-2]
[eva.v2.messaging.node.manager.types :as node-types]
[eva.v2.storage.value-store.core :as values]
[eva.v2.storage.value-store.manager :as vs-manager]
[eva.v2.storage.block-store.impl.memory :as memory]
[eva.v2.system.database-catalogue.core :as catalog]
[eva.v2.system.database-connection.core :as dbc]
[eva.v2.storage.block-store.types :as store-type]
[eva.v2.storage.value-store.core :as values]
[eva.v2.storage.block-store.impl.sql :as sql]
[eva.v2.system.protocols :as p]
[eva.config :as conf]
[quartermaster.core :as qu]
[eva.quartermaster-patches :as qp]
[eva.v2.storage.local :as h2]
[eva.v2.storage.local :refer [init-h2-db]]
[com.stuartsierra.component :as c])
(:import [java.io File]
[java.util UUID]
[java.util.concurrent CountDownLatch]))
(defn base-config
  "Assemble a full system config: fixed messaging addresses, fresh
  peer/transactor/indexer ids, and the given database id, overlaid with
  the supplied storage and messenger config maps."
  [database-id storage-config messenger-config]
  (let [core {::address/transaction-submission "submit-addr"
              ::address/transaction-publication "pub-addr"
              ::address/index-updates "indexes"
              ::peer/id (UUID/randomUUID)
              ::transactor/id (UUID/randomUUID)
              ::indexing/id (UUID/randomUUID)
              ::database/id database-id}]
    (merge core storage-config messenger-config)))
(defn memory-config
  "Storage config backed by the in-memory block store; a fresh
  value-store partition id is generated per call."
  [database-id]
  (let [partition-id (java.util.UUID/randomUUID)]
    {::store-type/storage-type ::store-type/memory
     ::memory/store-id database-id
     ::values/partition-id partition-id}))
;; In-VM ActiveMQ messenger config (no external broker process).
(defn messenger-config
  []
  {:messenger-node-config/type :broker-uri
   :broker-type "org.apache.activemq.ActiveMQConnectionFactory"
   :broker-uri "vm"})
;; SQL (H2) storage config backed by a throwaway temp file; the
;; database-id doubles as the value-store partition id.
(defn sql-config
  [database-id]
  {::store-type/storage-type ::store-type/sql
   ::values/partition-id database-id
   ::sql/db-spec (h2/db-spec (h2/temp-file))})
;; Releasing a connection created with {:autogenetic true} must release
;; every resource it transitively acquired (leak check via quartermaster).
(deftest ensure-autogenetic-release-does-release-everything
  (qp/testing-for-resource-leaks
   (release (connect {:autogenetic true}))))
;; A peer must be able to reconnect after the broker is bounced:
;; two connects with the same config share one connection; after the
;; broker restarts and the connection is released, a fresh connect
;; yields a new, working connection.
(deftest peer-reconnect
  (qp/testing-for-resource-leaks
   (let [database-id (UUID/randomUUID)
         config (base-config database-id (memory-config database-id) (messenger-config))
         vs (qu/acquire vs-manager/value-store-manager :random config)
         database-info (catalog/initialize-database* vs database-id)
         transactor (qu/acquire transactor/transactor-manager :random config)
         indexer (qu/acquire indexing/indexer-manager :random config)
         connection (connect config)
         connection-2 (connect config)]
     (try
       (is (= connection connection-2))
       (is @(transact connection []))
       ;; bounce the broker out from under everyone
       (broker/stop-broker!)
       (broker/broker-uri)
       (release connection)
       (println "Sleeping 1500 ms for transactor to recover")
       ;; Give the transactor its 1000 ms to restart messaging.
       (let [connection (connect config)]
         (is (not= connection connection-2))
         (is @(transact connection [])))
       (finally
         (qu/release transactor true)
         (qu/release indexer true)
         (release connection)
         (release connection-2)
         (broker/stop-broker!)
         (qu/release vs true))))))
(defn- fake-messenger
  "Messaging stub: publish! delegates to publish-fn; every other
  messaging operation is a no-op (open operations return a map with a
  nil connection-error atom).  Implements just enough of the messaging
  protocols plus qu/SharedResource for transactor/indexer tests."
  [publish-fn]
  (let [status (atom false)
        id (qu/new-resource-id)]
    (reify
      p/PublisherManager
      (open-publisher! [this addr opts] {:connection-error (atom nil)})
      (publish! [this addr pub-data] (publish-fn pub-data))
      (close-publisher! [_ _])
      p/SubscriberManager
      (subscribe! [this id addr f opts] {:connection-error (atom nil)})
      (unsubscribe! [_ _ _])
      p/ResponderManager
      (open-responder! [this addr f opts] {:connection-error (atom nil)})
      (close-responder! [_ _])
      p/ErrorListenerManager
      (register-error-listener [_ _ _ _])
      (unregister-error-listener [_ _])
      qu/SharedResource
      (initiated? [_] @status)
      (status* [_] {})
      (resource-id [_] id)
      (initiate [this] (reset! status true) this)
      (terminate [this] (reset! status false) this)
      (force-terminate [_] (reset! status false)))))
;; Run 10 transactors (distinguished via ::tag in the overridden
;; quartermaster discriminators) against ONE database, 20 transactions
;; each, with unlimited concurrent-modification retries.  All
;; transactors must converge on the same basis-t, every transaction
;; must be published exactly once, and the tx-log must be dense.
(deftest ensure-multiple-transactors-play-nice
  (qp/testing-for-resource-leaks
   (let [pub-log (atom [])
         publish-fn (fn [tag] #(swap! pub-log conj {:node tag :pub-data %}))]
     (qu/overriding
      [node/messenger-nodes
       {:constructor (fn [_ config]
                       (fake-messenger (publish-fn (::tag config))))
        :discriminator (fn [_ config] [(::tag config) (:broker-uri config)])}
       transactor/transactor-manager
       {:discriminator
        (fn [_ config] [(::tag config) (::database/id config) (::transactor/id config)])}]
      (let [database-id (UUID/randomUUID)
            config (base-config database-id (sql-config database-id) (messenger-config))
            vs (qu/acquire vs-manager/value-store-manager :random config)
            database-info (catalog/initialize-database* vs database-id)
            num-txors 10
            num-txs 20
            configs (map #(assoc config ::tag %) (range num-txors))
            total-txs (* num-txors num-txs)
            start-latch (CountDownLatch. num-txors)
            finish-latch (CountDownLatch. num-txors)
            transactors (for [config configs]
                          (transactor/transactor :testing config))
            futs (doall (map-indexed
                         (fn [i txor]
                           (future
                             ;; all futures start transacting together
                             (.countDown start-latch)
                             (.await start-latch)
                             (binding [transactor/*max-concurrent-modification-retries* Long/MAX_VALUE]
                               (dotimes [n num-txs]
                                 (Thread/sleep 5)
                                 ;; ^^ sleep to force some interleaving of transactions
                                 ;; without this, the first transactor to get in 'bullies'
                                 ;; the rest since it doesn't have to go through the
                                 ;; slow IO process of updating its state.
                                 (p/process-transaction txor
                                                        {:database-id database-id
                                                         :tx-data [[:db/add (tempid :db.part/user)
                                                                    :db/doc (format "%s-%s" i n)]]})))
                             (.countDown finish-latch)))
                         transactors))]
        (try
          (.await finish-latch)
          (is (apply =
                     total-txs
                     (map (comp eva.api/basis-t p/db-snapshot p/repair-and-reload deref :database-connection)
                          transactors)))
          (let [pub-log @pub-log]
            (is (= total-txs (count pub-log)))
            (is (= (range 1 (inc total-txs))
                   ;; sort since publishes can be disorderly
                   (sort (map (comp :tx-num :pub-data) pub-log))))
            ;; vv did everyone get their txs?
            (is (= (zipmap (range num-txors) (repeat num-txs))
                   (frequencies (map :node pub-log)))))
          ;; is the tx-log in the state we expect it to be?
          (is (= (range (count (->> transactors first :database-connection deref p/log)))
                 (->> transactors first :database-connection deref p/log seq (map (comp :tx-num deref)))))
          (is (= (inc total-txs)
                 (->> transactors first :database-connection deref p/log count)))
          (finally
            (broker/stop-broker!)
            (doseq [config configs]
              (qu/release* transactor/transactor-manager :testing config true))
            (qu/release vs true))))))))
(defn break-publishing
  "Wrap messenger-node so publish! silently drops every message (the
  #_ form preserves an earlier random-drop experiment); every other
  operation delegates to the wrapped node.  Used to prove peers can
  make progress without transaction publications."
  [messenger-node]
  (let [messenger-node (atom messenger-node)]
    (reify
      p/PublisherManager
      (open-publisher! [this addr opts] (p/open-publisher! @messenger-node addr opts))
      (publish! [this addr pub-data]
        nil #_(when (< 0.5 (rand))
                (p/publish! @messenger-node addr pub-data)))
      (close-publisher! [_ addr] (p/close-publisher! @messenger-node addr))
      p/SubscriberManager
      (subscribe! [_ id addr f opts] (p/subscribe! @messenger-node id addr f opts))
      (unsubscribe! [_ id addr] (p/unsubscribe! @messenger-node id addr))
      p/ResponderManager
      (open-responder! [_ addr f opts] (p/open-responder! @messenger-node addr f opts))
      (close-responder! [_ addr] (p/close-responder! @messenger-node addr))
      p/RequestorManager
      (open-requestor! [mn addr opts] (p/open-requestor! @messenger-node addr opts))
      (close-requestor! [mn addr] (p/close-requestor! @messenger-node addr))
      (request! [mn addr request-msg] (p/request! @messenger-node addr request-msg))
      p/ErrorListenerManager
      (register-error-listener [mn key f args] (p/register-error-listener @messenger-node key f args))
      (unregister-error-listener [mn key] (p/unregister-error-listener @messenger-node key))
      qu/SharedResource
      (initiated? [_] (qu/initiated? @messenger-node))
      (status* [_] (qu/status* @messenger-node))
      (resource-id [_] (qu/resource-id @messenger-node))
      (initiate [this] (swap! messenger-node qu/initiate) this)
      (terminate [this] (swap! messenger-node qu/terminate) this)
      (force-terminate [_] (qu/force-terminate messenger-node)))))
;; With publishing broken (break-publishing), peers never see the
;; transactor's publications, so fresh dbs are stale -- but an explicit
;; sync-db / .syncDb must still bring every peer up to date.
(deftest ensure-peers-can-proceed-without-publishes
  (qp/testing-for-resource-leaks
   (qu/overriding
    [node/messenger-nodes
     {:discriminator (fn [user-id config]
                       [(::tag config) (node-types/messenger-node-discriminator user-id config)])
      :constructor (fn [definition config]
                     (let [real-messenger (node-types/messenger-node-constructor (second definition) config)]
                       (break-publishing real-messenger)))}
     dbc/database-connection-manager
     {:discriminator (fn [user-id config] [user-id (::database/id config) (::tag config)])}
     peer/peer-connection-manager
     {:discriminator (fn [_ config] [(::tag config) (::database/id config)])}]
    (let [database-id (UUID/randomUUID)
          config (base-config database-id (sql-config database-id) (messenger-config))
          vs (qu/acquire vs-manager/value-store-manager :random config)
          database-info (catalog/initialize-database* vs database-id)
          num-conns 10
          txor (qu/acquire transactor/transactor-manager :txor (assoc config ::tag -1))
          conns (doall (for [i (range num-conns)]
                         (connect (assoc config ::tag i))))]
      (try
        ;; prime the pump
        @(transact (nth conns 0) [[:db/add (tempid :db.part/user) :db/ident :test-var]])
        ;; no publishes -> the other peers still see the empty db
        (doall (for [i (range 1 num-conns)]
                 (is (= nil (pull (db (nth conns i)) [:db/ident] :test-var)))))
        ;; explicit sync via both the clj fn and the Java interface
        (doall (map sync-db (take 5 conns)))
        (doall (map #(.syncDb ^eva.Connection %) (drop 5 conns)))
        (doall (for [i (range 1 num-conns)]
                 (is (= {:db/ident :test-var}
                        (pull (db (nth conns i)) [:db/ident] :test-var)))))
        (finally
          (qu/release vs true)
          (doseq [c conns] (release c))
          (qu/release txor true)
          (broker/stop-broker!)))))))
(deftest ensure-stale-transactors-recover-from-pipeline-failure
(qp/testing-for-resource-leaks
(qu/overriding
[node/messenger-nodes {:constructor (fn [_ _] (fake-messenger (constantly true)))}
transactor/transactor-manager
{:discriminator (fn [user-id config] [user-id (::database/id config) (::transactor/id config)])}]
(let [database-id (UUID/randomUUID)
config (base-config database-id (sql-config database-id) (messenger-config))
vs (qu/acquire vs-manager/value-store-manager :random config)
database-info (catalog/initialize-database* vs database-id)
num-txors 2
transactors (for [i (range num-txors)]
(qu/acquire transactor/transactor-manager i config))
staleness-count (->> "eva.v2.system.database-connection.core.staleness-meter"
(m/get-metric m/DEFAULT)
(m/count))]
(try (is (= 2 (count (sequence (comp (map deref) (distinct)) transactors))))
(is (p/process-transaction @(first transactors)
{:database-id database-id
:tx-data [[:db/add 0 :db/doc "foo"]]}))
(is (p/process-transaction @(second transactors)
{:database-id database-id
:tx-data [[:db.fn/cas 0 :db/doc "foo" "bar"]]}))
(is (= (inc staleness-count)
(->> "eva.v2.system.database-connection.core.staleness-meter"
(m/get-metric m/DEFAULT)
(m/count))))
(finally
(doseq [txor transactors] (qu/release txor true))
(broker/stop-broker!)
(qu/release vs true)))))))
(deftest persistent-h2-store
(let [^File tmpfile (sql/temp-file)
path (.getPath tmpfile)
config {:autogenetic true
::database/id (UUID/randomUUID)
::values/partition-id (UUID/randomUUID)
::store-type/storage-type ::store-type/sql
::sql/db-spec (sql/h2-db-spec path)}]
(qp/testing-for-resource-leaks
(let [conn (connect config)]
(try @(transact conn [{:db/id (tempid :db.part/db), :db/ident ::foobar}])
(finally (release conn)))))
(qp/testing-for-resource-leaks
(let [conn (connect config)
conn2 (connect config)]
(try
(is (= 1 (count (datoms (db-snapshot conn) :eavt ::foobar))))
(is (= conn conn2))
(finally (release conn)
(release conn2)))))))
(deftest distinct-local-connections
(let [uuid-1 (UUID/randomUUID)
uuid-2 (UUID/randomUUID)
config-1a {:autogenetic true
::database/id uuid-1}
config-1b {:autogenetic true
::database/id uuid-1}
config-2a {:autogenetic true
::database/id uuid-2}
config-2b {:autogenetic true
::database/id uuid-2}]
(qp/testing-for-resource-leaks
(let [conn-1a (connect config-1a)
conn-1b (connect config-1b)
conn-2a (connect config-2a)
conn-2b (connect config-2b)]
(try
(is (= conn-1a conn-1b))
(is (= conn-2a conn-2b))
(is (not= conn-1a conn-2a))
(finally
(release conn-1a)
(release conn-1b)
(release conn-2a)
(release conn-2b)))))))
(deftest ensure-multiple-everythings-play-nice
(conf/with-overrides {:eva.database.indexes.max-tx-delta 5}
(qp/testing-for-resource-leaks
(let [shared-messenger (local-msg-2/local-messenger)]
(qu/overriding
[node/messenger-nodes
{:discriminator (fn [_ config] (::tag config))
:constructor
(fn [_ config] (local-msg-2/facade-messenger shared-messenger (::tag config)))}
transactor/transactor-manager
{:discriminator
(fn [_ config] [(::tag config) (::database/id config) (::transactor/id config)])}
indexing/indexer-manager
{:discriminator
(fn [user-id config] [user-id (::database/id config) (::id config) (::tag config)])}]
(let [database-id (UUID/randomUUID)
config (base-config database-id (sql-config database-id) (messenger-config))
vs (qu/acquire vs-manager/value-store-manager :random config)
database-info (catalog/initialize-database* vs database-id)
num-txors 3
num-idxrs 3
num-txs 50
txor-configs (map #(assoc config ::tag %) (range num-txors))
idxr-configs (map #(assoc config ::tag %) (range num-idxrs))
total-txs (* num-txors num-txs)
start-latch (CountDownLatch. num-txors)
finish-latch (CountDownLatch. num-txors)
transactors (for [config txor-configs]
(transactor/transactor :testing config))
indexors (doall (for [config idxr-configs]
(qu/acquire indexing/indexer-manager :testing config)))
_ (doall (map deref indexors))
futs (doall (map-indexed
(fn [i txor]
(future
(.countDown start-latch)
(.await start-latch)
(binding [transactor/*max-concurrent-modification-retries* Long/MAX_VALUE]
(dotimes [n num-txs]
(Thread/sleep 10 #_(rand-int 20))
;; ^^ sleep to force some interleaving of transactions
without this , the first transactor to get in ' bullies '
;; the rest since it doesn't have to go through the
;; slow IO process of updating its state.
(try (p/process-transaction txor
{:database-id database-id
:tx-data [[:db/add 0 :db/doc (format "%s-%s" i n)]]})
(catch Exception e
(clojure.tools.logging/warn e)
(println "failed attempting to add " (format "%s-%s" i n))
(.countDown finish-latch)
(throw e))
)))
(.countDown finish-latch)))
transactors))]
(try
(.await finish-latch)
(is (apply =
total-txs
(map (comp eva.api/basis-t p/db-snapshot p/repair-and-reload deref :database-connection)
transactors)))
is the tx - log in the state we expect it to be ?
(is (= (range (count (->> transactors first :database-connection deref p/log)))
(->> transactors first :database-connection deref p/log seq (map (comp :tx-num deref)))))
#_(clojure.pprint/pprint (->> transactors first :database-connection deref p/log seq
(map (comp (juxt count #(remove (fn [d] (= 15 (:a d))) %)) eva.core/entry->datoms deref))))
(is (= (inc total-txs)
(->> transactors first :database-connection deref p/log count)))
(finally
(broker/stop-broker!)
(doseq [config txor-configs]
(qu/release* transactor/transactor-manager :testing config true))
(doseq [idxor indexors]
(qu/release idxor true))
(qu/release vs true)))))))))
| null | https://raw.githubusercontent.com/Workiva/eva/b7b8a6a5215cccb507a92aa67e0168dc777ffeac/core/test/eva/v2/system/integration_test.clj | clojure |
Licensed under the Eclipse Public License 1.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-1.0.php
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
^^ sleep to force some interleaving of transactions
the rest since it doesn't have to go through the
slow IO process of updating its state.
sort since publishes can be disorderly
vv did everyone get their txs?
prime the pump
^^ sleep to force some interleaving of transactions
the rest since it doesn't have to go through the
slow IO process of updating its state. | Copyright 2015 - 2019 Workiva Inc.
distributed under the License is distributed on an " AS IS " BASIS ,
(ns eva.v2.system.integration-test
(:require [clojure.test :refer :all]
[eva.api :refer :all]
[barometer.core :as m]
[eva.v2.messaging.address :as address]
[eva.v2.system.peer-connection.core :as peer]
[eva.v2.system.transactor.core :as transactor]
[eva.v2.system.indexing.core :as indexing]
[eva.v2.database.core :as database]
[eva.v2.messaging.jms.alpha.local-broker :as broker]
[eva.v2.messaging.node.manager.alpha :as node]
[eva.v2.messaging.node.local-simulation :as local-msg-2]
[eva.v2.messaging.node.manager.types :as node-types]
[eva.v2.storage.value-store.core :as values]
[eva.v2.storage.value-store.manager :as vs-manager]
[eva.v2.storage.block-store.impl.memory :as memory]
[eva.v2.system.database-catalogue.core :as catalog]
[eva.v2.system.database-connection.core :as dbc]
[eva.v2.storage.block-store.types :as store-type]
[eva.v2.storage.value-store.core :as values]
[eva.v2.storage.block-store.impl.sql :as sql]
[eva.v2.system.protocols :as p]
[eva.config :as conf]
[quartermaster.core :as qu]
[eva.quartermaster-patches :as qp]
[eva.v2.storage.local :as h2]
[eva.v2.storage.local :refer [init-h2-db]]
[com.stuartsierra.component :as c])
(:import [java.io File]
[java.util UUID]
[java.util.concurrent CountDownLatch]))
(defn base-config
[database-id storage-config messenger-config]
(merge {::address/transaction-submission "submit-addr"
::address/transaction-publication "pub-addr"
::address/index-updates "indexes"
::peer/id (java.util.UUID/randomUUID)
::transactor/id (UUID/randomUUID)
::indexing/id (UUID/randomUUID)
::database/id database-id}
storage-config
messenger-config))
(defn memory-config
[database-id]
{::store-type/storage-type ::store-type/memory
::memory/store-id database-id
::values/partition-id (java.util.UUID/randomUUID)})
(defn messenger-config
[]
{:messenger-node-config/type :broker-uri
:broker-type "org.apache.activemq.ActiveMQConnectionFactory"
:broker-uri "vm"})
(defn sql-config
[database-id]
{::store-type/storage-type ::store-type/sql
::values/partition-id database-id
::sql/db-spec (h2/db-spec (h2/temp-file))})
(deftest ensure-autogenetic-release-does-release-everything
(qp/testing-for-resource-leaks
(release (connect {:autogenetic true}))))
(deftest peer-reconnect
(qp/testing-for-resource-leaks
(let [database-id (UUID/randomUUID)
config (base-config database-id (memory-config database-id) (messenger-config))
vs (qu/acquire vs-manager/value-store-manager :random config)
database-info (catalog/initialize-database* vs database-id)
transactor (qu/acquire transactor/transactor-manager :random config)
indexer (qu/acquire indexing/indexer-manager :random config)
connection (connect config)
connection-2 (connect config)]
(try
(is (= connection connection-2))
(is @(transact connection []))
(broker/stop-broker!)
(broker/broker-uri)
(release connection)
(println "Sleeping 1500 ms for transactor to recover")
Give the transactor its 1000 ms to restart messaging .
(let [connection (connect config)]
(is (not= connection connection-2))
(is @(transact connection [])))
(finally
(qu/release transactor true)
(qu/release indexer true)
(release connection)
(release connection-2)
(broker/stop-broker!)
(qu/release vs true))))))
(defn- fake-messenger
[publish-fn]
(let [status (atom false)
id (qu/new-resource-id)]
(reify
p/PublisherManager
(open-publisher! [this addr opts] {:connection-error (atom nil)})
(publish! [this addr pub-data] (publish-fn pub-data))
(close-publisher! [_ _])
p/SubscriberManager
(subscribe! [this id addr f opts] {:connection-error (atom nil)})
(unsubscribe! [_ _ _])
p/ResponderManager
(open-responder! [this addr f opts] {:connection-error (atom nil)})
(close-responder! [_ _])
p/ErrorListenerManager
(register-error-listener [_ _ _ _])
(unregister-error-listener [_ _])
qu/SharedResource
(initiated? [_] @status)
(status* [_] {})
(resource-id [_] id)
(initiate [this] (reset! status true) this)
(terminate [this] (reset! status false) this)
(force-terminate [_] (reset! status false)))))
(deftest ensure-multiple-transactors-play-nice
(qp/testing-for-resource-leaks
(let [pub-log (atom [])
publish-fn (fn [tag] #(swap! pub-log conj {:node tag :pub-data %}))]
(qu/overriding
[node/messenger-nodes
{:constructor (fn [_ config]
(fake-messenger (publish-fn (::tag config))))
:discriminator (fn [_ config] [(::tag config) (:broker-uri config)])}
transactor/transactor-manager
{:discriminator
(fn [_ config] [(::tag config) (::database/id config) (::transactor/id config)])}]
(let [database-id (UUID/randomUUID)
config (base-config database-id (sql-config database-id) (messenger-config))
vs (qu/acquire vs-manager/value-store-manager :random config)
database-info (catalog/initialize-database* vs database-id)
num-txors 10
num-txs 20
configs (map #(assoc config ::tag %) (range num-txors))
total-txs (* num-txors num-txs)
start-latch (CountDownLatch. num-txors)
finish-latch (CountDownLatch. num-txors)
transactors (for [config configs]
(transactor/transactor :testing config))
futs (doall (map-indexed
(fn [i txor]
(future
(.countDown start-latch)
(.await start-latch)
(binding [transactor/*max-concurrent-modification-retries* Long/MAX_VALUE]
(dotimes [n num-txs]
(Thread/sleep 5)
without this , the first transactor to get in ' bullies '
(p/process-transaction txor
{:database-id database-id
:tx-data [[:db/add (tempid :db.part/user)
:db/doc (format "%s-%s" i n)]]})))
(.countDown finish-latch)))
transactors))]
(try
(.await finish-latch)
(is (apply =
total-txs
(map (comp eva.api/basis-t p/db-snapshot p/repair-and-reload deref :database-connection)
transactors)))
(let [pub-log @pub-log]
(is (= total-txs (count pub-log)))
(is (= (range 1 (inc total-txs))
(sort (map (comp :tx-num :pub-data) pub-log))))
(is (= (zipmap (range num-txors) (repeat num-txs))
(frequencies (map :node pub-log)))))
is the tx - log in the state we expect it to be ?
(is (= (range (count (->> transactors first :database-connection deref p/log)))
(->> transactors first :database-connection deref p/log seq (map (comp :tx-num deref)))))
(is (= (inc total-txs)
(->> transactors first :database-connection deref p/log count)))
(finally
(broker/stop-broker!)
(doseq [config configs]
(qu/release* transactor/transactor-manager :testing config true))
(qu/release vs true))))))))
(defn break-publishing [messenger-node]
(let [messenger-node (atom messenger-node)]
(reify
p/PublisherManager
(open-publisher! [this addr opts] (p/open-publisher! @messenger-node addr opts))
(publish! [this addr pub-data]
nil #_(when (< 0.5 (rand))
(p/publish! @messenger-node addr pub-data)))
(close-publisher! [_ addr] (p/close-publisher! @messenger-node addr))
p/SubscriberManager
(subscribe! [_ id addr f opts] (p/subscribe! @messenger-node id addr f opts))
(unsubscribe! [_ id addr] (p/unsubscribe! @messenger-node id addr))
p/ResponderManager
(open-responder! [_ addr f opts] (p/open-responder! @messenger-node addr f opts))
(close-responder! [_ addr] (p/close-responder! @messenger-node addr))
p/RequestorManager
(open-requestor! [mn addr opts] (p/open-requestor! @messenger-node addr opts))
(close-requestor! [mn addr] (p/close-requestor! @messenger-node addr))
(request! [mn addr request-msg] (p/request! @messenger-node addr request-msg))
p/ErrorListenerManager
(register-error-listener [mn key f args] (p/register-error-listener @messenger-node key f args))
(unregister-error-listener [mn key] (p/unregister-error-listener @messenger-node key))
qu/SharedResource
(initiated? [_] (qu/initiated? @messenger-node))
(status* [_] (qu/status* @messenger-node))
(resource-id [_] (qu/resource-id @messenger-node))
(initiate [this] (swap! messenger-node qu/initiate) this)
(terminate [this] (swap! messenger-node qu/terminate) this)
(force-terminate [_] (qu/force-terminate messenger-node)))))
(deftest ensure-peers-can-proceed-without-publishes
(qp/testing-for-resource-leaks
(qu/overriding
[node/messenger-nodes
{:discriminator (fn [user-id config]
[(::tag config) (node-types/messenger-node-discriminator user-id config)])
:constructor (fn [definition config]
(let [real-messenger (node-types/messenger-node-constructor (second definition) config)]
(break-publishing real-messenger)))}
dbc/database-connection-manager
{:discriminator (fn [user-id config] [user-id (::database/id config) (::tag config)])}
peer/peer-connection-manager
{:discriminator (fn [_ config] [(::tag config) (::database/id config)])}]
(let [database-id (UUID/randomUUID)
config (base-config database-id (sql-config database-id) (messenger-config))
vs (qu/acquire vs-manager/value-store-manager :random config)
database-info (catalog/initialize-database* vs database-id)
num-conns 10
txor (qu/acquire transactor/transactor-manager :txor (assoc config ::tag -1))
conns (doall (for [i (range num-conns)]
(connect (assoc config ::tag i))))]
(try
@(transact (nth conns 0) [[:db/add (tempid :db.part/user) :db/ident :test-var]])
(doall (for [i (range 1 num-conns)]
(is (= nil (pull (db (nth conns i)) [:db/ident] :test-var)))))
(doall (map sync-db (take 5 conns)))
(doall (map #(.syncDb ^eva.Connection %) (drop 5 conns)))
(doall (for [i (range 1 num-conns)]
(is (= {:db/ident :test-var}
(pull (db (nth conns i)) [:db/ident] :test-var)))))
(finally
(qu/release vs true)
(doseq [c conns] (release c))
(qu/release txor true)
(broker/stop-broker!)))))))
(deftest ensure-stale-transactors-recover-from-pipeline-failure
(qp/testing-for-resource-leaks
(qu/overriding
[node/messenger-nodes {:constructor (fn [_ _] (fake-messenger (constantly true)))}
transactor/transactor-manager
{:discriminator (fn [user-id config] [user-id (::database/id config) (::transactor/id config)])}]
(let [database-id (UUID/randomUUID)
config (base-config database-id (sql-config database-id) (messenger-config))
vs (qu/acquire vs-manager/value-store-manager :random config)
database-info (catalog/initialize-database* vs database-id)
num-txors 2
transactors (for [i (range num-txors)]
(qu/acquire transactor/transactor-manager i config))
staleness-count (->> "eva.v2.system.database-connection.core.staleness-meter"
(m/get-metric m/DEFAULT)
(m/count))]
(try (is (= 2 (count (sequence (comp (map deref) (distinct)) transactors))))
(is (p/process-transaction @(first transactors)
{:database-id database-id
:tx-data [[:db/add 0 :db/doc "foo"]]}))
(is (p/process-transaction @(second transactors)
{:database-id database-id
:tx-data [[:db.fn/cas 0 :db/doc "foo" "bar"]]}))
(is (= (inc staleness-count)
(->> "eva.v2.system.database-connection.core.staleness-meter"
(m/get-metric m/DEFAULT)
(m/count))))
(finally
(doseq [txor transactors] (qu/release txor true))
(broker/stop-broker!)
(qu/release vs true)))))))
(deftest persistent-h2-store
(let [^File tmpfile (sql/temp-file)
path (.getPath tmpfile)
config {:autogenetic true
::database/id (UUID/randomUUID)
::values/partition-id (UUID/randomUUID)
::store-type/storage-type ::store-type/sql
::sql/db-spec (sql/h2-db-spec path)}]
(qp/testing-for-resource-leaks
(let [conn (connect config)]
(try @(transact conn [{:db/id (tempid :db.part/db), :db/ident ::foobar}])
(finally (release conn)))))
(qp/testing-for-resource-leaks
(let [conn (connect config)
conn2 (connect config)]
(try
(is (= 1 (count (datoms (db-snapshot conn) :eavt ::foobar))))
(is (= conn conn2))
(finally (release conn)
(release conn2)))))))
(deftest distinct-local-connections
(let [uuid-1 (UUID/randomUUID)
uuid-2 (UUID/randomUUID)
config-1a {:autogenetic true
::database/id uuid-1}
config-1b {:autogenetic true
::database/id uuid-1}
config-2a {:autogenetic true
::database/id uuid-2}
config-2b {:autogenetic true
::database/id uuid-2}]
(qp/testing-for-resource-leaks
(let [conn-1a (connect config-1a)
conn-1b (connect config-1b)
conn-2a (connect config-2a)
conn-2b (connect config-2b)]
(try
(is (= conn-1a conn-1b))
(is (= conn-2a conn-2b))
(is (not= conn-1a conn-2a))
(finally
(release conn-1a)
(release conn-1b)
(release conn-2a)
(release conn-2b)))))))
(deftest ensure-multiple-everythings-play-nice
(conf/with-overrides {:eva.database.indexes.max-tx-delta 5}
(qp/testing-for-resource-leaks
(let [shared-messenger (local-msg-2/local-messenger)]
(qu/overriding
[node/messenger-nodes
{:discriminator (fn [_ config] (::tag config))
:constructor
(fn [_ config] (local-msg-2/facade-messenger shared-messenger (::tag config)))}
transactor/transactor-manager
{:discriminator
(fn [_ config] [(::tag config) (::database/id config) (::transactor/id config)])}
indexing/indexer-manager
{:discriminator
(fn [user-id config] [user-id (::database/id config) (::id config) (::tag config)])}]
(let [database-id (UUID/randomUUID)
config (base-config database-id (sql-config database-id) (messenger-config))
vs (qu/acquire vs-manager/value-store-manager :random config)
database-info (catalog/initialize-database* vs database-id)
num-txors 3
num-idxrs 3
num-txs 50
txor-configs (map #(assoc config ::tag %) (range num-txors))
idxr-configs (map #(assoc config ::tag %) (range num-idxrs))
total-txs (* num-txors num-txs)
start-latch (CountDownLatch. num-txors)
finish-latch (CountDownLatch. num-txors)
transactors (for [config txor-configs]
(transactor/transactor :testing config))
indexors (doall (for [config idxr-configs]
(qu/acquire indexing/indexer-manager :testing config)))
_ (doall (map deref indexors))
futs (doall (map-indexed
(fn [i txor]
(future
(.countDown start-latch)
(.await start-latch)
(binding [transactor/*max-concurrent-modification-retries* Long/MAX_VALUE]
(dotimes [n num-txs]
(Thread/sleep 10 #_(rand-int 20))
without this , the first transactor to get in ' bullies '
(try (p/process-transaction txor
{:database-id database-id
:tx-data [[:db/add 0 :db/doc (format "%s-%s" i n)]]})
(catch Exception e
(clojure.tools.logging/warn e)
(println "failed attempting to add " (format "%s-%s" i n))
(.countDown finish-latch)
(throw e))
)))
(.countDown finish-latch)))
transactors))]
(try
(.await finish-latch)
(is (apply =
total-txs
(map (comp eva.api/basis-t p/db-snapshot p/repair-and-reload deref :database-connection)
transactors)))
is the tx - log in the state we expect it to be ?
(is (= (range (count (->> transactors first :database-connection deref p/log)))
(->> transactors first :database-connection deref p/log seq (map (comp :tx-num deref)))))
#_(clojure.pprint/pprint (->> transactors first :database-connection deref p/log seq
(map (comp (juxt count #(remove (fn [d] (= 15 (:a d))) %)) eva.core/entry->datoms deref))))
(is (= (inc total-txs)
(->> transactors first :database-connection deref p/log count)))
(finally
(broker/stop-broker!)
(doseq [config txor-configs]
(qu/release* transactor/transactor-manager :testing config true))
(doseq [idxor indexors]
(qu/release idxor true))
(qu/release vs true)))))))))
|
a2195abded5cc2ce7b461e2b9c26a55db35e79e3908787cdf8bc68c50828a0d0 | utahstreetlabs/risingtide | feed_bolts.clj | (ns risingtide.storm.feed-bolts
(:require [risingtide
[core :refer [log-err]]
[dedupe :refer [dedupe]]
[key :as key]
[config :as config]
[redis :as redis]
[active-users :refer [active-users active?]]]
[risingtide.feed
[filters :refer [for-everything-feed?]]
[persist :refer [encode-feed write-feed! initialize-digest-feed]]
[set :as feed-set]]
[risingtide.model
[feed :refer [add]]
[timestamps :refer [timestamp]]]
[backtype.storm [clojure :refer [emit-bolt! defbolt ack! bolt]]]
[clojure.tools.logging :as log]
[metrics
[meters :refer [defmeter mark!]]
[timers :refer [deftimer time!]]
[gauges :refer [gauge]]])
(:import java.util.concurrent.ScheduledThreadPoolExecutor))
(defn schedule-with-delay [function interval]
(doto (java.util.concurrent.ScheduledThreadPoolExecutor. 1)
(.scheduleWithFixedDelay function interval interval java.util.concurrent.TimeUnit/SECONDS)))
(defmeter expiration-run "expiration runs")
(deftimer expiration-time)
(defmeter feed-writes "feeds written")
(deftimer add-feed-time)
(deftimer feed-write-time)
(defbolt add-to-feed ["id" "user-id" "feed"] {:prepare true}
[conf context collector]
(let [redii (redis/redii)
feed-set (atom {})
feed-set-size-gauge (gauge "feed-set-size" (count @feed-set))
feed-expirer (schedule-with-delay
#(try
(time! expiration-time (feed-set/expire! redii feed-set))
(mark! expiration-run)
(catch Exception e (log-err "exception expiring cache" e *ns*)))
config/feed-expiration-delay)]
(bolt
(execute [{id "id" message "message" user-id "user-id" story "story" new-feed "feed" listing-id "listing-id" :as tuple}]
(case message
:remove (feed-set/remove! redii feed-set user-id listing-id)
(doseq [s (if story [story] new-feed)]
(feed-set/add! redii feed-set user-id (dedupe s))))
(when (and (or story (= :remove message) (not (empty? new-feed)))
(active? redii user-id))
(let [feed @(@feed-set user-id)]
(mark! feed-writes)
(time! feed-write-time
(write-feed! redii (key/user-feed user-id) feed))))
(ack! collector tuple))
(cleanup [] (.shutdown feed-expirer)))))
(defmeter curated-feed-writes "stories written to curated feed")
(defbolt add-to-curated-feed ["id" "feed"] {:prepare true}
[conf context collector]
(let [redii (redis/redii)
feed-atom (atom (initialize-digest-feed redii (key/everything-feed)))
feed-expirer (schedule-with-delay
#(try
(feed-set/expire-feed! feed-atom)
(catch Exception e (log-err "exception expiring cache" e *ns*)))
config/feed-expiration-delay)
curated-feed-size-gauge (gauge "curated-feed-size" (count (seq @feed-atom)))]
(bolt
(execute [{id "id" story "story" :as tuple}]
(when (for-everything-feed? story)
(swap! feed-atom add (dedupe story))
(write-feed! redii (key/everything-feed) @feed-atom)
(mark! curated-feed-writes))
(ack! collector tuple)))))
(defn feed-to-json [feed]
(with-out-str (print (encode-feed (map #(assoc % :timestamp (timestamp %)) feed)))))
(defn serialize [{id "id" feed "feed" :as tuple} collector]
(emit-bolt! collector [id (feed-to-json feed)]))
(defbolt serialize-feed ["id" "feed"] [tuple collector]
(serialize tuple collector)
(ack! collector tuple))
| null | https://raw.githubusercontent.com/utahstreetlabs/risingtide/bc5b798396679739469b1bd8ee1b03db76178cde/src/risingtide/storm/feed_bolts.clj | clojure | (ns risingtide.storm.feed-bolts
(:require [risingtide
[core :refer [log-err]]
[dedupe :refer [dedupe]]
[key :as key]
[config :as config]
[redis :as redis]
[active-users :refer [active-users active?]]]
[risingtide.feed
[filters :refer [for-everything-feed?]]
[persist :refer [encode-feed write-feed! initialize-digest-feed]]
[set :as feed-set]]
[risingtide.model
[feed :refer [add]]
[timestamps :refer [timestamp]]]
[backtype.storm [clojure :refer [emit-bolt! defbolt ack! bolt]]]
[clojure.tools.logging :as log]
[metrics
[meters :refer [defmeter mark!]]
[timers :refer [deftimer time!]]
[gauges :refer [gauge]]])
(:import java.util.concurrent.ScheduledThreadPoolExecutor))
(defn schedule-with-delay [function interval]
(doto (java.util.concurrent.ScheduledThreadPoolExecutor. 1)
(.scheduleWithFixedDelay function interval interval java.util.concurrent.TimeUnit/SECONDS)))
(defmeter expiration-run "expiration runs")
(deftimer expiration-time)
(defmeter feed-writes "feeds written")
(deftimer add-feed-time)
(deftimer feed-write-time)
(defbolt add-to-feed ["id" "user-id" "feed"] {:prepare true}
[conf context collector]
(let [redii (redis/redii)
feed-set (atom {})
feed-set-size-gauge (gauge "feed-set-size" (count @feed-set))
feed-expirer (schedule-with-delay
#(try
(time! expiration-time (feed-set/expire! redii feed-set))
(mark! expiration-run)
(catch Exception e (log-err "exception expiring cache" e *ns*)))
config/feed-expiration-delay)]
(bolt
(execute [{id "id" message "message" user-id "user-id" story "story" new-feed "feed" listing-id "listing-id" :as tuple}]
(case message
:remove (feed-set/remove! redii feed-set user-id listing-id)
(doseq [s (if story [story] new-feed)]
(feed-set/add! redii feed-set user-id (dedupe s))))
(when (and (or story (= :remove message) (not (empty? new-feed)))
(active? redii user-id))
(let [feed @(@feed-set user-id)]
(mark! feed-writes)
(time! feed-write-time
(write-feed! redii (key/user-feed user-id) feed))))
(ack! collector tuple))
(cleanup [] (.shutdown feed-expirer)))))
(defmeter curated-feed-writes "stories written to curated feed")
(defbolt add-to-curated-feed ["id" "feed"] {:prepare true}
[conf context collector]
(let [redii (redis/redii)
feed-atom (atom (initialize-digest-feed redii (key/everything-feed)))
feed-expirer (schedule-with-delay
#(try
(feed-set/expire-feed! feed-atom)
(catch Exception e (log-err "exception expiring cache" e *ns*)))
config/feed-expiration-delay)
curated-feed-size-gauge (gauge "curated-feed-size" (count (seq @feed-atom)))]
(bolt
(execute [{id "id" story "story" :as tuple}]
(when (for-everything-feed? story)
(swap! feed-atom add (dedupe story))
(write-feed! redii (key/everything-feed) @feed-atom)
(mark! curated-feed-writes))
(ack! collector tuple)))))
(defn feed-to-json [feed]
(with-out-str (print (encode-feed (map #(assoc % :timestamp (timestamp %)) feed)))))
(defn serialize [{id "id" feed "feed" :as tuple} collector]
(emit-bolt! collector [id (feed-to-json feed)]))
(defbolt serialize-feed ["id" "feed"] [tuple collector]
(serialize tuple collector)
(ack! collector tuple))
|
|
6f8520a9999acc77b50354198586926f3a3f581ad9cdbdca900e0e5cc6f63f00 | theodormoroianu/SecondYearCourses | LambdaChurch_20210415165644.hs | module LambdaChurch where
import Data.Char (isLetter)
import Data.List ( nub )
-- | Pretty-printing to the object language's concrete syntax
-- (as opposed to 'Show', which is kept for debugging).
class ShowNice a where
  showNice :: a -> String

-- | Parsing from concrete syntax: consumes a prefix of the input and
-- returns the parsed value together with the remaining input.
class ReadNice a where
  readNice :: String -> (a, String)

-- | A variable is a base name plus a disambiguating counter, so fresh
-- variables can be generated from used ones (rendered @x@, @x_1@, ...).
data Variable
  = Variable
      { name :: String -- ^ base name
      , count :: Int   -- ^ disambiguator; 0 means "as written by the user"
      }
  deriving (Show, Eq, Ord)

-- | A variable with counter 0 from a plain name.
var :: String -> Variable
var x = Variable x 0

instance ShowNice Variable where
  showNice (Variable x 0) = x
  showNice (Variable x cnt) = x <> "_" <> show cnt

instance ReadNice Variable where
  readNice s
    | null x = error $ "expected variable but found " <> s
    | otherwise = (var x, s')
    where
      -- a variable name is a maximal run of letters
      (x, s') = span isLetter s

-- | A variable with the same base name as @var@ that clashes with none of
-- @vars@: its counter is one above the largest counter already in use.
freshVariable :: Variable -> [Variable] -> Variable
freshVariable var vars = Variable x (cnt + 1)
  where
    x = name var
    varsWithName = filter ((== x) . name) vars
    Variable _ cnt = maximum (var : varsWithName)
-- | Untyped lambda terms: variables, applications and abstractions.
data Term
  = V Variable
  | App Term Term
  | Lam Variable Term
  deriving (Show)

-- alpha-equivalence
-- | Structural equality up to consistent renaming of bound variables.
aEq :: Term -> Term -> Bool
aEq (V x) (V x') = x == x'
aEq (App t1 t2) (App t1' t2') = aEq t1 t1' && aEq t2 t2'
aEq (Lam x t) (Lam x' t')
  | x == x' = aEq t t'
  -- different binders: rename both bodies to a common fresh variable, then compare
  | otherwise = aEq (subst (V y) x t) (subst (V y) x' t')
  where
    fvT = freeVars t
    fvT' = freeVars t'
    allFV = nub ([x, x'] ++ fvT ++ fvT')
    y = freshVariable x allFV
aEq _ _ = False

-- | Shorthand: a variable term from a plain name.
v :: String -> Term
v x = V (var x)

-- | Shorthand: an abstraction binding a plain name.
lam :: String -> Term -> Term
lam x = Lam (var x)

-- | Nested abstractions: @lams ["x","y"] t == lam "x" (lam "y" t)@.
lams :: [String] -> Term -> Term
lams xs t = foldr lam t xs

-- | Left-associative application operator for building terms.
($$) :: Term -> Term -> Term
($$) = App
infixl 9 $$
-- Fully parenthesized concrete syntax, e.g. @(\x.(x y))@.
instance ShowNice Term where
  showNice (V var) = showNice var
  showNice (App t1 t2) = "(" <> showNice t1 <> " " <> showNice t2 <> ")"
  showNice (Lam var t) = "(" <> "\\" <> showNice var <> "." <> showNice t <> ")"

-- NOTE(review): the parser is partial -- on malformed input the pattern
-- matches in the where-bindings fail at runtime.
instance ReadNice Term where
  readNice [] = error "Nothing to read"
  -- abstraction: "(\x.body)"
  readNice ('(' : '\\' : s) = (Lam var t, s'')
    where
      (var, '.' : s') = readNice s
      (t, ')' : s'') = readNice s'
  -- application: "(fun arg)"
  readNice ('(' : s) = (App t1 t2, s'')
    where
      (t1, ' ' : s') = readNice s
      (t2, ')' : s'') = readNice s'
  -- otherwise a bare variable
  readNice s = (V var, s')
    where
      (var, s') = readNice s
-- | The free variables of a term, each listed once.
freeVars :: Term -> [Variable]
freeVars (V var) = [var]
freeVars (App t1 t2) = nub $ freeVars t1 ++ freeVars t2
freeVars (Lam var t) = filter (/= var) (freeVars t)

-- subst u x t defines [u/x]t, i.e., substituting u for x in t
-- for example [3/x](x + x) == 3 + 3
-- This substitution avoids variable captures so it is safe to be used when
-- reducing terms with free variables (e.g., if evaluating inside lambda abstractions)
subst
  :: Term -- ^ substitution term
  -> Variable -- ^ variable to be substituted
  -> Term -- ^ term in which the substitution occurs
  -> Term
subst u x (V y)
  | x == y = u
  | otherwise = V y
subst u x (App t1 t2) = App (subst u x t1) (subst u x t2)
subst u x (Lam y t)
  -- the binder shadows x: nothing to substitute below it
  | x == y = Lam y t
  -- the binder cannot capture anything free in u: recurse directly
  | y `notElem` fvU = Lam y (subst u x t)
  -- x does not occur free in the body: term unchanged
  | x `notElem` fvT = Lam y t
  -- otherwise rename the binder to a fresh y' first, to avoid capture
  | otherwise = Lam y' (subst u x (subst (V y') y t))
  where
    fvT = freeVars t
    fvU = freeVars u
    allFV = nub ([x] ++ fvU ++ fvT)
    y' = freshVariable y allFV
-- Normal order reduction
-- - like call by name
-- - but also reduce under lambda abstractions if no application is possible
-- - guarantees reaching a normal form if it exists

-- | One step of normal-order (leftmost-outermost) beta reduction;
-- 'Nothing' when the term is already in normal form.
normalReduceStep :: Term -> Maybe Term
normalReduceStep (App (Lam v t) t2) = Just $ subst t2 v t
normalReduceStep (App t1 t2)
  | Just t1' <- normalReduceStep t1 = Just $ App t1' t2
  | Just t2' <- normalReduceStep t2 = Just $ App t1 t2'
normalReduceStep (Lam x t)
  | Just t' <- normalReduceStep t = Just $ Lam x t'
normalReduceStep _ = Nothing

-- | Iterate 'normalReduceStep' to a normal form.
-- May diverge if the term has no normal form.
normalReduce :: Term -> Term
normalReduce t
  | Just t' <- normalReduceStep t = normalReduce t'
  | otherwise = t

-- | The evaluation strategy used throughout this module.
reduce :: Term -> Term
reduce = normalReduce

-- alpha-beta equivalence (for strongly normalizing terms) is obtained by
-- fully evaluating the terms using beta-reduction, then checking their
-- alpha-equivalence.
abEq :: Term -> Term -> Bool
abEq t1 t2 = aEq (reduce t1) (reduce t2)

-- | Parse a term, normalize it, and pretty-print the result.
-- Partial: the parser must consume the entire input string.
evaluate :: String -> String
evaluate s = showNice (reduce t)
  where
    (t, "") = readNice s
-- Church Encodings in Lambda
------------
--BOOLEANS--
------------
--A boolean is any way to choose between two alternatives (t -> t -> t)

--The boolean constant true always chooses the first alternative:
--  true = \t.\f.t
cTrue :: Term
cTrue = lams ["t", "f"] (v "t")

--The boolean constant false always chooses the second alternative:
--  false = \t.\f.f
cFalse :: Term
cFalse = lams ["t", "f"] (v "f")

--If is not really needed because we can use the booleans themselves, but...
--  if = \c.\t.\e. c t e  (the condition itself selects the branch)
cIf :: Term
cIf = lams ["c", "t", "e"] (v "c" $$ v "t" $$ v "e")

--The boolean negation switches the alternatives:
--  not = \b. b false true
cNot :: Term
cNot = lam "b" (v "b" $$ cFalse $$ cTrue)

--The boolean conjunction can be built as a conditional:
--  and = \a.\b. a b false  (if a then b else false)
cAnd :: Term
cAnd = lams ["a", "b"] (v "a" $$ v "b" $$ cFalse)

--The boolean disjunction can be built as a conditional:
--  or = \a.\b. a true b  (if a then true else b)
cOr :: Term
cOr = lams ["a", "b"] (v "a" $$ cTrue $$ v "b")
---------
--PAIRS--
---------
-- a pair with components of type a and b is a way to compute something based
-- on the values contained within the pair (a -> b -> c) -> c

--cPair builds a pair out of two values as an object which, when given
--a function to be applied on the values, it will apply it on them:
--  pair = \x.\y.\f. f x y
cPair :: Term
cPair = lams ["x", "y", "f"] (v "f" $$ v "x" $$ v "y")

--first projection uses the function selecting first component on a pair
cFst :: Term
cFst = lam "p" (v "p" $$ lams ["x", "y"] (v "x"))

--second projection
cSnd :: Term
cSnd = lam "p" (v "p" $$ lams ["x", "y"] (v "y"))
-------------------
--NATURAL NUMBERS--
-------------------
-- A natural number is any way to iterate a function s a number of times
-- over an initial value z ( (t -> t) -> t -> t )

--Zero applies s no times at all: 0 = \s.\z.z
c0 :: Term
c0 = lams ["s", "z"] (v "z")

c1 :: Term
c1 = lams ["s", "z"] (v "s" $$ v "z")

c2 :: Term
c2 = lams ["s", "z"] (v "s" $$ (v "s" $$ v "z"))

--Successor: apply s once more than the given numeral does.
cS :: Term
cS = lams ["t","s","z"] (v "s" $$ (v "t" $$ v "s" $$ v "z"))

--Inject a Haskell integer as a Church numeral, built directly in normal
--form (s applied k times to z). Negative inputs are clamped to zero.
cNat :: Integer -> Term
cNat k = lams ["s", "z"] (foldr (\_ body -> v "s" $$ body) (v "z") [1 .. max 0 k])
-- | Addition: run m's iteration of s over z, then iterate s n more times.
cPlus :: Term
cPlus = lams ["n", "m", "s", "z"] (v "n" $$ v "s" $$ (v "m" $$ v "s" $$ v "z"))

-- | Addition, alternative form: apply the successor n times to m.
cPlus' :: Term
cPlus' = lams ["n", "m"] (v "n" $$ cS $$ v "m")

-- | Multiplication: iterate "m applications of s" n times.
cMul :: Term
cMul = lams ["n", "m", "s"] (v "n" $$ (v "m" $$ v "s"))

-- | Multiplication, alternative form: add m to zero, n times.
cMul' :: Term
cMul' = lams ["n", "m"] (v "n" $$ (cPlus' $$ v "m") $$ c0)

-- | Exponentiation m^n: the numeral n applied directly to m.
cPow :: Term
cPow = lams ["m", "n"] (v "n" $$ v "m")

-- | Exponentiation, alternative form: multiply one by m, n times.
cPow' :: Term
cPow' = lams ["m", "n"] (v "n" $$ (cMul' $$ v "m") $$ c1)

-- | Zero test: iterating @\b -> and false b@ (constantly false) over true
-- yields true only when there are zero iterations.
cIs0 :: Term
cIs0 = lam "n" (v "n" $$ (cAnd $$ cS $$ c1) $$ cTrue) -- NOTE(review): see original below
-- | Predecessor via the pair trick: iterate (prev, cur) -> (cur, cur+1)
-- starting from (0, 0); after n steps the first component is n-1 (0 for 0).
cPred' :: Term
cPred' = lam "n" (cFst $$
  (v "n"
    $$ lam "p" (lam "x" (cPair $$ v "x" $$ (cS $$ v "x"))
                 $$ (cSnd $$ v "p"))
    $$ (cPair $$ c0 $$ c0)
  ))

-- | Factorial via the pair trick: iterate (i, acc) -> (i+1, i*acc)
-- starting from (1, 1); after n steps the second component is n!.
cFactorial :: Term
cFactorial = lam "n" (cSnd $$
  (v "n"
    $$ lam "p"
        (cPair
          $$ (cS $$ (cFst $$ v "p"))
          $$ (cMul $$ (cFst $$ v "p") $$ (cSnd $$ v "p"))
        )
    $$ (cPair $$ c1 $$ c1)
  ))

-- | Fibonacci via the pair trick: iterate (a, b) -> (b, a+b)
-- starting from (0, 1); after n steps the first component is fib n.
cFibonacci :: Term
cFibonacci = lam "n" (cFst $$
  (v "n"
    $$ lam "p"
        (cPair
          $$ (cSnd $$ v "p")
          $$ (cPlus $$ (cFst $$ v "p") $$ (cSnd $$ v "p"))
        )
    $$ (cPair $$ c0 $$ c1)
  ))
-- | Division with remainder: @cDivMod m n@ reduces to the pair
-- (m div n, m mod n) for n > 0. It iterates m times over a (quotient,
-- remainder) pair starting at (0, m), subtracting n while it still fits.
-- For n = 0 every subtraction "fits", so the result is the pair (m, m).
cDivMod :: Term
cDivMod =
  lams ["m", "n"]
    (v "m"
      $$ lam "pair"
          (cIf
            -- does n still fit into the running remainder?
            $$ (cLte $$ v "n" $$ (cSnd $$ v "pair"))
            $$ (cPair
                 $$ (cS $$ (cFst $$ v "pair"))
                 $$ (cSub
                      $$ (cSnd $$ v "pair")
                      $$ v "n"
                    )
               )
            $$ v "pair"
          )
      $$ (cPair $$ c0 $$ v "m")
    )
-- Church lists: a list is its own right fold, \agg.\init. agg x1 (agg x2 (... init)).

-- | The empty list folds to the initial value.
cNil :: Term
cNil = lams ["agg", "init"] (v "init")

-- | Cons combines the head with the fold of the tail.
cCons :: Term
cCons = lams ["x","l","agg", "init"]
  (v "agg"
    $$ v "x"
    $$ (v "l" $$ v "agg" $$ v "init")
  )

-- | Inject a Haskell list of terms as a Church list.
cList :: [Term] -> Term
cList = foldr (\x l -> cCons $$ x $$ l) cNil

-- | Inject a Haskell list of integers as a Church list of Church numerals.
cNatList :: [Integer] -> Term
cNatList = cList . map cNat

-- | Sum of a list of numerals: fold with addition from zero.
cSum :: Term
cSum = lam "l" (v "l" $$ cPlus $$ c0)

-- | Emptiness test: any cons step yields false, the nil case yields true.
cIsNil :: Term
cIsNil = lam "l" (v "l" $$ lams ["x", "a"] cFalse $$ cTrue)

-- | Head of a list, or the supplied default for the empty list.
cHead :: Term
cHead = lams ["l", "default"] (v "l" $$ lams ["x", "a"] (v "x") $$ v "default")

-- | Tail via the pair trick: fold building (tail-so-far, list-so-far);
-- the first component at the end is the tail (nil for the empty list).
cTail :: Term
cTail = lam "l" (cFst $$
  (v "l"
    $$ lams ["x","p"] (lam "t" (cPair $$ v "t" $$ (cCons $$ v "x" $$ v "t"))
                        $$ (cSnd $$ v "p"))
    $$ (cPair $$ cNil $$ cNil)
  ))
-- | Curry's Y combinator: fix = \f.(\x.f (x x)) (\x.f (x x)).
-- Yields general recursion; only terminates under the normal-order
-- strategy used by 'reduce'.
fix :: Term
fix = lam "f" (lam "x" (v "f" $$ (v "x" $$ v "x")) $$ lam "x" (v "f" $$ (v "x" $$ v "x")))

-- | Division with remainder by genuine recursion instead of bounded
-- iteration: repeatedly subtract n from the running remainder, counting
-- subtractions in the first component of a (quotient, remaining) pair.
-- n = 0 is handled up front, returning (0, m).
cDivMod' :: Term
cDivMod' = lams ["m", "n"]
  (cIs0 $$ v "n"
    $$ (cPair $$ c0 $$ v "m")
    $$ (fix
         $$ lams ["f", "p"]
             (lam "x"
               -- x = remaining - n; when it hits zero decide (via <=)
               -- whether one final subtraction still fits
               (cIs0 $$ v "x"
                 $$ (cLte $$ v "n" $$ (cSnd $$ v "p")
                      $$ (cPair $$ (cS $$ (cFst $$ v "p")) $$ c0)
                      $$ v "p"
                    )
                 $$ (v "f" $$ (cPair $$ (cS $$ (cFst $$ v "p")) $$ v "x"))
               )
               $$ (cSub $$ (cSnd $$ v "p") $$ v "n")
             )
         $$ (cPair $$ c0 $$ v "m")
       )
  )
-- | The Sudan function, a classic recursive (non-primitive-recursive)
-- function, defined via the Y combinator:
--   F 0 x y = x + y
--   F n x 0 = x
--   F n x y = F (n-1) (F n x (y-1)) (F n x (y-1) + y)
cSudan :: Term
cSudan = fix $$ lam "f" (lams ["n", "x", "y"]
  (cIs0 $$ v "n"
    $$ (cPlus $$ v "x" $$ v "y")
    $$ (cIs0 $$ v "y"
         $$ v "x"
         $$ (lam "fnpy"
              -- share the recursive result F n x (y-1) via a beta-redex
              (v "f" $$ (cPred $$ v "n")
                $$ v "fnpy"
                $$ (cPlus $$ v "fnpy" $$ v "y")
              )
              $$ (v "f" $$ v "n" $$ v "x" $$ (cPred $$ v "y"))
            )
       )
  ))

-- | The Ackermann function via the Y combinator:
--   A 0 n = n + 1
--   A m 0 = A (m-1) 1
--   A m n = A (m-1) (A m (n-1))
cAckermann :: Term
cAckermann = fix $$ lam "A" (lams ["m", "n"]
  (cIs0 $$ v "m"
    $$ (cS $$ v "n")
    $$ (cIs0 $$ v "n"
         $$ (v "A" $$ (cPred $$ v "m") $$ c1)
         $$ (v "A" $$ (cPred $$ v "m")
              $$ (v "A" $$ v "m" $$ (cPred $$ v "n")))
       )
  ))
| null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/5e359e6a7cf588a527d27209bf53b4ce6b8d5e83/FLP/Laboratoare/Lab%209/.history/LambdaChurch_20210415165644.hs | haskell | alpha-equivalence
subst u x t defines [u/x]t, i.e., substituting u for x in t
This substitution avoids variable captures so it is safe to be used when
reducing terms with free variables (e.g., if evaluating inside lambda abstractions)
^ substitution term
^ variable to be substitutes
^ term in which the substitution occurs
Normal order reduction
- like call by name
- but also reduce under lambda abstractions if no application is possible
- guarantees reaching a normal form if it exists
alpha-beta equivalence (for strongly normalizing terms) is obtained by
fully evaluating the terms using beta-reduction, then checking their
alpha-equivalence.
Church Encodings in Lambda
----------
BOOLEANS--
----------
If is not really needed because we can use the booleans themselves, but...
The boolean negation switches the alternatives
The boolean conjunction can be built as a conditional
The boolean disjunction can be built as a conditional
-------
-------
a pair with components of type a and b is a way to compute something based
on the values contained within the pair (a -> b -> c) -> c
a function to be applied on the values, it will apply it on them.
-----------------
NATURAL NUMBERS--
-----------------
A natural number is any way to iterate a function s a number of times
over an initial value z ( (t -> t) -> t -> t ) | module LambdaChurch where
import Data.Char (isLetter)
import Data.List ( nub )
class ShowNice a where
showNice :: a -> String
class ReadNice a where
readNice :: String -> (a, String)
data Variable
= Variable
{ name :: String
, count :: Int
}
deriving (Show, Eq, Ord)
var :: String -> Variable
var x = Variable x 0
instance ShowNice Variable where
showNice (Variable x 0) = x
showNice (Variable x cnt) = x <> "_" <> show cnt
instance ReadNice Variable where
readNice s
| null x = error $ "expected variable but found " <> s
| otherwise = (var x, s')
where
(x, s') = span isLetter s
freshVariable :: Variable -> [Variable] -> Variable
freshVariable var vars = Variable x (cnt + 1)
where
x = name var
varsWithName = filter ((== x) . name) vars
Variable _ cnt = maximum (var : varsWithName)
data Term
= V Variable
| App Term Term
| Lam Variable Term
deriving (Show)
aEq :: Term -> Term -> Bool
aEq (V x) (V x') = x == x'
aEq (App t1 t2) (App t1' t2') = aEq t1 t1' && aEq t2 t2'
aEq (Lam x t) (Lam x' t')
| x == x' = aEq t t'
| otherwise = aEq (subst (V y) x t) (subst (V y) x' t')
where
fvT = freeVars t
fvT' = freeVars t'
allFV = nub ([x, x'] ++ fvT ++ fvT')
y = freshVariable x allFV
aEq _ _ = False
v :: String -> Term
v x = V (var x)
lam :: String -> Term -> Term
lam x = Lam (var x)
lams :: [String] -> Term -> Term
lams xs t = foldr lam t xs
($$) :: Term -> Term -> Term
($$) = App
infixl 9 $$
instance ShowNice Term where
showNice (V var) = showNice var
showNice (App t1 t2) = "(" <> showNice t1 <> " " <> showNice t2 <> ")"
showNice (Lam var t) = "(" <> "\\" <> showNice var <> "." <> showNice t <> ")"
instance ReadNice Term where
readNice [] = error "Nothing to read"
readNice ('(' : '\\' : s) = (Lam var t, s'')
where
(var, '.' : s') = readNice s
(t, ')' : s'') = readNice s'
readNice ('(' : s) = (App t1 t2, s'')
where
(t1, ' ' : s') = readNice s
(t2, ')' : s'') = readNice s'
readNice s = (V var, s')
where
(var, s') = readNice s
freeVars :: Term -> [Variable]
freeVars (V var) = [var]
freeVars (App t1 t2) = nub $ freeVars t1 ++ freeVars t2
freeVars (Lam var t) = filter (/= var) (freeVars t)
for example [ 3 / x](x + x ) = = 3 + 3
subst
-> Term
subst u x (V y)
| x == y = u
| otherwise = V y
subst u x (App t1 t2) = App (subst u x t1) (subst u x t2)
subst u x (Lam y t)
| x == y = Lam y t
| y `notElem` fvU = Lam y (subst u x t)
| x `notElem` fvT = Lam y t
| otherwise = Lam y' (subst u x (subst (V y') y t))
where
fvT = freeVars t
fvU = freeVars u
allFV = nub ([x] ++ fvU ++ fvT)
y' = freshVariable y allFV
normalReduceStep :: Term -> Maybe Term
normalReduceStep (App (Lam v t) t2) = Just $ subst t2 v t
normalReduceStep (App t1 t2)
| Just t1' <- normalReduceStep t1 = Just $ App t1' t2
| Just t2' <- normalReduceStep t2 = Just $ App t1 t2'
normalReduceStep (Lam x t)
| Just t' <- normalReduceStep t = Just $ Lam x t'
normalReduceStep _ = Nothing
normalReduce :: Term -> Term
normalReduce t
| Just t' <- normalReduceStep t = normalReduce t'
| otherwise = t
reduce :: Term -> Term
reduce = normalReduce
abEq :: Term -> Term -> Bool
abEq t1 t2 = aEq (reduce t1) (reduce t2)
evaluate :: String -> String
evaluate s = showNice (reduce t)
where
(t, "") = readNice s
A boolean is any way to choose between two alternatives ( t - > t - > t )
The boolean constant true always chooses the first alternative
cTrue :: Term
cTrue = undefined
The boolean constant false always chooses the second alternative
cFalse :: Term
cFalse = undefined
cIf :: Term
cIf = undefined
cNot :: Term
cNot = undefined
cAnd :: Term
cAnd = undefined
cOr :: Term
cOr = undefined
builds a pair out of two values as an object which , when given
cPair :: Term
cPair = undefined
first projection uses the function selecting first component on a pair
cFst :: Term
cFst = undefined
second projection
cSnd :: Term
cSnd = undefined
c0 :: Term
c0 = undefined
c1 :: Term
c1 = lams ["s", "z"] (v "s" $$ v "z")
c2 :: Term
c2 = lams ["s", "z"] (v "s" $$ (v "s" $$ v "z"))
cS :: Term
cS = lams ["t","s","z"] (v "s" $$ (v "t" $$ v "s" $$ v "z"))
cNat :: Integer -> Term
cNat = undefined
cPlus :: Term
cPlus = lams ["n", "m", "s", "z"] (v "n" $$ v "s" $$ (v "m" $$ v "s" $$ v "z"))
cPlus' :: Term
cPlus' = lams ["n", "m"] (v "n" $$ cS $$ v "m")
cMul :: Term
cMul = lams ["n", "m", "s"] (v "n" $$ (v "m" $$ v "s"))
cMul' :: Term
cMul' = lams ["n", "m"] (v "n" $$ (cPlus' $$ v "m") $$ c0)
cPow :: Term
cPow = lams ["m", "n"] (v "n" $$ v "m")
cPow' :: Term
cPow' = lams ["m", "n"] (v "n" $$ (cMul' $$ v "m") $$ c1)
cIs0 :: Term
cIs0 = lam "n" (v "n" $$ (cAnd $$ cFalse) $$ cTrue)
cS' :: Term
cS' = lam "n" (v "n" $$ cS $$ c1)
cS'Rev0 :: Term
cS'Rev0 = lams ["s","z"] c0
cPred :: Term
cPred =
lam "n"
(cIf
$$ (cIs0 $$ v "n")
$$ c0
$$ (v "n" $$ cS' $$ cS'Rev0))
cSub :: Term
cSub = lams ["m", "n"] (v "n" $$ cPred $$ v "m")
cLte :: Term
cLte = lams ["m", "n"] (cIs0 $$ (cSub $$ v "m" $$ v "n"))
cGte :: Term
cGte = lams ["m", "n"] (cLte $$ v "n" $$ v "m")
cLt :: Term
cLt = lams ["m", "n"] (cNot $$ (cGte $$ v "m" $$ v "n"))
cGt :: Term
cGt = lams ["m", "n"] (cLt $$ v "n" $$ v "m")
cEq :: Term
cEq = lams ["m", "n"] (cAnd $$ (cLte $$ v "m" $$ v "n") $$ (cLte $$ v "n" $$ v "m"))
cPred' :: Term
cPred' = lam "n" (cFst $$
(v "n"
$$ lam "p" (lam "x" (cPair $$ v "x" $$ (cS $$ v "x"))
$$ (cSnd $$ v "p"))
$$ (cPair $$ c0 $$ c0)
))
cFactorial :: Term
cFactorial = lam "n" (cSnd $$
(v "n"
$$ lam "p"
(cPair
$$ (cS $$ (cFst $$ v "p"))
$$ (cMul $$ (cFst $$ v "p") $$ (cSnd $$ v "p"))
)
$$ (cPair $$ c1 $$ c1)
))
cFibonacci :: Term
cFibonacci = lam "n" (cFst $$
(v "n"
$$ lam "p"
(cPair
$$ (cSnd $$ v "p")
$$ (cPlus $$ (cFst $$ v "p") $$ (cSnd $$ v "p"))
)
$$ (cPair $$ c0 $$ c1)
))
cDivMod :: Term
cDivMod =
lams ["m", "n"]
(v "m"
$$ lam "pair"
(cIf
$$ (cLte $$ v "n" $$ (cSnd $$ v "pair"))
$$ (cPair
$$ (cS $$ (cFst $$ v "pair"))
$$ (cSub
$$ (cSnd $$ v "pair")
$$ v "n"
)
)
$$ v "pair"
)
$$ (cPair $$ c0 $$ v "m")
)
cNil :: Term
cNil = lams ["agg", "init"] (v "init")
cCons :: Term
cCons = lams ["x","l","agg", "init"]
(v "agg"
$$ v "x"
$$ (v "l" $$ v "agg" $$ v "init")
)
cList :: [Term] -> Term
cList = foldr (\x l -> cCons $$ x $$ l) cNil
cNatList :: [Integer] -> Term
cNatList = cList . map cNat
cSum :: Term
cSum = lam "l" (v "l" $$ cPlus $$ c0)
cIsNil :: Term
cIsNil = lam "l" (v "l" $$ lams ["x", "a"] cFalse $$ cTrue)
cHead :: Term
cHead = lams ["l", "default"] (v "l" $$ lams ["x", "a"] (v "x") $$ v "default")
cTail :: Term
cTail = lam "l" (cFst $$
(v "l"
$$ lams ["x","p"] (lam "t" (cPair $$ v "t" $$ (cCons $$ v "x" $$ v "t"))
$$ (cSnd $$ v "p"))
$$ (cPair $$ cNil $$ cNil)
))
fix :: Term
fix = lam "f" (lam "x" (v "f" $$ (v "x" $$ v "x")) $$ lam "x" (v "f" $$ (v "x" $$ v "x")))
cDivMod' :: Term
cDivMod' = lams ["m", "n"]
(cIs0 $$ v "n"
$$ (cPair $$ c0 $$ v "m")
$$ (fix
$$ lams ["f", "p"]
(lam "x"
(cIs0 $$ v "x"
$$ (cLte $$ v "n" $$ (cSnd $$ v "p")
$$ (cPair $$ (cS $$ (cFst $$ v "p")) $$ c0)
$$ v "p"
)
$$ (v "f" $$ (cPair $$ (cS $$ (cFst $$ v "p")) $$ v "x"))
)
$$ (cSub $$ (cSnd $$ v "p") $$ v "n")
)
$$ (cPair $$ c0 $$ v "m")
)
)
cSudan :: Term
cSudan = fix $$ lam "f" (lams ["n", "x", "y"]
(cIs0 $$ v "n"
$$ (cPlus $$ v "x" $$ v "y")
$$ (cIs0 $$ v "y"
$$ v "x"
$$ (lam "fnpy"
(v "f" $$ (cPred $$ v "n")
$$ v "fnpy"
$$ (cPlus $$ v "fnpy" $$ v "y")
)
$$ (v "f" $$ v "n" $$ v "x" $$ (cPred $$ v "y"))
)
)
))
cAckermann :: Term
cAckermann = fix $$ lam "A" (lams ["m", "n"]
(cIs0 $$ v "m"
$$ (cS $$ v "n")
$$ (cIs0 $$ v "n"
$$ (v "A" $$ (cPred $$ v "m") $$ c1)
$$ (v "A" $$ (cPred $$ v "m")
$$ (v "A" $$ v "m" $$ (cPred $$ v "n")))
)
))
|
703ff6277462e9dcb66de3eb645087571ba3c56760ce6189a72e74955f7ba6ef | mauricioszabo/check | mocks.clj | (ns check.mocks
(:require [clojure.spec.alpha :as s]))
;; A mock clause's arrow: plain return (=>) or a stream of successive returns.
(s/def ::arrow '#{=> =streams=>})
;; The call being mocked: a function symbol followed by its literal arguments.
(s/def ::template (s/cat :fn symbol? :args (s/* any?)))
;; Grammar of the `mocking` macro's arguments: one or more mock clauses,
;; a separator (--- or ===), then the body forms to run with mocks installed.
(s/def ::mocks (s/cat
                :mocks (s/+ (s/cat :template (s/spec ::template) :arrow ::arrow :return any?))
                :arrow '#{--- ===}
                :body (s/* any?)))

;; Normalizes one conformed clause: for => just {:return v}; for =streams=>
;; a generated binding ({:let-fn ...}) holding the stream in an atom plus a
;; thunk ({:fn ...}) that pops the next value and throws when exhausted.
(defn- normalize-return [{:keys [arrow fn args return]}]
  (case arrow
    => {:return return}
    =streams=> (let [s (gensym "stream-")]
                 {:let-fn `[~s (atom ~return)]
                  :fn `(fn []
                         (when (empty? @~s)
                           (throw (ex-info "No more values to stream on mock"
                                           {:function '~fn
                                            :args ~args})))
                         (let [ret# (first @~s)]
                           (swap! ~s rest)
                           ret#))})))
;; Turns the conformed mock clauses into pairs [fn-symbol {args -> return-spec}],
;; grouping all mocked argument lists of the same function together.
(defn- normalize-mocking-params [mockings]
  (->> mockings
       (map (fn [{:keys [template return arrow]}]
              [(:fn template) (assoc template :arrow arrow :return return)]))
       (group-by first)
       (map (fn [[k v]]
              [k (->> v
                      (map (fn [[_ v]] [(:args v) (normalize-return v)]))
                      (into {}))]))))

;; Builds a [fn-symbol replacement-fn-form] pair suitable for with-redefs.
;; The generated function looks its actual argument list up in the mock table
;; (streamed returns call their thunk; plain returns are yielded directly);
;; a miss throws ex-info carrying the expected and actual argument lists.
(defn- to-function [[fun args+return]]
  (let [all-lets (->> args+return
                      (map (comp :let-fn second))
                      (filter identity)
                      (mapcat identity))]
    [fun
     `(let [~@all-lets]
        (fn ~(-> fun name symbol) [ & old-args#]
          (if-let [return# (get ~args+return old-args#)]
            (let [{:keys [~'fn ~'return]} return#]
              (cond
                ~'fn (~'fn)
                ~'return ~'return))
            (throw (ex-info "No mocked calls for this fn/args"
                            {:function '~fun
                             :expected-args (keys ~args+return)
                             :actual-args old-args#})))))]))
(defmacro mocking
  "Mocks a group of calls for the duration of a body. Clauses of the form
  `(f args...) => ret` or `(f args...) =streams=> [r1 r2 ...]` are listed
  before a `---`/`===` separator; the remaining forms run with each named
  function redefined (via with-redefs) to answer only those argument lists."
  [ & args]
  (s/assert* ::mocks args)
  (let [{:keys [mocks body]} (s/conform ::mocks args)
        mockings (->> mocks
                      normalize-mocking-params
                      (mapcat to-function)
                      vec)]
    `(with-redefs ~mockings
       ~@body)))
| null | https://raw.githubusercontent.com/mauricioszabo/check/fc4a3a619a8ce63d152f940de12bc96b83a4adfd/src/check/mocks.clj | clojure | (into {}))) | (ns check.mocks
(:require [clojure.spec.alpha :as s]))
(s/def ::arrow '#{=> =streams=>})
(s/def ::template (s/cat :fn symbol? :args (s/* any?)))
(s/def ::mocks (s/cat
:mocks (s/+ (s/cat :template (s/spec ::template) :arrow ::arrow :return any?))
:arrow '#{--- ===}
:body (s/* any?)))
(defn- normalize-return [{:keys [arrow fn args return]}]
(case arrow
=> {:return return}
=streams=> (let [s (gensym "stream-")]
{:let-fn `[~s (atom ~return)]
:fn `(fn []
(when (empty? @~s)
(throw (ex-info "No more values to stream on mock"
{:function '~fn
:args ~args})))
(let [ret# (first @~s)]
(swap! ~s rest)
ret#))})))
(defn- normalize-mocking-params [mockings]
(->> mockings
(map (fn [{:keys [template return arrow]}]
[(:fn template) (assoc template :arrow arrow :return return)]))
(group-by first)
(map (fn [[k v]]
[k (->> v
(map (fn [[_ v]] [(:args v) (normalize-return v)]))
(into {}))]))))
(defn- to-function [[fun args+return]]
(let [all-lets (->> args+return
(map (comp :let-fn second))
(filter identity)
(mapcat identity))]
[fun
`(let [~@all-lets]
(fn ~(-> fun name symbol) [ & old-args#]
(if-let [return# (get ~args+return old-args#)]
(let [{:keys [~'fn ~'return]} return#]
(cond
~'fn (~'fn)
~'return ~'return))
(throw (ex-info "No mocked calls for this fn/args"
{:function '~fun
:expected-args (keys ~args+return)
:actual-args old-args#})))))]))
(defmacro mocking
"Mocks a group of calls. "
[ & args]
(s/assert* ::mocks args)
(let [{:keys [mocks body]} (s/conform ::mocks args)
mockings (->> mocks
normalize-mocking-params
(mapcat to-function)
vec)]
`(with-redefs ~mockings
~@body)))
|
64384e83c74a1fdb243d13643568d23db7831f3d30a8f5a4f718316d0d9d719e | racket/redex | rbtrees-3.rkt | #lang racket/base
(require redex/benchmark
"util.rkt"
redex/reduction-semantics)
;; Export everything so the benchmark harness can introspect this model.
(provide (all-defined-out))

;; Human-readable description of the intentionally injected bug.
(define the-error "doesn't increment black depth in non-empty case")

;; Rewrite that plants the bug inside the rbt judgment: for a black node the
;; conclusion's black depth is left at n_bd instead of (s n_bd).
(define-rewrite bug3
  (rbt (B (c_1 t_11 n_1 t_12) n (c_2 t_21 n_2 t_22)) n_1min n_2max (s n_bd))
  ==>
  (rbt (B (c_1 t_11 n_1 t_12) n (c_2 t_21 n_2 t_22)) n_1min n_2max n_bd)
  #:context (define-judgment-form)
  #:once-only)

;; Instantiate the red-black-tree model and the generators with the bug applied.
(include/rewrite (lib "redex/examples/rbtrees.rkt") rbtrees bug3)
(include/rewrite "generators.rkt" generators bug-mod-rw)
(require (only-in (submod "." rbtrees) num->n))

;; A small counter-example exposing the bug.
(define small-counter-example
  (term (B
         (B
          (R E (num->n 1) E)
          (num->n 2)
          (R E (num->n 3) E))
         (num->n 4)
         (R E (num->n 5) E))))

;; The same kind of counter-example in the enumerator's numeral encoding
;; (O = zero, (s n) = successor).
(define enum-small-counter-example
  (term (R (B (R E O E)
              (s O)
              E)
           (s (s (s O)))
           (B E
              (s (s (s (s O))))
              E))))

;; Confirm both counter-examples are caught by the buggy model.
(test small-counter-example)
(test enum-small-counter-example)
(require redex/benchmark
"util.rkt"
redex/reduction-semantics)
(provide (all-defined-out))
(define the-error "doesn't increment black depth in non-empty case")
(define-rewrite bug3
(rbt (B (c_1 t_11 n_1 t_12) n (c_2 t_21 n_2 t_22)) n_1min n_2max (s n_bd))
==>
(rbt (B (c_1 t_11 n_1 t_12) n (c_2 t_21 n_2 t_22)) n_1min n_2max n_bd)
#:context (define-judgment-form)
#:once-only)
(include/rewrite (lib "redex/examples/rbtrees.rkt") rbtrees bug3)
(include/rewrite "generators.rkt" generators bug-mod-rw)
(require (only-in (submod "." rbtrees) num->n))
(define small-counter-example
(term (B
(B
(R E (num->n 1) E)
(num->n 2)
(R E (num->n 3) E))
(num->n 4)
(R E (num->n 5) E))))
(define enum-small-counter-example
(term (R (B (R E O E)
(s O)
E)
(s (s (s O)))
(B E
(s (s (s (s O))))
E))))
(test small-counter-example)
(test enum-small-counter-example) |
|
65d668545c052c7308b721218c9c4b86fe29a395f75248778280160d07abb0fb | mbenke/jnp3-haskell | TestLens3.hs | module Main where
import Atom
import Lens3
-- | Shift an atom one unit along x, via the composed point/x lens.
moveAtom :: Atom -> Atom
moveAtom = over (point `comp` x) (+1)

-- atom0 after one move
atom2 = moveAtom atom0

-- | Print the sample atoms: the originals and the moved one.
main = mapM_ print [atom0, atom1, atom2]
| null | https://raw.githubusercontent.com/mbenke/jnp3-haskell/712c5a6a24ad0efb45aee2b48e66bb91d949848e/Code/lens/TestLens3.hs | haskell | module Main where
import Atom
import Lens3
moveAtom :: Atom -> Atom
moveAtom = over (point `comp` x) (+1)
atom2 = moveAtom atom0
main = mapM_ print [atom0, atom1, atom2]
|
|
33db562b25ca6ef0cb5a5d7dc85f4f0b62027be2c1e80554174bc388e862d0c4 | grin-compiler/ghc-wpc-sample-programs | Context.hs | # LANGUAGE MultiParamTypeClasses , FlexibleInstances , FlexibleContexts , TypeSynonymInstances #
# OPTIONS_GHC -fno - warn - orphans #
|
Module : Text . Regex . Base . Context
Copyright : ( c ) 2006
SPDX - License - Identifier : BSD-3 - Clause
Maintainer :
Stability : experimental
Portability : non - portable ( MPTC+FD )
This is a module of instances of ' RegexContext ' ( defined in
Text . Regex . Base . ) . Nothing else is exported . This is
usually imported via the Text . Regex . Base convenience package which
itself is re - exported from newer Text . Regex . XXX modules provided by
the different regex - xxx backends .
These instances work for all the supported types and backends
interchangably . These instances provide the different results that
can be gotten from a match or matchM operation ( often via the @=~@ and
@=~~@ operators with combine @makeRegex@ with @match@ and @matchM@
respectively ) . This module name is Context because they operators are
context dependent : use them in a context that expects an Int and you
get a count of matches , use them in a context and get True if
there is a match , etc .
@RegexContext a b c@ takes a regular expression suppied in a type ' a '
generated by ' RegexMaker ' and a target text supplied in type ' b ' to a
result type ' c ' using the ' match ' class function . The ' matchM ' class
function works like ' match ' unless there is no match found , in which
case it calls ' fail ' in the ( arbitrary ) monad context .
There are a few type synonyms from RegexLike that are used here :
@
-- | 0 based index from start of source , or ( -1 ) for unused
type MatchOffset = Int
-- | non - negative length of a match
type MatchLength = Int
type MatchArray = Array Int ( MatchOffset , MatchLength )
type MatchText source = Array Int ( source , ( MatchOffset , MatchLength ) )
@
There are also a few newtypes that used to prevent any possible
overlap of types , which were not needed for GHC 's late overlap
detection but are needed for use in Hugs .
@
newtype AllSubmatches f b = AllSubmatches { getAllSubmatches : : ( f b ) }
newtype AllTextSubmatches f b = AllTextSubmatches { getAllTextSubmatches : : ( f b ) }
newtype AllMatches f b = AllMatches { getAllMatches : : ( f b ) }
newtype AllTextMatches f b = AllTextMatches : : ( f b ) }
@
The newtypes ' @f@ parameters are the containers , usually @[]@ or
@Array Int@ , ( where the arrays all have lower bound 0 ) .
The two * Submatches newtypes return only information on the first
match . The other two newtypes return information on all the
non - overlapping matches . The two * Text * newtypes are used to mark
result types that contain the same type as the target text .
Where provided , noncaptured submatches will have a @MatchOffset@ of
( -1 ) and non - negative otherwise . The semantics of submatches depend
on the backend and its compile and execution options . Where provided ,
@MatchLength@ will always be non - negative . Arrays with no elements
are returned with bounds of ( 1,0 ) . Arrays with elements will have a
lower bound of 0 .
XXX THIS HADDOCK DOCUMENTATION IS OUT OF DATE XXX
These are for finding the first match in the target text :
@ RegexContext a b Bool @ :
Whether there is any match or not .
@ RegexContext a b ( ) @ :
Useful as a guard with @matchM@ or @=~~@ in a monad , since failure to match calls ' fail ' .
@ RegexContext a b b @ :
This returns the text of the whole match .
It will return ' empty ' from the ' Extract ' type class if there is no match .
These are defined in each backend module , but documented here for convenience .
@ RegexContext a b ( MatchOffset , MatchLength ) @ :
This returns the initial index and length of the whole match .
MatchLength will always be non - negative , and 0 for a failed match .
@ RegexContext a b ( MatchResult b ) @ : The
' MatchResult ' structure with details for the match . This is the
structure copied from the old @JRegex@ pacakge .
@ RegexContext a b ( b , b , b ) @ :
The text before the match , the text of the match , the text after the match
@ RegexContext a b ( b , MatchText b , b ) @ :
The text before the match , the details of the match , and the text after the match
@ RegexContext a b ( b , b , b , [ b ] ) @ :
The text before the match , the text of the match , the text after the
match , and a list of the text of the 1st and higher sub - parts of the
match . This is the same return value as used in the old
@Text . Regex@ API .
Two containers of the submatch offset information :
@ RegexContext a b MatchArray @ :
Array of @(MatchOffset , MatchLength)@ for all the sub matches .
The whole match is at the intial 0th index .
Noncaptured submatches will have a @MatchOffset@ of ( -1 )
The array will have no elements and bounds ( 1,0 ) if there is no match .
@ RegexContext a b ( AllSubmatches [ ] ( MatchOffset , MatchLength ) @ :
List of @(MatchOffset , MatchLength)@
The whole match is the first element , the rest are the submatches ( if any ) in order .
The list is empty if there is no match .
Two containers of the submatch text and offset information :
@ RegexContext a b ( AllTextSubmatches ( Array Int ) ( b , ( MatchOffset , MatchLength ) ) ) @
@ RegexContext a b ( AllTextSubmatches [ ] ( b , ( MatchOffset , MatchLength ) ) ) @
Two containers of the submatch text information :
@ RegexContext a b ( AllTextSubmatches [ ] b ) @
@ RegexContext a b ( AllTextSubmatches ( Array Int ) b ) @
These instances are for all the matches ( non - overlapping ) . Note that
backends are supposed to supply ' RegexLike ' instances for which the
default ' matchAll ' and ' matchAllText ' stop searching after returning
any successful but empty match .
@ RegexContext a b Int @ :
The number of matches , non - negative .
Two containers for locations of all matches :
@ RegexContext a b ( AllMatches [ ] ( MatchOffset , MatchLength ) ) @
@ RegexContext a b ( AllMatches ( Array Int ) ( MatchOffset , MatchLength ) ) @
Two containers for the locations of all matches and their submatches :
@ RegexContext a b [ MatchArray ] @ :
@ RegexContext a b ( AllMatches ( Array Int ) MatchArray ) @
Two containers for the text and locations of all matches and their submatches :
@ RegexContext a b [ MatchText b ] @
@ RegexContext a b ( AllTextMatches ( Array Int ) ( MatchText b ) ) @
Two containers for text of all matches :
@ RegexContext a b ( AllTextMatches [ ] b ) @
@ RegexContext a b ( AllTextMatches ( Array Int ) b ) @
Four containers for text of all matches and their submatches :
@ RegexContext a b [ [ b ] ] @
@ RegexContext a b ( AllTextMatches ( Array Int ) [ b ] ) @
@ RegexContext a b ( AllTextMatches [ ] ( Array Int b ) ) @
@ RegexContext a b ( AllTextMatches ( Array Int ) ( Array Int b ) ) @
Unused matches are ' empty ' ( defined via ' Extract ' )
Module : Text.Regex.Base.Context
Copyright : (c) Chris Kuklewicz 2006
SPDX-License-Identifier: BSD-3-Clause
Maintainer :
Stability : experimental
Portability : non-portable (MPTC+FD)
This is a module of instances of 'RegexContext' (defined in
Text.Regex.Base.RegexLike). Nothing else is exported. This is
usually imported via the Text.Regex.Base convenience package which
itself is re-exported from newer Text.Regex.XXX modules provided by
the different regex-xxx backends.
These instances work for all the supported types and backends
interchangably. These instances provide the different results that
can be gotten from a match or matchM operation (often via the @=~@ and
@=~~@ operators with combine @makeRegex@ with @match@ and @matchM@
respectively). This module name is Context because they operators are
context dependent: use them in a context that expects an Int and you
get a count of matches, use them in a Bool context and get True if
there is a match, etc.
@RegexContext a b c@ takes a regular expression suppied in a type 'a'
generated by 'RegexMaker' and a target text supplied in type 'b' to a
result type 'c' using the 'match' class function. The 'matchM' class
function works like 'match' unless there is no match found, in which
case it calls 'fail' in the (arbitrary) monad context.
There are a few type synonyms from RegexLike that are used here:
@
-- | 0 based index from start of source, or (-1) for unused
type MatchOffset = Int
-- | non-negative length of a match
type MatchLength = Int
type MatchArray = Array Int (MatchOffset, MatchLength)
type MatchText source = Array Int (source, (MatchOffset, MatchLength))
@
There are also a few newtypes that used to prevent any possible
overlap of types, which were not needed for GHC's late overlap
detection but are needed for use in Hugs.
@
newtype AllSubmatches f b = AllSubmatches {getAllSubmatches :: (f b)}
newtype AllTextSubmatches f b = AllTextSubmatches {getAllTextSubmatches :: (f b)}
newtype AllMatches f b = AllMatches {getAllMatches :: (f b)}
newtype AllTextMatches f b = AllTextMatches {getAllTextMatches :: (f b) }
@
The newtypes' @f@ parameters are the containers, usually @[]@ or
@Array Int@, (where the arrays all have lower bound 0).
The two *Submatches newtypes return only information on the first
match. The other two newtypes return information on all the
non-overlapping matches. The two *Text* newtypes are used to mark
result types that contain the same type as the target text.
Where provided, noncaptured submatches will have a @MatchOffset@ of
(-1) and non-negative otherwise. The semantics of submatches depend
on the backend and its compile and execution options. Where provided,
@MatchLength@ will always be non-negative. Arrays with no elements
are returned with bounds of (1,0). Arrays with elements will have a
lower bound of 0.
XXX THIS HADDOCK DOCUMENTATION IS OUT OF DATE XXX
These are for finding the first match in the target text:
@ RegexContext a b Bool @ :
Whether there is any match or not.
@ RegexContext a b () @ :
Useful as a guard with @matchM@ or @=~~@ in a monad, since failure to match calls 'fail'.
@ RegexContext a b b @ :
This returns the text of the whole match.
It will return 'empty' from the 'Extract' type class if there is no match.
These are defined in each backend module, but documented here for convenience.
@ RegexContext a b (MatchOffset,MatchLength) @ :
This returns the initial index and length of the whole match.
MatchLength will always be non-negative, and 0 for a failed match.
@ RegexContext a b (MatchResult b) @ : The
'MatchResult' structure with details for the match. This is the
structure copied from the old @JRegex@ pacakge.
@ RegexContext a b (b, b, b) @ :
The text before the match, the text of the match, the text after the match
@ RegexContext a b (b, MatchText b, b) @ :
The text before the match, the details of the match, and the text after the match
@ RegexContext a b (b, b, b, [b]) @ :
The text before the match, the text of the match, the text after the
match, and a list of the text of the 1st and higher sub-parts of the
match. This is the same return value as used in the old
@Text.Regex@ API.
Two containers of the submatch offset information:
@ RegexContext a b MatchArray @ :
Array of @(MatchOffset,MatchLength)@ for all the sub matches.
The whole match is at the intial 0th index.
Noncaptured submatches will have a @MatchOffset@ of (-1)
The array will have no elements and bounds (1,0) if there is no match.
@ RegexContext a b (AllSubmatches [] (MatchOffset,MatchLength) @ :
List of @(MatchOffset,MatchLength)@
The whole match is the first element, the rest are the submatches (if any) in order.
The list is empty if there is no match.
Two containers of the submatch text and offset information:
@ RegexContext a b (AllTextSubmatches (Array Int) (b, (MatchOffset, MatchLength))) @
@ RegexContext a b (AllTextSubmatches [] (b, (MatchOffset, MatchLength))) @
Two containers of the submatch text information:
@ RegexContext a b (AllTextSubmatches [] b) @
@ RegexContext a b (AllTextSubmatches (Array Int) b) @
These instances are for all the matches (non-overlapping). Note that
backends are supposed to supply 'RegexLike' instances for which the
default 'matchAll' and 'matchAllText' stop searching after returning
any successful but empty match.
@ RegexContext a b Int @ :
The number of matches, non-negative.
Two containers for locations of all matches:
@ RegexContext a b (AllMatches [] (MatchOffset, MatchLength)) @
@ RegexContext a b (AllMatches (Array Int) (MatchOffset,MatchLength)) @
Two containers for the locations of all matches and their submatches:
@ RegexContext a b [MatchArray] @ :
@ RegexContext a b (AllMatches (Array Int) MatchArray) @
Two containers for the text and locations of all matches and their submatches:
@ RegexContext a b [MatchText b] @
@ RegexContext a b (AllTextMatches (Array Int) (MatchText b)) @
Two containers for text of all matches:
@ RegexContext a b (AllTextMatches [] b) @
@ RegexContext a b (AllTextMatches (Array Int) b) @
Four containers for text of all matches and their submatches:
@ RegexContext a b [[b]] @
@ RegexContext a b (AllTextMatches (Array Int) [b]) @
@ RegexContext a b (AllTextMatches [] (Array Int b)) @
@ RegexContext a b (AllTextMatches (Array Int) (Array Int b)) @
Unused matches are 'empty' (defined via 'Extract')
-}
module Text.Regex.Base.Context() where
import Prelude hiding (fail)
import Control.Monad.Fail (MonadFail(fail)) -- see 'regexFailed'
import Control.Monad(liftM)
import Data.Array(Array,(!),elems,listArray)
import Data . Maybe(maybe )
import Text.Regex.Base.RegexLike(RegexLike(..),RegexContext(..)
,AllSubmatches(..),AllTextSubmatches(..),AllMatches(..),AllTextMatches(..)
,MatchResult(..),Extract(empty),MatchOffset,MatchLength,MatchArray,MatchText)
-- Get the ByteString type for mood / doom
import Data . ByteString(ByteString )
-- Get the Regex types for the mood / doom workaround
import qualified Text . Regex . Lib . as R1(Regex )
import qualified Text . Regex . Lib . WrapPCRE as R2(Regex )
import qualified Text . Regex . Lib . WrapLazy as R3(Regex )
import qualified Text . Regex . Lib . WrapDFAEngine as R4(Regex )
-- Get the RegexLike instances
import Text . Regex . Lib . ( )
import Text . Regex . Lib . ( )
import Text . Regex . Lib . StringLazy ( )
import Text . Regex . Lib . StringDFAEngine ( )
import Text . Regex . Lib . ByteStringPosix ( )
import Text . Regex . Lib . ByteStringPCRE ( )
import Text . Regex . Lib . ByteStringLazy ( )
import Text . Regex . Lib . ( )
-- Get the ByteString type for mood/doom
import Data.ByteString(ByteString)
-- Get the Regex types for the mood/doom workaround
import qualified Text.Regex.Lib.WrapPosix as R1(Regex)
import qualified Text.Regex.Lib.WrapPCRE as R2(Regex)
import qualified Text.Regex.Lib.WrapLazy as R3(Regex)
import qualified Text.Regex.Lib.WrapDFAEngine as R4(Regex)
-- Get the RegexLike instances
import Text.Regex.Lib.StringPosix()
import Text.Regex.Lib.StringPCRE()
import Text.Regex.Lib.StringLazy()
import Text.Regex.Lib.StringDFAEngine()
import Text.Regex.Lib.ByteStringPosix()
import Text.Regex.Lib.ByteStringPCRE()
import Text.Regex.Lib.ByteStringLazy()
import Text.Regex.Lib.ByteStringDFAEngine()
-}
mood : : ( RegexLike a b ) = > a - > b - > b
{ - # INLINE mood #
mood :: (RegexLike a b) => a -> b -> b
{-# INLINE mood #-}
mood r s = case matchOnceText r s of
Nothing -> empty
Just (_,ma,_) -> fst (ma!0)
doom :: (RegexLike a b,Monad m) => a -> b -> m b
# INLINE doom #
doom = actOn (\(_,ma,_)->fst (ma!0))
These run afoul of various restrictions if I say
" instance RegexContext a b b where "
so I am listing these cases explicitly
"instance RegexContext a b b where"
so I am listing these cases explicitly
-}
instance RegexContext R1.Regex String String where match = mood; matchM = doom
instance RegexContext R2.Regex String String where match = mood; matchM = doom
instance RegexContext R3.Regex String String where match = mood; matchM = doom
instance RegexContext R4.Regex String String where match = mood; matchM = doom
instance RegexContext R1.Regex ByteString ByteString where match = mood; matchM = doom
instance RegexContext R2.Regex ByteString ByteString where match = mood; matchM = doom
instance RegexContext R3.Regex ByteString ByteString where match = mood; matchM = doom
instance RegexContext R4.Regex ByteString ByteString where match = mood; matchM = doom
-}
nullArray :: Array Int a
# INLINE nullArray #
nullArray = listArray (1,0) []
nullFail :: (RegexContext regex source (AllMatches [] target),MonadFail m) => regex -> source -> m (AllMatches [] target)
# INLINE nullFail #
nullFail r s = case match r s of
(AllMatches []) -> regexFailed
xs -> return xs
nullFailText :: (RegexContext regex source (AllTextMatches [] target),MonadFail m) => regex -> source -> m (AllTextMatches [] target)
{-# INLINE nullFailText #-}
nullFailText r s = case match r s of
(AllTextMatches []) -> regexFailed
xs -> return xs
nullFail' :: (RegexContext regex source ([] target),MonadFail m) => regex -> source -> m ([] target)
# INLINE nullFail ' #
nullFail' r s = case match r s of
([]) -> regexFailed
xs -> return xs
regexFailed :: (MonadFail m) => m b
# INLINE regexFailed #
regexFailed = fail $ "regex failed to match"
actOn :: (RegexLike r s,MonadFail m) => ((s,MatchText s,s)->t) -> r -> s -> m t
# INLINE actOn #
actOn f r s = case matchOnceText r s of
Nothing -> regexFailed
Just preMApost -> return (f preMApost)
-- ** Instances based on matchTest ()
instance (RegexLike a b) => RegexContext a b Bool where
match = matchTest
matchM r s = case match r s of
False -> regexFailed
True -> return True
instance (RegexLike a b) => RegexContext a b () where
match _ _ = ()
matchM r s = case matchTest r s of
False -> regexFailed
True -> return ()
* * Instance based on matchCount
instance (RegexLike a b) => RegexContext a b Int where
match = matchCount
matchM r s = case match r s of
0 -> regexFailed
x -> return x
-- ** Instances based on matchOnce,matchOnceText
instance (RegexLike a b) => RegexContext a b (MatchOffset,MatchLength) where
match r s = maybe (-1,0) (! 0) (matchOnce r s)
matchM r s = maybe regexFailed (return.(! 0)) (matchOnce r s)
instance (RegexLike a b) => RegexContext a b (MatchResult b) where
match r s = maybe (MR {mrBefore = s,mrMatch = empty,mrAfter = empty
,mrSubs = nullArray,mrSubList = []}) id (matchM r s)
matchM = actOn (\(pre,ma,post) ->
let ((whole,_):subs) = elems ma
in MR { mrBefore = pre
, mrMatch = whole
, mrAfter = post
, mrSubs = fmap fst ma
, mrSubList = map fst subs })
instance (RegexLike a b) => RegexContext a b (b,MatchText b,b) where
match r s = maybe (s,nullArray,empty) id (matchOnceText r s)
matchM r s = maybe regexFailed return (matchOnceText r s)
instance (RegexLike a b) => RegexContext a b (b,b,b) where
match r s = maybe (s,empty,empty) id (matchM r s)
matchM = actOn (\(pre,ma,post) -> let ((whole,_):_) = elems ma
in (pre,whole,post))
instance (RegexLike a b) => RegexContext a b (b,b,b,[b]) where
match r s = maybe (s,empty,empty,[]) id (matchM r s)
matchM = actOn (\(pre,ma,post) -> let ((whole,_):subs) = elems ma
in (pre,whole,post,map fst subs))
now AllSubmatches wrapper
instance (RegexLike a b) => RegexContext a b MatchArray where
match r s = maybe nullArray id (matchOnce r s)
matchM r s = maybe regexFailed return (matchOnce r s)
instance (RegexLike a b) => RegexContext a b (AllSubmatches [] (MatchOffset,MatchLength)) where
match r s = maybe (AllSubmatches []) id (matchM r s)
matchM r s = case matchOnce r s of
Nothing -> regexFailed
Just ma -> return (AllSubmatches (elems ma))
essentially AllSubmatches applied to ( MatchText b )
instance (RegexLike a b) => RegexContext a b (AllTextSubmatches (Array Int) (b, (MatchOffset, MatchLength))) where
match r s = maybe (AllTextSubmatches nullArray) id (matchM r s)
matchM r s = actOn (\(_,ma,_) -> AllTextSubmatches ma) r s
instance (RegexLike a b) => RegexContext a b (AllTextSubmatches [] (b, (MatchOffset, MatchLength))) where
match r s = maybe (AllTextSubmatches []) id (matchM r s)
matchM r s = actOn (\(_,ma,_) -> AllTextSubmatches (elems ma)) r s
instance (RegexLike a b) => RegexContext a b (AllTextSubmatches [] b) where
match r s = maybe (AllTextSubmatches []) id (matchM r s)
matchM r s = liftM AllTextSubmatches $ actOn (\(_,ma,_) -> map fst . elems $ ma) r s
instance (RegexLike a b) => RegexContext a b (AllTextSubmatches (Array Int) b) where
match r s = maybe (AllTextSubmatches nullArray) id (matchM r s)
matchM r s = liftM AllTextSubmatches $ actOn (\(_,ma,_) -> fmap fst ma) r s
-- ** Instances based on matchAll,matchAllText
instance (RegexLike a b) => RegexContext a b (AllMatches [] (MatchOffset,MatchLength)) where
match r s = AllMatches [ ma!0 | ma <- matchAll r s ]
matchM r s = nullFail r s
instance (RegexLike a b) => RegexContext a b (AllMatches (Array Int) (MatchOffset,MatchLength)) where
match r s = maybe (AllMatches nullArray) id (matchM r s)
matchM r s = case match r s of
(AllMatches []) -> regexFailed
(AllMatches pairs) -> return . AllMatches . listArray (0,pred $ length pairs) $ pairs
-- No AllMatches wrapper
instance (RegexLike a b) => RegexContext a b [MatchArray] where
match = matchAll
matchM = nullFail'
instance (RegexLike a b) => RegexContext a b (AllMatches (Array Int) MatchArray) where
match r s = maybe (AllMatches nullArray) id (matchM r s)
matchM r s = case match r s of
[] -> regexFailed
mas -> return . AllMatches . listArray (0,pred $ length mas) $ mas
-- No AllTextMatches wrapper
instance (RegexLike a b) => RegexContext a b [MatchText b] where
match = matchAllText
matchM = nullFail'
instance (RegexLike a b) => RegexContext a b (AllTextMatches (Array Int) (MatchText b)) where
match r s = maybe (AllTextMatches nullArray) id (matchM r s)
matchM r s = case match r s of
([]) -> regexFailed
(mts) -> return . AllTextMatches . listArray (0,pred $ length mts) $ mts
instance (RegexLike a b) => RegexContext a b (AllTextMatches [] b) where
match r s = AllTextMatches [ fst (ma!0) | ma <- matchAllText r s ]
matchM r s = nullFailText r s
instance (RegexLike a b) => RegexContext a b (AllTextMatches (Array Int) b) where
match r s = maybe (AllTextMatches nullArray) id (matchM r s)
matchM r s = case match r s of
(AllTextMatches []) -> regexFailed
(AllTextMatches bs) -> return . AllTextMatches . listArray (0,pred $ length bs) $ bs
-- No AllTextMatches wrapper
instance (RegexLike a b) => RegexContext a b [[b]] where
match r s = [ map fst (elems ma) | ma <- matchAllText r s ]
matchM r s = nullFail' r s
instance (RegexLike a b) => RegexContext a b (AllTextMatches (Array Int) [b]) where
match r s = maybe (AllTextMatches nullArray) id (matchM r s)
matchM r s = case match r s of
([]) -> regexFailed
(ls) -> return . AllTextMatches . listArray (0,pred $ length ls) $ ls
instance (RegexLike a b) => RegexContext a b (AllTextMatches [] (Array Int b)) where
match r s = AllTextMatches [ fmap fst ma | ma <- matchAllText r s ]
matchM r s = nullFailText r s
instance (RegexLike a b) => RegexContext a b (AllTextMatches (Array Int) (Array Int b)) where
match r s = maybe (AllTextMatches nullArray) id (matchM r s)
matchM r s = case match r s of
(AllTextMatches []) -> regexFailed
(AllTextMatches as) -> return . AllTextMatches . listArray (0,pred $ length as) $ as
| null | https://raw.githubusercontent.com/grin-compiler/ghc-wpc-sample-programs/0e3a9b8b7cc3fa0da7c77fb7588dd4830fb087f7/regex-base-0.94.0.0/src/Text/Regex/Base/Context.hs | haskell | | 0 based index from start of source , or ( -1 ) for unused
| non - negative length of a match
| 0 based index from start of source, or (-1) for unused
| non-negative length of a match
see 'regexFailed'
Get the ByteString type for mood / doom
Get the Regex types for the mood / doom workaround
Get the RegexLike instances
Get the ByteString type for mood/doom
Get the Regex types for the mood/doom workaround
Get the RegexLike instances
# INLINE mood #
# INLINE nullFailText #
** Instances based on matchTest ()
** Instances based on matchOnce,matchOnceText
** Instances based on matchAll,matchAllText
No AllMatches wrapper
No AllTextMatches wrapper
No AllTextMatches wrapper | # LANGUAGE MultiParamTypeClasses , FlexibleInstances , FlexibleContexts , TypeSynonymInstances #
# OPTIONS_GHC -fno - warn - orphans #
|
Module : Text . Regex . Base . Context
Copyright : ( c ) 2006
SPDX - License - Identifier : BSD-3 - Clause
Maintainer :
Stability : experimental
Portability : non - portable ( MPTC+FD )
This is a module of instances of ' RegexContext ' ( defined in
Text . Regex . Base . ) . Nothing else is exported . This is
usually imported via the Text . Regex . Base convenience package which
itself is re - exported from newer Text . Regex . XXX modules provided by
the different regex - xxx backends .
These instances work for all the supported types and backends
interchangably . These instances provide the different results that
can be gotten from a match or matchM operation ( often via the @=~@ and
@=~~@ operators with combine @makeRegex@ with @match@ and @matchM@
respectively ) . This module name is Context because they operators are
context dependent : use them in a context that expects an Int and you
get a count of matches , use them in a context and get True if
there is a match , etc .
@RegexContext a b c@ takes a regular expression suppied in a type ' a '
generated by ' RegexMaker ' and a target text supplied in type ' b ' to a
result type ' c ' using the ' match ' class function . The ' matchM ' class
function works like ' match ' unless there is no match found , in which
case it calls ' fail ' in the ( arbitrary ) monad context .
There are a few type synonyms from RegexLike that are used here :
@
type MatchOffset = Int
type MatchLength = Int
type MatchArray = Array Int ( MatchOffset , MatchLength )
type MatchText source = Array Int ( source , ( MatchOffset , MatchLength ) )
@
There are also a few newtypes that used to prevent any possible
overlap of types , which were not needed for GHC 's late overlap
detection but are needed for use in Hugs .
@
newtype AllSubmatches f b = AllSubmatches { getAllSubmatches : : ( f b ) }
newtype AllTextSubmatches f b = AllTextSubmatches { getAllTextSubmatches : : ( f b ) }
newtype AllMatches f b = AllMatches { getAllMatches : : ( f b ) }
newtype AllTextMatches f b = AllTextMatches : : ( f b ) }
@
The newtypes ' @f@ parameters are the containers , usually @[]@ or
@Array Int@ , ( where the arrays all have lower bound 0 ) .
The two * Submatches newtypes return only information on the first
match . The other two newtypes return information on all the
non - overlapping matches . The two * Text * newtypes are used to mark
result types that contain the same type as the target text .
Where provided , noncaptured submatches will have a @MatchOffset@ of
( -1 ) and non - negative otherwise . The semantics of submatches depend
on the backend and its compile and execution options . Where provided ,
@MatchLength@ will always be non - negative . Arrays with no elements
are returned with bounds of ( 1,0 ) . Arrays with elements will have a
lower bound of 0 .
XXX THIS HADDOCK DOCUMENTATION IS OUT OF DATE XXX
These are for finding the first match in the target text :
@ RegexContext a b Bool @ :
Whether there is any match or not .
@ RegexContext a b ( ) @ :
Useful as a guard with @matchM@ or @=~~@ in a monad , since failure to match calls ' fail ' .
@ RegexContext a b b @ :
This returns the text of the whole match .
It will return ' empty ' from the ' Extract ' type class if there is no match .
These are defined in each backend module , but documented here for convenience .
@ RegexContext a b ( MatchOffset , MatchLength ) @ :
This returns the initial index and length of the whole match .
MatchLength will always be non - negative , and 0 for a failed match .
@ RegexContext a b ( MatchResult b ) @ : The
' MatchResult ' structure with details for the match . This is the
structure copied from the old @JRegex@ pacakge .
@ RegexContext a b ( b , b , b ) @ :
The text before the match , the text of the match , the text after the match
@ RegexContext a b ( b , MatchText b , b ) @ :
The text before the match , the details of the match , and the text after the match
@ RegexContext a b ( b , b , b , [ b ] ) @ :
The text before the match , the text of the match , the text after the
match , and a list of the text of the 1st and higher sub - parts of the
match . This is the same return value as used in the old
@Text . Regex@ API .
Two containers of the submatch offset information :
@ RegexContext a b MatchArray @ :
Array of @(MatchOffset , MatchLength)@ for all the sub matches .
The whole match is at the intial 0th index .
Noncaptured submatches will have a @MatchOffset@ of ( -1 )
The array will have no elements and bounds ( 1,0 ) if there is no match .
@ RegexContext a b ( AllSubmatches [ ] ( MatchOffset , MatchLength ) @ :
List of @(MatchOffset , MatchLength)@
The whole match is the first element , the rest are the submatches ( if any ) in order .
The list is empty if there is no match .
Two containers of the submatch text and offset information :
@ RegexContext a b ( AllTextSubmatches ( Array Int ) ( b , ( MatchOffset , MatchLength ) ) ) @
@ RegexContext a b ( AllTextSubmatches [ ] ( b , ( MatchOffset , MatchLength ) ) ) @
Two containers of the submatch text information :
@ RegexContext a b ( AllTextSubmatches [ ] b ) @
@ RegexContext a b ( AllTextSubmatches ( Array Int ) b ) @
These instances are for all the matches ( non - overlapping ) . Note that
backends are supposed to supply ' RegexLike ' instances for which the
default ' matchAll ' and ' matchAllText ' stop searching after returning
any successful but empty match .
@ RegexContext a b Int @ :
The number of matches , non - negative .
Two containers for locations of all matches :
@ RegexContext a b ( AllMatches [ ] ( MatchOffset , MatchLength ) ) @
@ RegexContext a b ( AllMatches ( Array Int ) ( MatchOffset , MatchLength ) ) @
Two containers for the locations of all matches and their submatches :
@ RegexContext a b [ MatchArray ] @ :
@ RegexContext a b ( AllMatches ( Array Int ) MatchArray ) @
Two containers for the text and locations of all matches and their submatches :
@ RegexContext a b [ MatchText b ] @
@ RegexContext a b ( AllTextMatches ( Array Int ) ( MatchText b ) ) @
Two containers for text of all matches :
@ RegexContext a b ( AllTextMatches [ ] b ) @
@ RegexContext a b ( AllTextMatches ( Array Int ) b ) @
Four containers for text of all matches and their submatches :
@ RegexContext a b [ [ b ] ] @
@ RegexContext a b ( AllTextMatches ( Array Int ) [ b ] ) @
@ RegexContext a b ( AllTextMatches [ ] ( Array Int b ) ) @
@ RegexContext a b ( AllTextMatches ( Array Int ) ( Array Int b ) ) @
Unused matches are ' empty ' ( defined via ' Extract ' )
Module : Text.Regex.Base.Context
Copyright : (c) Chris Kuklewicz 2006
SPDX-License-Identifier: BSD-3-Clause
Maintainer :
Stability : experimental
Portability : non-portable (MPTC+FD)
This is a module of instances of 'RegexContext' (defined in
Text.Regex.Base.RegexLike). Nothing else is exported. This is
usually imported via the Text.Regex.Base convenience package which
itself is re-exported from newer Text.Regex.XXX modules provided by
the different regex-xxx backends.
These instances work for all the supported types and backends
interchangably. These instances provide the different results that
can be gotten from a match or matchM operation (often via the @=~@ and
@=~~@ operators with combine @makeRegex@ with @match@ and @matchM@
respectively). This module name is Context because they operators are
context dependent: use them in a context that expects an Int and you
get a count of matches, use them in a Bool context and get True if
there is a match, etc.
@RegexContext a b c@ takes a regular expression suppied in a type 'a'
generated by 'RegexMaker' and a target text supplied in type 'b' to a
result type 'c' using the 'match' class function. The 'matchM' class
function works like 'match' unless there is no match found, in which
case it calls 'fail' in the (arbitrary) monad context.
There are a few type synonyms from RegexLike that are used here:
@
type MatchOffset = Int
type MatchLength = Int
type MatchArray = Array Int (MatchOffset, MatchLength)
type MatchText source = Array Int (source, (MatchOffset, MatchLength))
@
There are also a few newtypes that used to prevent any possible
overlap of types, which were not needed for GHC's late overlap
detection but are needed for use in Hugs.
@
newtype AllSubmatches f b = AllSubmatches {getAllSubmatches :: (f b)}
newtype AllTextSubmatches f b = AllTextSubmatches {getAllTextSubmatches :: (f b)}
newtype AllMatches f b = AllMatches {getAllMatches :: (f b)}
newtype AllTextMatches f b = AllTextMatches {getAllTextMatches :: (f b) }
@
The newtypes' @f@ parameters are the containers, usually @[]@ or
@Array Int@, (where the arrays all have lower bound 0).
The two *Submatches newtypes return only information on the first
match. The other two newtypes return information on all the
non-overlapping matches. The two *Text* newtypes are used to mark
result types that contain the same type as the target text.
Where provided, noncaptured submatches will have a @MatchOffset@ of
(-1) and non-negative otherwise. The semantics of submatches depend
on the backend and its compile and execution options. Where provided,
@MatchLength@ will always be non-negative. Arrays with no elements
are returned with bounds of (1,0). Arrays with elements will have a
lower bound of 0.
XXX THIS HADDOCK DOCUMENTATION IS OUT OF DATE XXX
These are for finding the first match in the target text:
@ RegexContext a b Bool @ :
Whether there is any match or not.
@ RegexContext a b () @ :
Useful as a guard with @matchM@ or @=~~@ in a monad, since failure to match calls 'fail'.
@ RegexContext a b b @ :
This returns the text of the whole match.
It will return 'empty' from the 'Extract' type class if there is no match.
These are defined in each backend module, but documented here for convenience.
@ RegexContext a b (MatchOffset,MatchLength) @ :
This returns the initial index and length of the whole match.
MatchLength will always be non-negative, and 0 for a failed match.
@ RegexContext a b (MatchResult b) @ : The
'MatchResult' structure with details for the match. This is the
structure copied from the old @JRegex@ pacakge.
@ RegexContext a b (b, b, b) @ :
The text before the match, the text of the match, the text after the match
@ RegexContext a b (b, MatchText b, b) @ :
The text before the match, the details of the match, and the text after the match
@ RegexContext a b (b, b, b, [b]) @ :
The text before the match, the text of the match, the text after the
match, and a list of the text of the 1st and higher sub-parts of the
match. This is the same return value as used in the old
@Text.Regex@ API.
Two containers of the submatch offset information:
@ RegexContext a b MatchArray @ :
Array of @(MatchOffset,MatchLength)@ for all the sub matches.
The whole match is at the intial 0th index.
Noncaptured submatches will have a @MatchOffset@ of (-1)
The array will have no elements and bounds (1,0) if there is no match.
@ RegexContext a b (AllSubmatches [] (MatchOffset,MatchLength) @ :
List of @(MatchOffset,MatchLength)@
The whole match is the first element, the rest are the submatches (if any) in order.
The list is empty if there is no match.
Two containers of the submatch text and offset information:
@ RegexContext a b (AllTextSubmatches (Array Int) (b, (MatchOffset, MatchLength))) @
@ RegexContext a b (AllTextSubmatches [] (b, (MatchOffset, MatchLength))) @
Two containers of the submatch text information:
@ RegexContext a b (AllTextSubmatches [] b) @
@ RegexContext a b (AllTextSubmatches (Array Int) b) @
These instances are for all the matches (non-overlapping). Note that
backends are supposed to supply 'RegexLike' instances for which the
default 'matchAll' and 'matchAllText' stop searching after returning
any successful but empty match.
@ RegexContext a b Int @ :
The number of matches, non-negative.
Two containers for locations of all matches:
@ RegexContext a b (AllMatches [] (MatchOffset, MatchLength)) @
@ RegexContext a b (AllMatches (Array Int) (MatchOffset,MatchLength)) @
Two containers for the locations of all matches and their submatches:
@ RegexContext a b [MatchArray] @ :
@ RegexContext a b (AllMatches (Array Int) MatchArray) @
Two containers for the text and locations of all matches and their submatches:
@ RegexContext a b [MatchText b] @
@ RegexContext a b (AllTextMatches (Array Int) (MatchText b)) @
Two containers for text of all matches:
@ RegexContext a b (AllTextMatches [] b) @
@ RegexContext a b (AllTextMatches (Array Int) b) @
Four containers for text of all matches and their submatches:
@ RegexContext a b [[b]] @
@ RegexContext a b (AllTextMatches (Array Int) [b]) @
@ RegexContext a b (AllTextMatches [] (Array Int b)) @
@ RegexContext a b (AllTextMatches (Array Int) (Array Int b)) @
Unused matches are 'empty' (defined via 'Extract')
-}
module Text.Regex.Base.Context() where
import Prelude hiding (fail)
import Control.Monad(liftM)
import Data.Array(Array,(!),elems,listArray)
import Data . Maybe(maybe )
import Text.Regex.Base.RegexLike(RegexLike(..),RegexContext(..)
,AllSubmatches(..),AllTextSubmatches(..),AllMatches(..),AllTextMatches(..)
,MatchResult(..),Extract(empty),MatchOffset,MatchLength,MatchArray,MatchText)
import Data . ByteString(ByteString )
import qualified Text . Regex . Lib . as R1(Regex )
import qualified Text . Regex . Lib . WrapPCRE as R2(Regex )
import qualified Text . Regex . Lib . WrapLazy as R3(Regex )
import qualified Text . Regex . Lib . WrapDFAEngine as R4(Regex )
import Text . Regex . Lib . ( )
import Text . Regex . Lib . ( )
import Text . Regex . Lib . StringLazy ( )
import Text . Regex . Lib . StringDFAEngine ( )
import Text . Regex . Lib . ByteStringPosix ( )
import Text . Regex . Lib . ByteStringPCRE ( )
import Text . Regex . Lib . ByteStringLazy ( )
import Text . Regex . Lib . ( )
import Data.ByteString(ByteString)
import qualified Text.Regex.Lib.WrapPosix as R1(Regex)
import qualified Text.Regex.Lib.WrapPCRE as R2(Regex)
import qualified Text.Regex.Lib.WrapLazy as R3(Regex)
import qualified Text.Regex.Lib.WrapDFAEngine as R4(Regex)
import Text.Regex.Lib.StringPosix()
import Text.Regex.Lib.StringPCRE()
import Text.Regex.Lib.StringLazy()
import Text.Regex.Lib.StringDFAEngine()
import Text.Regex.Lib.ByteStringPosix()
import Text.Regex.Lib.ByteStringPCRE()
import Text.Regex.Lib.ByteStringLazy()
import Text.Regex.Lib.ByteStringDFAEngine()
-}
mood : : ( RegexLike a b ) = > a - > b - > b
{ - # INLINE mood #
mood :: (RegexLike a b) => a -> b -> b
mood r s = case matchOnceText r s of
Nothing -> empty
Just (_,ma,_) -> fst (ma!0)
doom :: (RegexLike a b,Monad m) => a -> b -> m b
# INLINE doom #
doom = actOn (\(_,ma,_)->fst (ma!0))
These run afoul of various restrictions if I say
" instance RegexContext a b b where "
so I am listing these cases explicitly
"instance RegexContext a b b where"
so I am listing these cases explicitly
-}
instance RegexContext R1.Regex String String where match = mood; matchM = doom
instance RegexContext R2.Regex String String where match = mood; matchM = doom
instance RegexContext R3.Regex String String where match = mood; matchM = doom
instance RegexContext R4.Regex String String where match = mood; matchM = doom
instance RegexContext R1.Regex ByteString ByteString where match = mood; matchM = doom
instance RegexContext R2.Regex ByteString ByteString where match = mood; matchM = doom
instance RegexContext R3.Regex ByteString ByteString where match = mood; matchM = doom
instance RegexContext R4.Regex ByteString ByteString where match = mood; matchM = doom
-}
nullArray :: Array Int a
# INLINE nullArray #
nullArray = listArray (1,0) []
nullFail :: (RegexContext regex source (AllMatches [] target),MonadFail m) => regex -> source -> m (AllMatches [] target)
# INLINE nullFail #
nullFail r s = case match r s of
(AllMatches []) -> regexFailed
xs -> return xs
nullFailText :: (RegexContext regex source (AllTextMatches [] target),MonadFail m) => regex -> source -> m (AllTextMatches [] target)
nullFailText r s = case match r s of
(AllTextMatches []) -> regexFailed
xs -> return xs
nullFail' :: (RegexContext regex source ([] target),MonadFail m) => regex -> source -> m ([] target)
# INLINE nullFail ' #
nullFail' r s = case match r s of
([]) -> regexFailed
xs -> return xs
regexFailed :: (MonadFail m) => m b
# INLINE regexFailed #
regexFailed = fail $ "regex failed to match"
actOn :: (RegexLike r s,MonadFail m) => ((s,MatchText s,s)->t) -> r -> s -> m t
# INLINE actOn #
actOn f r s = case matchOnceText r s of
Nothing -> regexFailed
Just preMApost -> return (f preMApost)
instance (RegexLike a b) => RegexContext a b Bool where
match = matchTest
matchM r s = case match r s of
False -> regexFailed
True -> return True
instance (RegexLike a b) => RegexContext a b () where
match _ _ = ()
matchM r s = case matchTest r s of
False -> regexFailed
True -> return ()
* * Instance based on matchCount
instance (RegexLike a b) => RegexContext a b Int where
match = matchCount
matchM r s = case match r s of
0 -> regexFailed
x -> return x
instance (RegexLike a b) => RegexContext a b (MatchOffset,MatchLength) where
match r s = maybe (-1,0) (! 0) (matchOnce r s)
matchM r s = maybe regexFailed (return.(! 0)) (matchOnce r s)
instance (RegexLike a b) => RegexContext a b (MatchResult b) where
match r s = maybe (MR {mrBefore = s,mrMatch = empty,mrAfter = empty
,mrSubs = nullArray,mrSubList = []}) id (matchM r s)
matchM = actOn (\(pre,ma,post) ->
let ((whole,_):subs) = elems ma
in MR { mrBefore = pre
, mrMatch = whole
, mrAfter = post
, mrSubs = fmap fst ma
, mrSubList = map fst subs })
instance (RegexLike a b) => RegexContext a b (b,MatchText b,b) where
match r s = maybe (s,nullArray,empty) id (matchOnceText r s)
matchM r s = maybe regexFailed return (matchOnceText r s)
instance (RegexLike a b) => RegexContext a b (b,b,b) where
match r s = maybe (s,empty,empty) id (matchM r s)
matchM = actOn (\(pre,ma,post) -> let ((whole,_):_) = elems ma
in (pre,whole,post))
instance (RegexLike a b) => RegexContext a b (b,b,b,[b]) where
match r s = maybe (s,empty,empty,[]) id (matchM r s)
matchM = actOn (\(pre,ma,post) -> let ((whole,_):subs) = elems ma
in (pre,whole,post,map fst subs))
now AllSubmatches wrapper
instance (RegexLike a b) => RegexContext a b MatchArray where
match r s = maybe nullArray id (matchOnce r s)
matchM r s = maybe regexFailed return (matchOnce r s)
instance (RegexLike a b) => RegexContext a b (AllSubmatches [] (MatchOffset,MatchLength)) where
match r s = maybe (AllSubmatches []) id (matchM r s)
matchM r s = case matchOnce r s of
Nothing -> regexFailed
Just ma -> return (AllSubmatches (elems ma))
essentially AllSubmatches applied to ( MatchText b )
instance (RegexLike a b) => RegexContext a b (AllTextSubmatches (Array Int) (b, (MatchOffset, MatchLength))) where
match r s = maybe (AllTextSubmatches nullArray) id (matchM r s)
matchM r s = actOn (\(_,ma,_) -> AllTextSubmatches ma) r s
instance (RegexLike a b) => RegexContext a b (AllTextSubmatches [] (b, (MatchOffset, MatchLength))) where
match r s = maybe (AllTextSubmatches []) id (matchM r s)
matchM r s = actOn (\(_,ma,_) -> AllTextSubmatches (elems ma)) r s
instance (RegexLike a b) => RegexContext a b (AllTextSubmatches [] b) where
match r s = maybe (AllTextSubmatches []) id (matchM r s)
matchM r s = liftM AllTextSubmatches $ actOn (\(_,ma,_) -> map fst . elems $ ma) r s
instance (RegexLike a b) => RegexContext a b (AllTextSubmatches (Array Int) b) where
match r s = maybe (AllTextSubmatches nullArray) id (matchM r s)
matchM r s = liftM AllTextSubmatches $ actOn (\(_,ma,_) -> fmap fst ma) r s
instance (RegexLike a b) => RegexContext a b (AllMatches [] (MatchOffset,MatchLength)) where
match r s = AllMatches [ ma!0 | ma <- matchAll r s ]
matchM r s = nullFail r s
instance (RegexLike a b) => RegexContext a b (AllMatches (Array Int) (MatchOffset,MatchLength)) where
match r s = maybe (AllMatches nullArray) id (matchM r s)
matchM r s = case match r s of
(AllMatches []) -> regexFailed
(AllMatches pairs) -> return . AllMatches . listArray (0,pred $ length pairs) $ pairs
instance (RegexLike a b) => RegexContext a b [MatchArray] where
match = matchAll
matchM = nullFail'
instance (RegexLike a b) => RegexContext a b (AllMatches (Array Int) MatchArray) where
match r s = maybe (AllMatches nullArray) id (matchM r s)
matchM r s = case match r s of
[] -> regexFailed
mas -> return . AllMatches . listArray (0,pred $ length mas) $ mas
instance (RegexLike a b) => RegexContext a b [MatchText b] where
match = matchAllText
matchM = nullFail'
instance (RegexLike a b) => RegexContext a b (AllTextMatches (Array Int) (MatchText b)) where
match r s = maybe (AllTextMatches nullArray) id (matchM r s)
matchM r s = case match r s of
([]) -> regexFailed
(mts) -> return . AllTextMatches . listArray (0,pred $ length mts) $ mts
instance (RegexLike a b) => RegexContext a b (AllTextMatches [] b) where
match r s = AllTextMatches [ fst (ma!0) | ma <- matchAllText r s ]
matchM r s = nullFailText r s
instance (RegexLike a b) => RegexContext a b (AllTextMatches (Array Int) b) where
match r s = maybe (AllTextMatches nullArray) id (matchM r s)
matchM r s = case match r s of
(AllTextMatches []) -> regexFailed
(AllTextMatches bs) -> return . AllTextMatches . listArray (0,pred $ length bs) $ bs
instance (RegexLike a b) => RegexContext a b [[b]] where
match r s = [ map fst (elems ma) | ma <- matchAllText r s ]
matchM r s = nullFail' r s
instance (RegexLike a b) => RegexContext a b (AllTextMatches (Array Int) [b]) where
match r s = maybe (AllTextMatches nullArray) id (matchM r s)
matchM r s = case match r s of
([]) -> regexFailed
(ls) -> return . AllTextMatches . listArray (0,pred $ length ls) $ ls
instance (RegexLike a b) => RegexContext a b (AllTextMatches [] (Array Int b)) where
match r s = AllTextMatches [ fmap fst ma | ma <- matchAllText r s ]
matchM r s = nullFailText r s
instance (RegexLike a b) => RegexContext a b (AllTextMatches (Array Int) (Array Int b)) where
match r s = maybe (AllTextMatches nullArray) id (matchM r s)
matchM r s = case match r s of
(AllTextMatches []) -> regexFailed
(AllTextMatches as) -> return . AllTextMatches . listArray (0,pred $ length as) $ as
|
2145c8108ee798fcf93be95035524b89d31f4d4f87740f15ccc7889a9a1012a8 | matsen/pplacer | rppr_info.ml | open Subcommand
open Guppy_cmdobjs
open Ppatteries
open Convex
class cmd () =
object (self)
inherit subcommand () as super
inherit refpkg_cmd ~required:true as super_refpkg
inherit tabular_cmd () as super_tabular
val taxonomic = flag "--taxonomic"
(Plain (false, "Show by-rank taxonomic information"))
method specl = super_refpkg#specl
@ super_tabular#specl @
[
toggle_flag taxonomic;
]
method desc = "gives information about a reference package"
method usage = "usage: info -c my.refpkg"
method action _ =
let rp = self#get_rp in
let gt = Refpkg.get_ref_tree rp in
let st = gt.Gtree.stree in
let top_id = Stree.top_id st in
match Result.catch Refpkg.get_taxonomy rp |> Result.to_option with
| Some td ->
if fv taxonomic then begin
rank_tax_map_of_refpkg rp
|> IntMap.enum
|> Enum.map
(fun (rank, taxmap) ->
let sizemim, cutsetim = build_sizemim_and_cutsetim (taxmap, st) in
let cutsetim = IntMap.add top_id ColorSet.empty cutsetim in
let unconvex_colors = IntMap.fold
(fun _ colors unconvex ->
if ColorSet.cardinal colors < 2 then unconvex else
ColorSet.union unconvex colors)
cutsetim
ColorSet.empty
and max_bad, tot_bad = badness cutsetim in
(Tax_taxonomy.get_rank_name td rank) ::
(List.map
string_of_int
[ColorMap.cardinal (IntMap.find top_id sizemim);
ColorSet.cardinal unconvex_colors;
max_bad;
tot_bad]))
|> List.of_enum
|> List.cons ["rank"; "n_taxids"; "n_nonconvex"; "max_bad"; "tot_bad"]
|> self#write_ll_tab
end
else
Printf.printf "%s: %d leaves, %d taxids\n"
(Refpkg.get_name rp)
(Stree.n_taxa st)
(Tax_id.TaxIdMap.cardinal td.Tax_taxonomy.tax_name_map)
| _ ->
if fv taxonomic then
raise (Refpkg.Missing_element "taxonomy");
Printf.printf "%s: %d leaves\n" (Refpkg.get_name rp) (Stree.n_taxa st)
end
| null | https://raw.githubusercontent.com/matsen/pplacer/f40a363e962cca7131f1f2d372262e0081ff1190/pplacer_src/rppr_info.ml | ocaml | open Subcommand
open Guppy_cmdobjs
open Ppatteries
open Convex
class cmd () =
object (self)
inherit subcommand () as super
inherit refpkg_cmd ~required:true as super_refpkg
inherit tabular_cmd () as super_tabular
val taxonomic = flag "--taxonomic"
(Plain (false, "Show by-rank taxonomic information"))
method specl = super_refpkg#specl
@ super_tabular#specl @
[
toggle_flag taxonomic;
]
method desc = "gives information about a reference package"
method usage = "usage: info -c my.refpkg"
method action _ =
let rp = self#get_rp in
let gt = Refpkg.get_ref_tree rp in
let st = gt.Gtree.stree in
let top_id = Stree.top_id st in
match Result.catch Refpkg.get_taxonomy rp |> Result.to_option with
| Some td ->
if fv taxonomic then begin
rank_tax_map_of_refpkg rp
|> IntMap.enum
|> Enum.map
(fun (rank, taxmap) ->
let sizemim, cutsetim = build_sizemim_and_cutsetim (taxmap, st) in
let cutsetim = IntMap.add top_id ColorSet.empty cutsetim in
let unconvex_colors = IntMap.fold
(fun _ colors unconvex ->
if ColorSet.cardinal colors < 2 then unconvex else
ColorSet.union unconvex colors)
cutsetim
ColorSet.empty
and max_bad, tot_bad = badness cutsetim in
(Tax_taxonomy.get_rank_name td rank) ::
(List.map
string_of_int
[ColorMap.cardinal (IntMap.find top_id sizemim);
ColorSet.cardinal unconvex_colors;
max_bad;
tot_bad]))
|> List.of_enum
|> List.cons ["rank"; "n_taxids"; "n_nonconvex"; "max_bad"; "tot_bad"]
|> self#write_ll_tab
end
else
Printf.printf "%s: %d leaves, %d taxids\n"
(Refpkg.get_name rp)
(Stree.n_taxa st)
(Tax_id.TaxIdMap.cardinal td.Tax_taxonomy.tax_name_map)
| _ ->
if fv taxonomic then
raise (Refpkg.Missing_element "taxonomy");
Printf.printf "%s: %d leaves\n" (Refpkg.get_name rp) (Stree.n_taxa st)
end
|
|
02ecccb95e320ae0772bf0a9b31ec0e7bd7317dab742c8387d755f16233c7241 | mirage/capnp-rpc | rO_array.ml | type 'a t = 'a array
let init = Array.init
let of_list = Array.of_list
let get_exn t i = t.(i)
let length = Array.length
let map = Array.map
let mapi = Array.mapi
let iter = Array.iter
let iteri = Array.iteri
let fold_left = Array.fold_left
let get ~oob t i =
if i < 0 || i >= Array.length t then oob
else Array.get t i
let find fn t =
let rec loop i =
if i = Array.length t then None
else (
let item = t.(i) in
if fn item then Some item
else loop (i + 1)
)
in
loop 0
let empty = [| |]
let pp x = Fmt.(brackets (array ~sep:(const string ", ") x))
let equal eq a b =
let l = Array.length a in
if l <> Array.length b then false
else (
let rec loop i =
if i = 0 then true
else (
let i = i - 1 in
eq a.(i) b.(i) && loop i
)
in
loop l
)
let release t v =
for i = 0 to Array.length t - 1 do
t.(i) <- v;
done
| null | https://raw.githubusercontent.com/mirage/capnp-rpc/f04fa96a583994b71731bc1288833f8304c9ce81/capnp-rpc/rO_array.ml | ocaml | type 'a t = 'a array
let init = Array.init
let of_list = Array.of_list
let get_exn t i = t.(i)
let length = Array.length
let map = Array.map
let mapi = Array.mapi
let iter = Array.iter
let iteri = Array.iteri
let fold_left = Array.fold_left
let get ~oob t i =
if i < 0 || i >= Array.length t then oob
else Array.get t i
let find fn t =
let rec loop i =
if i = Array.length t then None
else (
let item = t.(i) in
if fn item then Some item
else loop (i + 1)
)
in
loop 0
let empty = [| |]
let pp x = Fmt.(brackets (array ~sep:(const string ", ") x))
let equal eq a b =
let l = Array.length a in
if l <> Array.length b then false
else (
let rec loop i =
if i = 0 then true
else (
let i = i - 1 in
eq a.(i) b.(i) && loop i
)
in
loop l
)
let release t v =
for i = 0 to Array.length t - 1 do
t.(i) <- v;
done
|
|
7df04b2e78a6aeb95d80f25cea8105ed4ff86486db90b0246ca7499eddee85d0 | Javran/advent-of-code | Day19.hs | module Javran.AdventOfCode.Y2016.Day19 (
) where
import Data.Bits
import Data.List
import Javran.AdventOfCode.Prelude
data Day19 deriving (Generic)
--
safePosition :: Int -> Int
safePosition n = 2 * l + 1
where
l = clearBit n (finiteBitSize @Int unreachable - countLeadingZeros n - 1)
solve2 :: Int -> Int
solve2 n =
if
| x == n -> x
| n < 2 * x -> n `rem` x
| otherwise -> x + 2 * (n `rem` x)
where
pow3 = iterate (* 3) 1
(_, x) : _ = dropWhile ((<= n) . fst) $ zip (tail pow3) pow3
My guess would be that this sort of problem is already studied
somewhere , so that I can simulate it for some small numbers
and probably we 'll get a OEIS hit .
And indeed we found it :
My guess would be that this sort of problem is already studied
somewhere, so that I can simulate it for some small numbers
and probably we'll get a OEIS hit.
And indeed we found it:
-}
_simulate :: Int -> [Int] -> [Int]
_simulate n xs =
if n == 1
then xs
else _simulate (n - 1) $ take (n - 1) $ tail $ cycle ys
where
ys = delete (xs !! halve n) xs
instance Solution Day19 where
solutionRun _ SolutionContext {getInputS, answerShow} = do
n <- read @Int . head . lines <$> getInputS
answerShow (safePosition n)
answerShow (solve2 n)
| null | https://raw.githubusercontent.com/Javran/advent-of-code/676ef13c2f9d341cf7de0f383335a1cf577bd73d/src/Javran/AdventOfCode/Y2016/Day19.hs | haskell | module Javran.AdventOfCode.Y2016.Day19 (
) where
import Data.Bits
import Data.List
import Javran.AdventOfCode.Prelude
data Day19 deriving (Generic)
safePosition :: Int -> Int
safePosition n = 2 * l + 1
where
l = clearBit n (finiteBitSize @Int unreachable - countLeadingZeros n - 1)
solve2 :: Int -> Int
solve2 n =
if
| x == n -> x
| n < 2 * x -> n `rem` x
| otherwise -> x + 2 * (n `rem` x)
where
pow3 = iterate (* 3) 1
(_, x) : _ = dropWhile ((<= n) . fst) $ zip (tail pow3) pow3
My guess would be that this sort of problem is already studied
somewhere , so that I can simulate it for some small numbers
and probably we 'll get a OEIS hit .
And indeed we found it :
My guess would be that this sort of problem is already studied
somewhere, so that I can simulate it for some small numbers
and probably we'll get a OEIS hit.
And indeed we found it:
-}
_simulate :: Int -> [Int] -> [Int]
_simulate n xs =
if n == 1
then xs
else _simulate (n - 1) $ take (n - 1) $ tail $ cycle ys
where
ys = delete (xs !! halve n) xs
instance Solution Day19 where
solutionRun _ SolutionContext {getInputS, answerShow} = do
n <- read @Int . head . lines <$> getInputS
answerShow (safePosition n)
answerShow (solve2 n)
|
|
03695f410b0c4ec35160f56c2ff4e678efd60e94386399d4e50ed5418c0d5d4a | fetburner/Coq2SML | omega.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(**************************************************************************)
(* *)
Omega : a solver of quantifier - free problems in Presburger Arithmetic
(* *)
( CNET , Lannion , France )
(* *)
(* 13/10/2002 : modified to cope with an external numbering of equations *)
and hypothesis . Its use for Omega is not more complex and it makes
(* things much simpler for the reflexive version where we should limit *)
(* the number of source of numbering. *)
(**************************************************************************)
open Names
module type INT = sig
type bigint
val less_than : bigint -> bigint -> bool
val add : bigint -> bigint -> bigint
val sub : bigint -> bigint -> bigint
val mult : bigint -> bigint -> bigint
val euclid : bigint -> bigint -> bigint * bigint
val neg : bigint -> bigint
val zero : bigint
val one : bigint
val to_string : bigint -> string
end
let debug = ref false
module MakeOmegaSolver (Int:INT) = struct
type bigint = Int.bigint
let (<?) = Int.less_than
let (<=?) x y = Int.less_than x y or x = y
let (>?) x y = Int.less_than y x
let (>=?) x y = Int.less_than y x or x = y
let (=?) = (=)
let (+) = Int.add
let (-) = Int.sub
let ( * ) = Int.mult
let (/) x y = fst (Int.euclid x y)
let (mod) x y = snd (Int.euclid x y)
let zero = Int.zero
let one = Int.one
let two = one + one
let negone = Int.neg one
let abs x = if Int.less_than x zero then Int.neg x else x
let string_of_bigint = Int.to_string
let neg = Int.neg
(* To ensure that polymorphic (<) is not used mistakenly on big integers *)
(* Warning: do not use (=) either on big int *)
let (<) = ((<) : int -> int -> bool)
let (>) = ((>) : int -> int -> bool)
let (<=) = ((<=) : int -> int -> bool)
let (>=) = ((>=) : int -> int -> bool)
let pp i = print_int i; print_newline (); flush stdout
let push v l = l := v :: !l
let rec pgcd x y = if y =? zero then x else pgcd y (x mod y)
let pgcd_l = function
| [] -> failwith "pgcd_l"
| x :: l -> List.fold_left pgcd x l
let floor_div a b =
match a >=? zero , b >? zero with
| true,true -> a / b
| false,false -> a / b
| true, false -> (a-one) / b - one
| false,true -> (a+one) / b - one
type coeff = {c: bigint ; v: int}
type linear = coeff list
type eqn_kind = EQUA | INEQ | DISE
type afine = {
(* a number uniquely identifying the equation *)
id: int ;
(* a boolean true for an eq, false for an ineq (Sigma a_i x_i >= 0) *)
kind: eqn_kind;
(* the variables and their coefficient *)
body: coeff list;
(* a constant *)
constant: bigint }
type state_action = {
st_new_eq : afine;
st_def : afine;
st_orig : afine;
st_coef : bigint;
st_var : int }
type action =
| DIVIDE_AND_APPROX of afine * afine * bigint * bigint
| NOT_EXACT_DIVIDE of afine * bigint
| FORGET_C of int
| EXACT_DIVIDE of afine * bigint
| SUM of int * (bigint * afine) * (bigint * afine)
| STATE of state_action
| HYP of afine
| FORGET of int * int
| FORGET_I of int * int
| CONTRADICTION of afine * afine
| NEGATE_CONTRADICT of afine * afine * bool
| MERGE_EQ of int * afine * int
| CONSTANT_NOT_NUL of int * bigint
| CONSTANT_NUL of int
| CONSTANT_NEG of int * bigint
| SPLIT_INEQ of afine * (int * action list) * (int * action list)
| WEAKEN of int * bigint
exception UNSOLVABLE
exception NO_CONTRADICTION
let display_eq print_var (l,e) =
let _ =
List.fold_left
(fun not_first f ->
print_string
(if f.c <? zero then "- " else if not_first then "+ " else "");
let c = abs f.c in
if c =? one then
Printf.printf "%s " (print_var f.v)
else
Printf.printf "%s %s " (string_of_bigint c) (print_var f.v);
true)
false l
in
if e >? zero then
Printf.printf "+ %s " (string_of_bigint e)
else if e <? zero then
Printf.printf "- %s " (string_of_bigint (abs e))
let rec trace_length l =
let action_length accu = function
| SPLIT_INEQ (_,(_,l1),(_,l2)) ->
accu + one + trace_length l1 + trace_length l2
| _ -> accu + one in
List.fold_left action_length zero l
let operator_of_eq = function
| EQUA -> "=" | DISE -> "!=" | INEQ -> ">="
let kind_of = function
| EQUA -> "equation" | DISE -> "disequation" | INEQ -> "inequation"
let display_system print_var l =
List.iter
(fun { kind=b; body=e; constant=c; id=id} ->
Printf.printf "E%d: " id;
display_eq print_var (e,c);
Printf.printf "%s 0\n" (operator_of_eq b))
l;
print_string "------------------------\n\n"
let display_inequations print_var l =
List.iter (fun e -> display_eq print_var e;print_string ">= 0\n") l;
print_string "------------------------\n\n"
let sbi = string_of_bigint
let rec display_action print_var = function
| act :: l -> begin match act with
| DIVIDE_AND_APPROX (e1,e2,k,d) ->
Printf.printf
"Inequation E%d is divided by %s and the constant coefficient is \
rounded by substracting %s.\n" e1.id (sbi k) (sbi d)
| NOT_EXACT_DIVIDE (e,k) ->
Printf.printf
"Constant in equation E%d is not divisible by the pgcd \
%s of its other coefficients.\n" e.id (sbi k)
| EXACT_DIVIDE (e,k) ->
Printf.printf
"Equation E%d is divided by the pgcd \
%s of its coefficients.\n" e.id (sbi k)
| WEAKEN (e,k) ->
Printf.printf
"To ensure a solution in the dark shadow \
the equation E%d is weakened by %s.\n" e (sbi k)
| SUM (e,(c1,e1),(c2,e2)) ->
Printf.printf
"We state %s E%d = %s %s E%d + %s %s E%d.\n"
(kind_of e1.kind) e (sbi c1) (kind_of e1.kind) e1.id (sbi c2)
(kind_of e2.kind) e2.id
| STATE { st_new_eq = e } ->
Printf.printf "We define a new equation E%d: " e.id;
display_eq print_var (e.body,e.constant);
print_string (operator_of_eq e.kind); print_string " 0"
| HYP e ->
Printf.printf "We define E%d: " e.id;
display_eq print_var (e.body,e.constant);
print_string (operator_of_eq e.kind); print_string " 0\n"
| FORGET_C e -> Printf.printf "E%d is trivially satisfiable.\n" e
| FORGET (e1,e2) -> Printf.printf "E%d subsumes E%d.\n" e1 e2
| FORGET_I (e1,e2) -> Printf.printf "E%d subsumes E%d.\n" e1 e2
| MERGE_EQ (e,e1,e2) ->
Printf.printf "E%d and E%d can be merged into E%d.\n" e1.id e2 e
| CONTRADICTION (e1,e2) ->
Printf.printf
"Equations E%d and E%d imply a contradiction on their \
constant factors.\n" e1.id e2.id
| NEGATE_CONTRADICT(e1,e2,b) ->
Printf.printf
"Equations E%d and E%d state that their body is at the same time \
equal and different\n" e1.id e2.id
| CONSTANT_NOT_NUL (e,k) ->
Printf.printf "Equation E%d states %s = 0.\n" e (sbi k)
| CONSTANT_NEG(e,k) ->
Printf.printf "Equation E%d states %s >= 0.\n" e (sbi k)
| CONSTANT_NUL e ->
Printf.printf "Inequation E%d states 0 != 0.\n" e
| SPLIT_INEQ (e,(e1,l1),(e2,l2)) ->
Printf.printf "Equation E%d is split in E%d and E%d\n\n" e.id e1 e2;
display_action print_var l1;
print_newline ();
display_action print_var l2;
print_newline ()
end; display_action print_var l
| [] ->
flush stdout
let default_print_var v = Printf.sprintf "X%d" v (* For debugging *)
(*""*)
let add_event, history, clear_history =
let accu = ref [] in
(fun (v:action) -> if !debug then display_action default_print_var [v]; push v accu),
(fun () -> !accu),
(fun () -> accu := [])
let nf_linear = Sort.list (fun x y -> x.v > y.v)
let nf ((b : bool),(e,(x : int))) = (b,(nf_linear e,x))
let map_eq_linear f =
let rec loop = function
| x :: l -> let c = f x.c in if c=?zero then loop l else {v=x.v; c=c} :: loop l
| [] -> []
in
loop
let map_eq_afine f e =
{ id = e.id; kind = e.kind; body = map_eq_linear f e.body;
constant = f e.constant }
let negate_eq = map_eq_afine (fun x -> neg x)
let rec sum p0 p1 = match (p0,p1) with
| ([], l) -> l | (l, []) -> l
| (((x1::l1) as l1'), ((x2::l2) as l2')) ->
if x1.v = x2.v then
let c = x1.c + x2.c in
if c =? zero then sum l1 l2 else {v=x1.v;c=c} :: sum l1 l2
else if x1.v > x2.v then
x1 :: sum l1 l2'
else
x2 :: sum l1' l2
let sum_afine new_eq_id eq1 eq2 =
{ kind = eq1.kind; id = new_eq_id ();
body = sum eq1.body eq2.body; constant = eq1.constant + eq2.constant }
exception FACTOR1
let rec chop_factor_1 = function
| x :: l ->
if abs x.c =? one then x,l else let (c',l') = chop_factor_1 l in (c',x::l')
| [] -> raise FACTOR1
exception CHOPVAR
let rec chop_var v = function
| f :: l -> if f.v = v then f,l else let (f',l') = chop_var v l in (f',f::l')
| [] -> raise CHOPVAR
let normalize ({id=id; kind=eq_flag; body=e; constant =x} as eq) =
if e = [] then begin
match eq_flag with
| EQUA ->
if x =? zero then [] else begin
add_event (CONSTANT_NOT_NUL(id,x)); raise UNSOLVABLE
end
| DISE ->
if x <> zero then [] else begin
add_event (CONSTANT_NUL id); raise UNSOLVABLE
end
| INEQ ->
if x >=? zero then [] else begin
add_event (CONSTANT_NEG(id,x)); raise UNSOLVABLE
end
end else
let gcd = pgcd_l (List.map (fun f -> abs f.c) e) in
if eq_flag=EQUA & x mod gcd <> zero then begin
add_event (NOT_EXACT_DIVIDE (eq,gcd)); raise UNSOLVABLE
end else if eq_flag=DISE & x mod gcd <> zero then begin
add_event (FORGET_C eq.id); []
end else if gcd <> one then begin
let c = floor_div x gcd in
let d = x - c * gcd in
let new_eq = {id=id; kind=eq_flag; constant=c;
body=map_eq_linear (fun c -> c / gcd) e} in
add_event (if eq_flag=EQUA or eq_flag = DISE then EXACT_DIVIDE(eq,gcd)
else DIVIDE_AND_APPROX(eq,new_eq,gcd,d));
[new_eq]
end else [eq]
let eliminate_with_in new_eq_id {v=v;c=c_unite} eq2
({body=e1; constant=c1} as eq1) =
try
let (f,_) = chop_var v e1 in
let coeff = if c_unite=?one then neg f.c else if c_unite=? negone then f.c
else failwith "eliminate_with_in" in
let res = sum_afine new_eq_id eq1 (map_eq_afine (fun c -> c * coeff) eq2) in
add_event (SUM (res.id,(one,eq1),(coeff,eq2))); res
with CHOPVAR -> eq1
let omega_mod a b = a - b * floor_div (two * a + b) (two * b)
let banerjee_step (new_eq_id,new_var_id,print_var) original l1 l2 =
let e = original.body in
let sigma = new_var_id () in
let smallest,var =
try
List.fold_left (fun (v,p) c -> if v >? (abs c.c) then abs c.c,c.v else (v,p))
(abs (List.hd e).c, (List.hd e).v) (List.tl e)
with Failure "tl" -> display_system print_var [original] ; failwith "TL" in
let m = smallest + one in
let new_eq =
{ constant = omega_mod original.constant m;
body = {c= neg m;v=sigma} ::
map_eq_linear (fun a -> omega_mod a m) original.body;
id = new_eq_id (); kind = EQUA } in
let definition =
{ constant = neg (floor_div (two * original.constant + m) (two * m));
body = map_eq_linear (fun a -> neg (floor_div (two * a + m) (two * m)))
original.body;
id = new_eq_id (); kind = EQUA } in
add_event (STATE {st_new_eq = new_eq; st_def = definition;
st_orig = original; st_coef = m; st_var = sigma});
let new_eq = List.hd (normalize new_eq) in
let eliminated_var, def = chop_var var new_eq.body in
let other_equations =
Util.list_map_append
(fun e ->
normalize (eliminate_with_in new_eq_id eliminated_var new_eq e)) l1 in
let inequations =
Util.list_map_append
(fun e ->
normalize (eliminate_with_in new_eq_id eliminated_var new_eq e)) l2 in
let original' = eliminate_with_in new_eq_id eliminated_var new_eq original in
let mod_original = map_eq_afine (fun c -> c / m) original' in
add_event (EXACT_DIVIDE (original',m));
List.hd (normalize mod_original),other_equations,inequations
let rec eliminate_one_equation ((new_eq_id,new_var_id,print_var) as new_ids) (e,other,ineqs) =
if !debug then display_system print_var (e::other);
try
let v,def = chop_factor_1 e.body in
(Util.list_map_append
(fun e' -> normalize (eliminate_with_in new_eq_id v e e')) other,
Util.list_map_append
(fun e' -> normalize (eliminate_with_in new_eq_id v e e')) ineqs)
with FACTOR1 ->
eliminate_one_equation new_ids (banerjee_step new_ids e other ineqs)
let rec banerjee ((_,_,print_var) as new_ids) (sys_eq,sys_ineq) =
let rec fst_eq_1 = function
(eq::l) ->
if List.exists (fun x -> abs x.c =? one) eq.body then eq,l
else let (eq',l') = fst_eq_1 l in (eq',eq::l')
| [] -> raise Not_found in
match sys_eq with
[] -> if !debug then display_system print_var sys_ineq; sys_ineq
| (e1::rest) ->
let eq,other = try fst_eq_1 sys_eq with Not_found -> (e1,rest) in
if eq.body = [] then
if eq.constant =? zero then begin
add_event (FORGET_C eq.id); banerjee new_ids (other,sys_ineq)
end else begin
add_event (CONSTANT_NOT_NUL(eq.id,eq.constant)); raise UNSOLVABLE
end
else
banerjee new_ids
(eliminate_one_equation new_ids (eq,other,sys_ineq))
type kind = INVERTED | NORMAL
let redundancy_elimination new_eq_id system =
let normal = function
({body=f::_} as e) when f.c <? zero -> negate_eq e, INVERTED
| e -> e,NORMAL in
let table = Hashtbl.create 7 in
List.iter
(fun e ->
let ({body=ne} as nx) ,kind = normal e in
if ne = [] then
if nx.constant <? zero then begin
add_event (CONSTANT_NEG(nx.id,nx.constant)); raise UNSOLVABLE
end else add_event (FORGET_C nx.id)
else
try
let (optnormal,optinvert) = Hashtbl.find table ne in
let final =
if kind = NORMAL then begin
match optnormal with
Some v ->
let kept =
if v.constant <? nx.constant
then begin add_event (FORGET (v.id,nx.id));v end
else begin add_event (FORGET (nx.id,v.id));nx end in
(Some(kept),optinvert)
| None -> Some nx,optinvert
end else begin
match optinvert with
Some v ->
let _kept =
if v.constant >? nx.constant
then begin add_event (FORGET_I (v.id,nx.id));v end
else begin add_event (FORGET_I (nx.id,v.id));nx end in
(optnormal,Some(if v.constant >? nx.constant then v else nx))
| None -> optnormal,Some nx
end in
begin match final with
(Some high, Some low) ->
if high.constant <? low.constant then begin
add_event(CONTRADICTION (high,negate_eq low));
raise UNSOLVABLE
end
| _ -> () end;
Hashtbl.remove table ne;
Hashtbl.add table ne final
with Not_found ->
Hashtbl.add table ne
(if kind = NORMAL then (Some nx,None) else (None,Some nx)))
system;
let accu_eq = ref [] in
let accu_ineq = ref [] in
Hashtbl.iter
(fun p0 p1 -> match (p0,p1) with
| (e, (Some x, Some y)) when x.constant =? y.constant ->
let id=new_eq_id () in
add_event (MERGE_EQ(id,x,y.id));
push {id=id; kind=EQUA; body=x.body; constant=x.constant} accu_eq
| (e, (optnorm,optinvert)) ->
begin match optnorm with
Some x -> push x accu_ineq | _ -> () end;
begin match optinvert with
Some x -> push (negate_eq x) accu_ineq | _ -> () end)
table;
!accu_eq,!accu_ineq
exception SOLVED_SYSTEM
let select_variable system =
let table = Hashtbl.create 7 in
let push v c=
try let r = Hashtbl.find table v in r := max !r (abs c)
with Not_found -> Hashtbl.add table v (ref (abs c)) in
List.iter (fun {body=l} -> List.iter (fun f -> push f.v f.c) l) system;
let vmin,cmin = ref (-1), ref zero in
let var_cpt = ref 0 in
Hashtbl.iter
(fun v ({contents = c}) ->
incr var_cpt;
if c <? !cmin or !vmin = (-1) then begin vmin := v; cmin := c end)
table;
if !var_cpt < 1 then raise SOLVED_SYSTEM;
!vmin
let classify v system =
List.fold_left
(fun (not_occ,below,over) eq ->
try let f,eq' = chop_var v eq.body in
if f.c >=? zero then (not_occ,((f.c,eq) :: below),over)
else (not_occ,below,((neg f.c,eq) :: over))
with CHOPVAR -> (eq::not_occ,below,over))
([],[],[]) system
let product new_eq_id dark_shadow low high =
List.fold_left
(fun accu (a,eq1) ->
List.fold_left
(fun accu (b,eq2) ->
let eq =
sum_afine new_eq_id (map_eq_afine (fun c -> c * b) eq1)
(map_eq_afine (fun c -> c * a) eq2) in
add_event(SUM(eq.id,(b,eq1),(a,eq2)));
match normalize eq with
| [eq] ->
let final_eq =
if dark_shadow then
let delta = (a - one) * (b - one) in
add_event(WEAKEN(eq.id,delta));
{id = eq.id; kind=INEQ; body = eq.body;
constant = eq.constant - delta}
else eq
in final_eq :: accu
| (e::_) -> failwith "Product dardk"
| [] -> accu)
accu high)
[] low
let fourier_motzkin (new_eq_id,_,print_var) dark_shadow system =
let v = select_variable system in
let (ineq_out, ineq_low,ineq_high) = classify v system in
let expanded = ineq_out @ product new_eq_id dark_shadow ineq_low ineq_high in
if !debug then display_system print_var expanded; expanded
let simplify ((new_eq_id,new_var_id,print_var) as new_ids) dark_shadow system =
if List.exists (fun e -> e.kind = DISE) system then
failwith "disequation in simplify";
clear_history ();
List.iter (fun e -> add_event (HYP e)) system;
let system = Util.list_map_append normalize system in
let eqs,ineqs = List.partition (fun e -> e.kind=EQUA) system in
let simp_eq,simp_ineq = redundancy_elimination new_eq_id ineqs in
let system = (eqs @ simp_eq,simp_ineq) in
let rec loop1a system =
let sys_ineq = banerjee new_ids system in
loop1b sys_ineq
and loop1b sys_ineq =
let simp_eq,simp_ineq = redundancy_elimination new_eq_id sys_ineq in
if simp_eq = [] then simp_ineq else loop1a (simp_eq,simp_ineq)
in
let rec loop2 system =
try
let expanded = fourier_motzkin new_ids dark_shadow system in
loop2 (loop1b expanded)
with SOLVED_SYSTEM ->
if !debug then display_system print_var system; system
in
loop2 (loop1a system)
let rec depend relie_on accu = function
| act :: l ->
begin match act with
| DIVIDE_AND_APPROX (e,_,_,_) ->
if List.mem e.id relie_on then depend relie_on (act::accu) l
else depend relie_on accu l
| EXACT_DIVIDE (e,_) ->
if List.mem e.id relie_on then depend relie_on (act::accu) l
else depend relie_on accu l
| WEAKEN (e,_) ->
if List.mem e relie_on then depend relie_on (act::accu) l
else depend relie_on accu l
| SUM (e,(_,e1),(_,e2)) ->
if List.mem e relie_on then
depend (e1.id::e2.id::relie_on) (act::accu) l
else
depend relie_on accu l
| STATE {st_new_eq=e;st_orig=o} ->
if List.mem e.id relie_on then depend (o.id::relie_on) (act::accu) l
else depend relie_on accu l
| HYP e ->
if List.mem e.id relie_on then depend relie_on (act::accu) l
else depend relie_on accu l
| FORGET_C _ -> depend relie_on accu l
| FORGET _ -> depend relie_on accu l
| FORGET_I _ -> depend relie_on accu l
| MERGE_EQ (e,e1,e2) ->
if List.mem e relie_on then
depend (e1.id::e2::relie_on) (act::accu) l
else
depend relie_on accu l
| NOT_EXACT_DIVIDE (e,_) -> depend (e.id::relie_on) (act::accu) l
| CONTRADICTION (e1,e2) ->
depend (e1.id::e2.id::relie_on) (act::accu) l
| CONSTANT_NOT_NUL (e,_) -> depend (e::relie_on) (act::accu) l
| CONSTANT_NEG (e,_) -> depend (e::relie_on) (act::accu) l
| CONSTANT_NUL e -> depend (e::relie_on) (act::accu) l
| NEGATE_CONTRADICT (e1,e2,_) ->
depend (e1.id::e2.id::relie_on) (act::accu) l
| SPLIT_INEQ _ -> failwith "depend"
end
| [] -> relie_on, accu
let depend relie_on accu trace =
Printf.printf " Longueur de la trace initiale : % d\n "
( trace_length trace + trace_length accu ) ;
let rel',trace ' = depend relie_on accu trace in
Printf.printf " Longueur de la trace simplifiée : % d\n " ( trace_length trace ' ) ;
rel',trace '
let depend relie_on accu trace =
Printf.printf "Longueur de la trace initiale : %d\n"
(trace_length trace + trace_length accu);
let rel',trace' = depend relie_on accu trace in
Printf.printf "Longueur de la trace simplifiée : %d\n" (trace_length trace');
rel',trace'
*)
let solve (new_eq_id,new_eq_var,print_var) system =
try let _ = simplify new_eq_id false system in failwith "no contradiction"
with UNSOLVABLE -> display_action print_var (snd (depend [] [] (history ())))
let negation (eqs,ineqs) =
let diseq,_ = List.partition (fun e -> e.kind = DISE) ineqs in
let normal = function
| ({body=f::_} as e) when f.c <? zero -> negate_eq e, INVERTED
| e -> e,NORMAL in
let table = Hashtbl.create 7 in
List.iter (fun e ->
let {body=ne;constant=c} ,kind = normal e in
Hashtbl.add table (ne,c) (kind,e)) diseq;
List.iter (fun e ->
assert (e.kind = EQUA);
let {body=ne;constant=c},kind = normal e in
try
let (kind',e') = Hashtbl.find table (ne,c) in
add_event (NEGATE_CONTRADICT (e,e',kind=kind'));
raise UNSOLVABLE
with Not_found -> ()) eqs
exception FULL_SOLUTION of action list * int list
let simplify_strong ((new_eq_id,new_var_id,print_var) as new_ids) system =
clear_history ();
List.iter (fun e -> add_event (HYP e)) system;
(* Initial simplification phase *)
let rec loop1a system =
negation system;
let sys_ineq = banerjee new_ids system in
loop1b sys_ineq
and loop1b sys_ineq =
let dise,ine = List.partition (fun e -> e.kind = DISE) sys_ineq in
let simp_eq,simp_ineq = redundancy_elimination new_eq_id ine in
if simp_eq = [] then dise @ simp_ineq
else loop1a (simp_eq,dise @ simp_ineq)
in
let rec loop2 system =
try
let expanded = fourier_motzkin new_ids false system in
loop2 (loop1b expanded)
with SOLVED_SYSTEM -> if !debug then display_system print_var system; system
in
let rec explode_diseq = function
| (de::diseq,ineqs,expl_map) ->
let id1 = new_eq_id ()
and id2 = new_eq_id () in
let e1 =
{id = id1; kind=INEQ; body = de.body; constant = de.constant -one} in
let e2 =
{id = id2; kind=INEQ; body = map_eq_linear neg de.body;
constant = neg de.constant - one} in
let new_sys =
List.map (fun (what,sys) -> ((de.id,id1,true)::what, e1::sys))
ineqs @
List.map (fun (what,sys) -> ((de.id,id2,false)::what,e2::sys))
ineqs
in
explode_diseq (diseq,new_sys,(de.id,(de,id1,id2))::expl_map)
| ([],ineqs,expl_map) -> ineqs,expl_map
in
try
let system = Util.list_map_append normalize system in
let eqs,ineqs = List.partition (fun e -> e.kind=EQUA) system in
let dise,ine = List.partition (fun e -> e.kind = DISE) ineqs in
let simp_eq,simp_ineq = redundancy_elimination new_eq_id ine in
let system = (eqs @ simp_eq,simp_ineq @ dise) in
let system' = loop1a system in
let diseq,ineq = List.partition (fun e -> e.kind = DISE) system' in
let first_segment = history () in
let sys_exploded,explode_map = explode_diseq (diseq,[[],ineq],[]) in
let all_solutions =
List.map
(fun (decomp,sys) ->
clear_history ();
try let _ = loop2 sys in raise NO_CONTRADICTION
with UNSOLVABLE ->
let relie_on,path = depend [] [] (history ()) in
let dc,_ = List.partition (fun (_,id,_) -> List.mem id relie_on) decomp in
let red = List.map (fun (x,_,_) -> x) dc in
(red,relie_on,decomp,path))
sys_exploded
in
let max_count sys =
let tbl = Hashtbl.create 7 in
let augment x =
try incr (Hashtbl.find tbl x)
with Not_found -> Hashtbl.add tbl x (ref 1) in
let eq = ref (-1) and c = ref 0 in
List.iter (function
| ([],r_on,_,path) -> raise (FULL_SOLUTION (path,r_on))
| (l,_,_,_) -> List.iter augment l) sys;
Hashtbl.iter (fun x v -> if !v > !c then begin eq := x; c := !v end) tbl;
!eq
in
let rec solve systems =
try
let id = max_count systems in
let rec sign = function
| ((id',_,b)::l) -> if id=id' then b else sign l
| [] -> failwith "solve" in
let s1,s2 =
List.partition (fun (_,_,decomp,_) -> sign decomp) systems in
let s1' =
List.map (fun (dep,ro,dc,pa) -> (Util.list_except id dep,ro,dc,pa)) s1 in
let s2' =
List.map (fun (dep,ro,dc,pa) -> (Util.list_except id dep,ro,dc,pa)) s2 in
let (r1,relie1) = solve s1'
and (r2,relie2) = solve s2' in
let (eq,id1,id2) = List.assoc id explode_map in
[SPLIT_INEQ(eq,(id1,r1),(id2, r2))], eq.id :: Util.list_union relie1 relie2
with FULL_SOLUTION (x0,x1) -> (x0,x1)
in
let act,relie_on = solve all_solutions in
snd(depend relie_on act first_segment)
with UNSOLVABLE -> snd (depend [] [] (history ()))
end
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/plugins/omega/omega.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
************************************************************************
13/10/2002 : modified to cope with an external numbering of equations
things much simpler for the reflexive version where we should limit
the number of source of numbering.
************************************************************************
To ensure that polymorphic (<) is not used mistakenly on big integers
Warning: do not use (=) either on big int
a number uniquely identifying the equation
a boolean true for an eq, false for an ineq (Sigma a_i x_i >= 0)
the variables and their coefficient
a constant
For debugging
""
Initial simplification phase | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Omega : a solver of quantifier - free problems in Presburger Arithmetic
( CNET , Lannion , France )
and hypothesis . Its use for Omega is not more complex and it makes
open Names
module type INT = sig
type bigint
val less_than : bigint -> bigint -> bool
val add : bigint -> bigint -> bigint
val sub : bigint -> bigint -> bigint
val mult : bigint -> bigint -> bigint
val euclid : bigint -> bigint -> bigint * bigint
val neg : bigint -> bigint
val zero : bigint
val one : bigint
val to_string : bigint -> string
end
let debug = ref false
module MakeOmegaSolver (Int:INT) = struct
type bigint = Int.bigint
let (<?) = Int.less_than
let (<=?) x y = Int.less_than x y or x = y
let (>?) x y = Int.less_than y x
let (>=?) x y = Int.less_than y x or x = y
let (=?) = (=)
let (+) = Int.add
let (-) = Int.sub
let ( * ) = Int.mult
let (/) x y = fst (Int.euclid x y)
let (mod) x y = snd (Int.euclid x y)
let zero = Int.zero
let one = Int.one
let two = one + one
let negone = Int.neg one
let abs x = if Int.less_than x zero then Int.neg x else x
let string_of_bigint = Int.to_string
let neg = Int.neg
let (<) = ((<) : int -> int -> bool)
let (>) = ((>) : int -> int -> bool)
let (<=) = ((<=) : int -> int -> bool)
let (>=) = ((>=) : int -> int -> bool)
let pp i = print_int i; print_newline (); flush stdout
let push v l = l := v :: !l
let rec pgcd x y = if y =? zero then x else pgcd y (x mod y)
let pgcd_l = function
| [] -> failwith "pgcd_l"
| x :: l -> List.fold_left pgcd x l
let floor_div a b =
match a >=? zero , b >? zero with
| true,true -> a / b
| false,false -> a / b
| true, false -> (a-one) / b - one
| false,true -> (a+one) / b - one
type coeff = {c: bigint ; v: int}
type linear = coeff list
type eqn_kind = EQUA | INEQ | DISE
type afine = {
id: int ;
kind: eqn_kind;
body: coeff list;
constant: bigint }
type state_action = {
st_new_eq : afine;
st_def : afine;
st_orig : afine;
st_coef : bigint;
st_var : int }
type action =
| DIVIDE_AND_APPROX of afine * afine * bigint * bigint
| NOT_EXACT_DIVIDE of afine * bigint
| FORGET_C of int
| EXACT_DIVIDE of afine * bigint
| SUM of int * (bigint * afine) * (bigint * afine)
| STATE of state_action
| HYP of afine
| FORGET of int * int
| FORGET_I of int * int
| CONTRADICTION of afine * afine
| NEGATE_CONTRADICT of afine * afine * bool
| MERGE_EQ of int * afine * int
| CONSTANT_NOT_NUL of int * bigint
| CONSTANT_NUL of int
| CONSTANT_NEG of int * bigint
| SPLIT_INEQ of afine * (int * action list) * (int * action list)
| WEAKEN of int * bigint
exception UNSOLVABLE
exception NO_CONTRADICTION
let display_eq print_var (l,e) =
let _ =
List.fold_left
(fun not_first f ->
print_string
(if f.c <? zero then "- " else if not_first then "+ " else "");
let c = abs f.c in
if c =? one then
Printf.printf "%s " (print_var f.v)
else
Printf.printf "%s %s " (string_of_bigint c) (print_var f.v);
true)
false l
in
if e >? zero then
Printf.printf "+ %s " (string_of_bigint e)
else if e <? zero then
Printf.printf "- %s " (string_of_bigint (abs e))
let rec trace_length l =
let action_length accu = function
| SPLIT_INEQ (_,(_,l1),(_,l2)) ->
accu + one + trace_length l1 + trace_length l2
| _ -> accu + one in
List.fold_left action_length zero l
let operator_of_eq = function
| EQUA -> "=" | DISE -> "!=" | INEQ -> ">="
let kind_of = function
| EQUA -> "equation" | DISE -> "disequation" | INEQ -> "inequation"
let display_system print_var l =
List.iter
(fun { kind=b; body=e; constant=c; id=id} ->
Printf.printf "E%d: " id;
display_eq print_var (e,c);
Printf.printf "%s 0\n" (operator_of_eq b))
l;
print_string "------------------------\n\n"
let display_inequations print_var l =
List.iter (fun e -> display_eq print_var e;print_string ">= 0\n") l;
print_string "------------------------\n\n"
let sbi = string_of_bigint
let rec display_action print_var = function
| act :: l -> begin match act with
| DIVIDE_AND_APPROX (e1,e2,k,d) ->
Printf.printf
"Inequation E%d is divided by %s and the constant coefficient is \
rounded by substracting %s.\n" e1.id (sbi k) (sbi d)
| NOT_EXACT_DIVIDE (e,k) ->
Printf.printf
"Constant in equation E%d is not divisible by the pgcd \
%s of its other coefficients.\n" e.id (sbi k)
| EXACT_DIVIDE (e,k) ->
Printf.printf
"Equation E%d is divided by the pgcd \
%s of its coefficients.\n" e.id (sbi k)
| WEAKEN (e,k) ->
Printf.printf
"To ensure a solution in the dark shadow \
the equation E%d is weakened by %s.\n" e (sbi k)
| SUM (e,(c1,e1),(c2,e2)) ->
Printf.printf
"We state %s E%d = %s %s E%d + %s %s E%d.\n"
(kind_of e1.kind) e (sbi c1) (kind_of e1.kind) e1.id (sbi c2)
(kind_of e2.kind) e2.id
| STATE { st_new_eq = e } ->
Printf.printf "We define a new equation E%d: " e.id;
display_eq print_var (e.body,e.constant);
print_string (operator_of_eq e.kind); print_string " 0"
| HYP e ->
Printf.printf "We define E%d: " e.id;
display_eq print_var (e.body,e.constant);
print_string (operator_of_eq e.kind); print_string " 0\n"
| FORGET_C e -> Printf.printf "E%d is trivially satisfiable.\n" e
| FORGET (e1,e2) -> Printf.printf "E%d subsumes E%d.\n" e1 e2
| FORGET_I (e1,e2) -> Printf.printf "E%d subsumes E%d.\n" e1 e2
| MERGE_EQ (e,e1,e2) ->
Printf.printf "E%d and E%d can be merged into E%d.\n" e1.id e2 e
| CONTRADICTION (e1,e2) ->
Printf.printf
"Equations E%d and E%d imply a contradiction on their \
constant factors.\n" e1.id e2.id
| NEGATE_CONTRADICT(e1,e2,b) ->
Printf.printf
"Equations E%d and E%d state that their body is at the same time \
equal and different\n" e1.id e2.id
| CONSTANT_NOT_NUL (e,k) ->
Printf.printf "Equation E%d states %s = 0.\n" e (sbi k)
| CONSTANT_NEG(e,k) ->
Printf.printf "Equation E%d states %s >= 0.\n" e (sbi k)
| CONSTANT_NUL e ->
Printf.printf "Inequation E%d states 0 != 0.\n" e
| SPLIT_INEQ (e,(e1,l1),(e2,l2)) ->
Printf.printf "Equation E%d is split in E%d and E%d\n\n" e.id e1 e2;
display_action print_var l1;
print_newline ();
display_action print_var l2;
print_newline ()
end; display_action print_var l
| [] ->
flush stdout
let add_event, history, clear_history =
let accu = ref [] in
(fun (v:action) -> if !debug then display_action default_print_var [v]; push v accu),
(fun () -> !accu),
(fun () -> accu := [])
let nf_linear = Sort.list (fun x y -> x.v > y.v)
let nf ((b : bool),(e,(x : int))) = (b,(nf_linear e,x))
let map_eq_linear f =
let rec loop = function
| x :: l -> let c = f x.c in if c=?zero then loop l else {v=x.v; c=c} :: loop l
| [] -> []
in
loop
let map_eq_afine f e =
{ id = e.id; kind = e.kind; body = map_eq_linear f e.body;
constant = f e.constant }
let negate_eq = map_eq_afine (fun x -> neg x)
let rec sum p0 p1 = match (p0,p1) with
| ([], l) -> l | (l, []) -> l
| (((x1::l1) as l1'), ((x2::l2) as l2')) ->
if x1.v = x2.v then
let c = x1.c + x2.c in
if c =? zero then sum l1 l2 else {v=x1.v;c=c} :: sum l1 l2
else if x1.v > x2.v then
x1 :: sum l1 l2'
else
x2 :: sum l1' l2
let sum_afine new_eq_id eq1 eq2 =
{ kind = eq1.kind; id = new_eq_id ();
body = sum eq1.body eq2.body; constant = eq1.constant + eq2.constant }
exception FACTOR1
let rec chop_factor_1 = function
| x :: l ->
if abs x.c =? one then x,l else let (c',l') = chop_factor_1 l in (c',x::l')
| [] -> raise FACTOR1
exception CHOPVAR
let rec chop_var v = function
| f :: l -> if f.v = v then f,l else let (f',l') = chop_var v l in (f',f::l')
| [] -> raise CHOPVAR
let normalize ({id=id; kind=eq_flag; body=e; constant =x} as eq) =
if e = [] then begin
match eq_flag with
| EQUA ->
if x =? zero then [] else begin
add_event (CONSTANT_NOT_NUL(id,x)); raise UNSOLVABLE
end
| DISE ->
if x <> zero then [] else begin
add_event (CONSTANT_NUL id); raise UNSOLVABLE
end
| INEQ ->
if x >=? zero then [] else begin
add_event (CONSTANT_NEG(id,x)); raise UNSOLVABLE
end
end else
let gcd = pgcd_l (List.map (fun f -> abs f.c) e) in
if eq_flag=EQUA & x mod gcd <> zero then begin
add_event (NOT_EXACT_DIVIDE (eq,gcd)); raise UNSOLVABLE
end else if eq_flag=DISE & x mod gcd <> zero then begin
add_event (FORGET_C eq.id); []
end else if gcd <> one then begin
let c = floor_div x gcd in
let d = x - c * gcd in
let new_eq = {id=id; kind=eq_flag; constant=c;
body=map_eq_linear (fun c -> c / gcd) e} in
add_event (if eq_flag=EQUA or eq_flag = DISE then EXACT_DIVIDE(eq,gcd)
else DIVIDE_AND_APPROX(eq,new_eq,gcd,d));
[new_eq]
end else [eq]
let eliminate_with_in new_eq_id {v=v;c=c_unite} eq2
({body=e1; constant=c1} as eq1) =
try
let (f,_) = chop_var v e1 in
let coeff = if c_unite=?one then neg f.c else if c_unite=? negone then f.c
else failwith "eliminate_with_in" in
let res = sum_afine new_eq_id eq1 (map_eq_afine (fun c -> c * coeff) eq2) in
add_event (SUM (res.id,(one,eq1),(coeff,eq2))); res
with CHOPVAR -> eq1
let omega_mod a b = a - b * floor_div (two * a + b) (two * b)
let banerjee_step (new_eq_id,new_var_id,print_var) original l1 l2 =
let e = original.body in
let sigma = new_var_id () in
let smallest,var =
try
List.fold_left (fun (v,p) c -> if v >? (abs c.c) then abs c.c,c.v else (v,p))
(abs (List.hd e).c, (List.hd e).v) (List.tl e)
with Failure "tl" -> display_system print_var [original] ; failwith "TL" in
let m = smallest + one in
let new_eq =
{ constant = omega_mod original.constant m;
body = {c= neg m;v=sigma} ::
map_eq_linear (fun a -> omega_mod a m) original.body;
id = new_eq_id (); kind = EQUA } in
let definition =
{ constant = neg (floor_div (two * original.constant + m) (two * m));
body = map_eq_linear (fun a -> neg (floor_div (two * a + m) (two * m)))
original.body;
id = new_eq_id (); kind = EQUA } in
add_event (STATE {st_new_eq = new_eq; st_def = definition;
st_orig = original; st_coef = m; st_var = sigma});
let new_eq = List.hd (normalize new_eq) in
let eliminated_var, def = chop_var var new_eq.body in
let other_equations =
Util.list_map_append
(fun e ->
normalize (eliminate_with_in new_eq_id eliminated_var new_eq e)) l1 in
let inequations =
Util.list_map_append
(fun e ->
normalize (eliminate_with_in new_eq_id eliminated_var new_eq e)) l2 in
let original' = eliminate_with_in new_eq_id eliminated_var new_eq original in
let mod_original = map_eq_afine (fun c -> c / m) original' in
add_event (EXACT_DIVIDE (original',m));
List.hd (normalize mod_original),other_equations,inequations
let rec eliminate_one_equation ((new_eq_id,new_var_id,print_var) as new_ids) (e,other,ineqs) =
if !debug then display_system print_var (e::other);
try
let v,def = chop_factor_1 e.body in
(Util.list_map_append
(fun e' -> normalize (eliminate_with_in new_eq_id v e e')) other,
Util.list_map_append
(fun e' -> normalize (eliminate_with_in new_eq_id v e e')) ineqs)
with FACTOR1 ->
eliminate_one_equation new_ids (banerjee_step new_ids e other ineqs)
let rec banerjee ((_,_,print_var) as new_ids) (sys_eq,sys_ineq) =
let rec fst_eq_1 = function
(eq::l) ->
if List.exists (fun x -> abs x.c =? one) eq.body then eq,l
else let (eq',l') = fst_eq_1 l in (eq',eq::l')
| [] -> raise Not_found in
match sys_eq with
[] -> if !debug then display_system print_var sys_ineq; sys_ineq
| (e1::rest) ->
let eq,other = try fst_eq_1 sys_eq with Not_found -> (e1,rest) in
if eq.body = [] then
if eq.constant =? zero then begin
add_event (FORGET_C eq.id); banerjee new_ids (other,sys_ineq)
end else begin
add_event (CONSTANT_NOT_NUL(eq.id,eq.constant)); raise UNSOLVABLE
end
else
banerjee new_ids
(eliminate_one_equation new_ids (eq,other,sys_ineq))
type kind = INVERTED | NORMAL
let redundancy_elimination new_eq_id system =
let normal = function
({body=f::_} as e) when f.c <? zero -> negate_eq e, INVERTED
| e -> e,NORMAL in
let table = Hashtbl.create 7 in
List.iter
(fun e ->
let ({body=ne} as nx) ,kind = normal e in
if ne = [] then
if nx.constant <? zero then begin
add_event (CONSTANT_NEG(nx.id,nx.constant)); raise UNSOLVABLE
end else add_event (FORGET_C nx.id)
else
try
let (optnormal,optinvert) = Hashtbl.find table ne in
let final =
if kind = NORMAL then begin
match optnormal with
Some v ->
let kept =
if v.constant <? nx.constant
then begin add_event (FORGET (v.id,nx.id));v end
else begin add_event (FORGET (nx.id,v.id));nx end in
(Some(kept),optinvert)
| None -> Some nx,optinvert
end else begin
match optinvert with
Some v ->
let _kept =
if v.constant >? nx.constant
then begin add_event (FORGET_I (v.id,nx.id));v end
else begin add_event (FORGET_I (nx.id,v.id));nx end in
(optnormal,Some(if v.constant >? nx.constant then v else nx))
| None -> optnormal,Some nx
end in
begin match final with
(Some high, Some low) ->
if high.constant <? low.constant then begin
add_event(CONTRADICTION (high,negate_eq low));
raise UNSOLVABLE
end
| _ -> () end;
Hashtbl.remove table ne;
Hashtbl.add table ne final
with Not_found ->
Hashtbl.add table ne
(if kind = NORMAL then (Some nx,None) else (None,Some nx)))
system;
let accu_eq = ref [] in
let accu_ineq = ref [] in
Hashtbl.iter
(fun p0 p1 -> match (p0,p1) with
| (e, (Some x, Some y)) when x.constant =? y.constant ->
let id=new_eq_id () in
add_event (MERGE_EQ(id,x,y.id));
push {id=id; kind=EQUA; body=x.body; constant=x.constant} accu_eq
| (e, (optnorm,optinvert)) ->
begin match optnorm with
Some x -> push x accu_ineq | _ -> () end;
begin match optinvert with
Some x -> push (negate_eq x) accu_ineq | _ -> () end)
table;
!accu_eq,!accu_ineq
exception SOLVED_SYSTEM
let select_variable system =
let table = Hashtbl.create 7 in
let push v c=
try let r = Hashtbl.find table v in r := max !r (abs c)
with Not_found -> Hashtbl.add table v (ref (abs c)) in
List.iter (fun {body=l} -> List.iter (fun f -> push f.v f.c) l) system;
let vmin,cmin = ref (-1), ref zero in
let var_cpt = ref 0 in
Hashtbl.iter
(fun v ({contents = c}) ->
incr var_cpt;
if c <? !cmin or !vmin = (-1) then begin vmin := v; cmin := c end)
table;
if !var_cpt < 1 then raise SOLVED_SYSTEM;
!vmin
let classify v system =
List.fold_left
(fun (not_occ,below,over) eq ->
try let f,eq' = chop_var v eq.body in
if f.c >=? zero then (not_occ,((f.c,eq) :: below),over)
else (not_occ,below,((neg f.c,eq) :: over))
with CHOPVAR -> (eq::not_occ,below,over))
([],[],[]) system
let product new_eq_id dark_shadow low high =
List.fold_left
(fun accu (a,eq1) ->
List.fold_left
(fun accu (b,eq2) ->
let eq =
sum_afine new_eq_id (map_eq_afine (fun c -> c * b) eq1)
(map_eq_afine (fun c -> c * a) eq2) in
add_event(SUM(eq.id,(b,eq1),(a,eq2)));
match normalize eq with
| [eq] ->
let final_eq =
if dark_shadow then
let delta = (a - one) * (b - one) in
add_event(WEAKEN(eq.id,delta));
{id = eq.id; kind=INEQ; body = eq.body;
constant = eq.constant - delta}
else eq
in final_eq :: accu
| (e::_) -> failwith "Product dardk"
| [] -> accu)
accu high)
[] low
let fourier_motzkin (new_eq_id,_,print_var) dark_shadow system =
let v = select_variable system in
let (ineq_out, ineq_low,ineq_high) = classify v system in
let expanded = ineq_out @ product new_eq_id dark_shadow ineq_low ineq_high in
if !debug then display_system print_var expanded; expanded
let simplify ((new_eq_id,new_var_id,print_var) as new_ids) dark_shadow system =
if List.exists (fun e -> e.kind = DISE) system then
failwith "disequation in simplify";
clear_history ();
List.iter (fun e -> add_event (HYP e)) system;
let system = Util.list_map_append normalize system in
let eqs,ineqs = List.partition (fun e -> e.kind=EQUA) system in
let simp_eq,simp_ineq = redundancy_elimination new_eq_id ineqs in
let system = (eqs @ simp_eq,simp_ineq) in
let rec loop1a system =
let sys_ineq = banerjee new_ids system in
loop1b sys_ineq
and loop1b sys_ineq =
let simp_eq,simp_ineq = redundancy_elimination new_eq_id sys_ineq in
if simp_eq = [] then simp_ineq else loop1a (simp_eq,simp_ineq)
in
let rec loop2 system =
try
let expanded = fourier_motzkin new_ids dark_shadow system in
loop2 (loop1b expanded)
with SOLVED_SYSTEM ->
if !debug then display_system print_var system; system
in
loop2 (loop1a system)
let rec depend relie_on accu = function
| act :: l ->
begin match act with
| DIVIDE_AND_APPROX (e,_,_,_) ->
if List.mem e.id relie_on then depend relie_on (act::accu) l
else depend relie_on accu l
| EXACT_DIVIDE (e,_) ->
if List.mem e.id relie_on then depend relie_on (act::accu) l
else depend relie_on accu l
| WEAKEN (e,_) ->
if List.mem e relie_on then depend relie_on (act::accu) l
else depend relie_on accu l
| SUM (e,(_,e1),(_,e2)) ->
if List.mem e relie_on then
depend (e1.id::e2.id::relie_on) (act::accu) l
else
depend relie_on accu l
| STATE {st_new_eq=e;st_orig=o} ->
if List.mem e.id relie_on then depend (o.id::relie_on) (act::accu) l
else depend relie_on accu l
| HYP e ->
if List.mem e.id relie_on then depend relie_on (act::accu) l
else depend relie_on accu l
| FORGET_C _ -> depend relie_on accu l
| FORGET _ -> depend relie_on accu l
| FORGET_I _ -> depend relie_on accu l
| MERGE_EQ (e,e1,e2) ->
if List.mem e relie_on then
depend (e1.id::e2::relie_on) (act::accu) l
else
depend relie_on accu l
| NOT_EXACT_DIVIDE (e,_) -> depend (e.id::relie_on) (act::accu) l
| CONTRADICTION (e1,e2) ->
depend (e1.id::e2.id::relie_on) (act::accu) l
| CONSTANT_NOT_NUL (e,_) -> depend (e::relie_on) (act::accu) l
| CONSTANT_NEG (e,_) -> depend (e::relie_on) (act::accu) l
| CONSTANT_NUL e -> depend (e::relie_on) (act::accu) l
| NEGATE_CONTRADICT (e1,e2,_) ->
depend (e1.id::e2.id::relie_on) (act::accu) l
| SPLIT_INEQ _ -> failwith "depend"
end
| [] -> relie_on, accu
let depend relie_on accu trace =
Printf.printf " Longueur de la trace initiale : % d\n "
( trace_length trace + trace_length accu ) ;
let rel',trace ' = depend relie_on accu trace in
Printf.printf " Longueur de la trace simplifiée : % d\n " ( trace_length trace ' ) ;
rel',trace '
let depend relie_on accu trace =
Printf.printf "Longueur de la trace initiale : %d\n"
(trace_length trace + trace_length accu);
let rel',trace' = depend relie_on accu trace in
Printf.printf "Longueur de la trace simplifiée : %d\n" (trace_length trace');
rel',trace'
*)
let solve (new_eq_id,new_eq_var,print_var) system =
try let _ = simplify new_eq_id false system in failwith "no contradiction"
with UNSOLVABLE -> display_action print_var (snd (depend [] [] (history ())))
let negation (eqs,ineqs) =
let diseq,_ = List.partition (fun e -> e.kind = DISE) ineqs in
let normal = function
| ({body=f::_} as e) when f.c <? zero -> negate_eq e, INVERTED
| e -> e,NORMAL in
let table = Hashtbl.create 7 in
List.iter (fun e ->
let {body=ne;constant=c} ,kind = normal e in
Hashtbl.add table (ne,c) (kind,e)) diseq;
List.iter (fun e ->
assert (e.kind = EQUA);
let {body=ne;constant=c},kind = normal e in
try
let (kind',e') = Hashtbl.find table (ne,c) in
add_event (NEGATE_CONTRADICT (e,e',kind=kind'));
raise UNSOLVABLE
with Not_found -> ()) eqs
exception FULL_SOLUTION of action list * int list
let simplify_strong ((new_eq_id,new_var_id,print_var) as new_ids) system =
clear_history ();
List.iter (fun e -> add_event (HYP e)) system;
let rec loop1a system =
negation system;
let sys_ineq = banerjee new_ids system in
loop1b sys_ineq
and loop1b sys_ineq =
let dise,ine = List.partition (fun e -> e.kind = DISE) sys_ineq in
let simp_eq,simp_ineq = redundancy_elimination new_eq_id ine in
if simp_eq = [] then dise @ simp_ineq
else loop1a (simp_eq,dise @ simp_ineq)
in
let rec loop2 system =
try
let expanded = fourier_motzkin new_ids false system in
loop2 (loop1b expanded)
with SOLVED_SYSTEM -> if !debug then display_system print_var system; system
in
let rec explode_diseq = function
| (de::diseq,ineqs,expl_map) ->
let id1 = new_eq_id ()
and id2 = new_eq_id () in
let e1 =
{id = id1; kind=INEQ; body = de.body; constant = de.constant -one} in
let e2 =
{id = id2; kind=INEQ; body = map_eq_linear neg de.body;
constant = neg de.constant - one} in
let new_sys =
List.map (fun (what,sys) -> ((de.id,id1,true)::what, e1::sys))
ineqs @
List.map (fun (what,sys) -> ((de.id,id2,false)::what,e2::sys))
ineqs
in
explode_diseq (diseq,new_sys,(de.id,(de,id1,id2))::expl_map)
| ([],ineqs,expl_map) -> ineqs,expl_map
in
try
let system = Util.list_map_append normalize system in
let eqs,ineqs = List.partition (fun e -> e.kind=EQUA) system in
let dise,ine = List.partition (fun e -> e.kind = DISE) ineqs in
let simp_eq,simp_ineq = redundancy_elimination new_eq_id ine in
let system = (eqs @ simp_eq,simp_ineq @ dise) in
let system' = loop1a system in
let diseq,ineq = List.partition (fun e -> e.kind = DISE) system' in
let first_segment = history () in
let sys_exploded,explode_map = explode_diseq (diseq,[[],ineq],[]) in
let all_solutions =
List.map
(fun (decomp,sys) ->
clear_history ();
try let _ = loop2 sys in raise NO_CONTRADICTION
with UNSOLVABLE ->
let relie_on,path = depend [] [] (history ()) in
let dc,_ = List.partition (fun (_,id,_) -> List.mem id relie_on) decomp in
let red = List.map (fun (x,_,_) -> x) dc in
(red,relie_on,decomp,path))
sys_exploded
in
let max_count sys =
let tbl = Hashtbl.create 7 in
let augment x =
try incr (Hashtbl.find tbl x)
with Not_found -> Hashtbl.add tbl x (ref 1) in
let eq = ref (-1) and c = ref 0 in
List.iter (function
| ([],r_on,_,path) -> raise (FULL_SOLUTION (path,r_on))
| (l,_,_,_) -> List.iter augment l) sys;
Hashtbl.iter (fun x v -> if !v > !c then begin eq := x; c := !v end) tbl;
!eq
in
let rec solve systems =
try
let id = max_count systems in
let rec sign = function
| ((id',_,b)::l) -> if id=id' then b else sign l
| [] -> failwith "solve" in
let s1,s2 =
List.partition (fun (_,_,decomp,_) -> sign decomp) systems in
let s1' =
List.map (fun (dep,ro,dc,pa) -> (Util.list_except id dep,ro,dc,pa)) s1 in
let s2' =
List.map (fun (dep,ro,dc,pa) -> (Util.list_except id dep,ro,dc,pa)) s2 in
let (r1,relie1) = solve s1'
and (r2,relie2) = solve s2' in
let (eq,id1,id2) = List.assoc id explode_map in
[SPLIT_INEQ(eq,(id1,r1),(id2, r2))], eq.id :: Util.list_union relie1 relie2
with FULL_SOLUTION (x0,x1) -> (x0,x1)
in
let act,relie_on = solve all_solutions in
snd(depend relie_on act first_segment)
with UNSOLVABLE -> snd (depend [] [] (history ()))
end
|
b3ae0c32bd5add2c41bc86eda84e1cd707d40de64cdc9c7eeeba6e060ffafffc | cedlemo/OCaml-GI-ctypes-bindings-generator | File_filter_flags.ml | open Ctypes
open Foreign
type t = Filename | Uri | Display_name | Mime_type
type t_list = t list
let of_value v =
if v = Unsigned.UInt32.of_int 1 then Filename
else if v = Unsigned.UInt32.of_int 2 then Uri
else if v = Unsigned.UInt32.of_int 4 then Display_name
else if v = Unsigned.UInt32.of_int 8 then Mime_type
else raise (Invalid_argument "Unexpected File_filter_flags value")
let to_value = function
| Filename -> Unsigned.UInt32.of_int 1
| Uri -> Unsigned.UInt32.of_int 2
| Display_name -> Unsigned.UInt32.of_int 4
| Mime_type -> Unsigned.UInt32.of_int 8
let list_of_value v =
let open Unsigned.UInt32 in
let all_flags = [( 1 , Filename ); ( 2 , Uri ); ( 4 , Display_name ); ( 8 , Mime_type )]
in
let rec build_flags_list allf acc =
match allf with
| [] -> acc
| (i, f) :: q -> if ((logand v (of_int i )) <> zero) then build_flags_list q (f :: acc)
else build_flags_list q acc
in build_flags_list all_flags []
let list_to_value flags =
let open Unsigned.UInt32 in
let rec logor_flags l acc =
match l with
| [] -> acc
| f :: q -> let v = to_value f in
let acc' = logor acc v in
logor_flags q acc'
in
logor_flags flags zero
let t_list_view = view ~read:list_of_value ~write:list_to_value uint32_t
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/File_filter_flags.ml | ocaml | open Ctypes
open Foreign
type t = Filename | Uri | Display_name | Mime_type
type t_list = t list
let of_value v =
if v = Unsigned.UInt32.of_int 1 then Filename
else if v = Unsigned.UInt32.of_int 2 then Uri
else if v = Unsigned.UInt32.of_int 4 then Display_name
else if v = Unsigned.UInt32.of_int 8 then Mime_type
else raise (Invalid_argument "Unexpected File_filter_flags value")
let to_value = function
| Filename -> Unsigned.UInt32.of_int 1
| Uri -> Unsigned.UInt32.of_int 2
| Display_name -> Unsigned.UInt32.of_int 4
| Mime_type -> Unsigned.UInt32.of_int 8
let list_of_value v =
let open Unsigned.UInt32 in
let all_flags = [( 1 , Filename ); ( 2 , Uri ); ( 4 , Display_name ); ( 8 , Mime_type )]
in
let rec build_flags_list allf acc =
match allf with
| [] -> acc
| (i, f) :: q -> if ((logand v (of_int i )) <> zero) then build_flags_list q (f :: acc)
else build_flags_list q acc
in build_flags_list all_flags []
let list_to_value flags =
let open Unsigned.UInt32 in
let rec logor_flags l acc =
match l with
| [] -> acc
| f :: q -> let v = to_value f in
let acc' = logor acc v in
logor_flags q acc'
in
logor_flags flags zero
let t_list_view = view ~read:list_of_value ~write:list_to_value uint32_t
|
|
881e6dfcc147d40e65b7f4f68a093abfc6394127099b673c126f7ede2f93ef31 | andrewthad/sockets | Hybrid.hs | # language BangPatterns #
# language DataKinds #
# language MagicHash #
module Socket.Stream.Uninterruptible.Hybrid
( sendMutableBytesUnmanagedBytes
) where
import Data.Bytes.Types (MutableBytes,UnmanagedBytes)
import GHC.Exts (RealWorld,proxy#)
import Socket (Interruptibility(Uninterruptible))
import Socket.Stream (Connection,SendException)
import qualified Socket.Stream.Uninterruptible.MutableBytes.Addr.Send as MBA
sendMutableBytesUnmanagedBytes ::
Connection -- ^ Connection
^ First payload
^ Second payload
-> IO (Either (SendException 'Uninterruptible) ())
sendMutableBytesUnmanagedBytes = MBA.sendBoth proxy#
| null | https://raw.githubusercontent.com/andrewthad/sockets/90d314bd2ec71b248a90da6ad964c679f75cfcca/src/Socket/Stream/Uninterruptible/Hybrid.hs | haskell | ^ Connection | # language BangPatterns #
# language DataKinds #
# language MagicHash #
module Socket.Stream.Uninterruptible.Hybrid
( sendMutableBytesUnmanagedBytes
) where
import Data.Bytes.Types (MutableBytes,UnmanagedBytes)
import GHC.Exts (RealWorld,proxy#)
import Socket (Interruptibility(Uninterruptible))
import Socket.Stream (Connection,SendException)
import qualified Socket.Stream.Uninterruptible.MutableBytes.Addr.Send as MBA
sendMutableBytesUnmanagedBytes ::
^ First payload
^ Second payload
-> IO (Either (SendException 'Uninterruptible) ())
sendMutableBytesUnmanagedBytes = MBA.sendBoth proxy#
|
a9e6b4c190886cbf7aceb0bf825309618f47d595c8823fca6d6ca0ae5a4416a4 | elnewfie/lslforge | XmlCreate.hs | module Language.Lsl.Internal.XmlCreate(emit,emitSimple,xmlEscape,emitList) where
emit :: String -> [(String,String)] -> [(String -> String)] -> String -> String
emit name attrs body =
showString "<" . showString name .
foldl (.) (id) (map (\ (n,v) -> showString " " . showString n . showString "=" . shows v) attrs) .
showString ">" . (foldl (.) id body) . showString "</" . showString name . showString ">"
x = emit "root" [("id","one")] [
emit "child" [] [showString "hello"],
emit "child" [] [showString "world"]
]
emitSimple :: String -> [(String,String)] -> String -> String -> String
emitSimple name attrs body =
emit name attrs [showString (xmlEscape body)]
emitList tag f list = emit tag [] (map f list)
xmlEscape [] = []
xmlEscape ('<':cs) = ('&':'l':'t':';':(xmlEscape cs))
xmlEscape ('>':cs) = ('&':'g':'t':';':(xmlEscape cs))
xmlEscape ('\"':cs) = ('&':'q':'u':'o':'t':';':(xmlEscape cs))
xmlEscape ('&':cs) = ('&':'a':'m':'p':';':(xmlEscape cs))
xmlEscape ('\'':cs) = ('&':'a':'p':'o':'s':';':(xmlEscape cs))
xmlEscape (c:cs) = c:(xmlEscape cs)
| null | https://raw.githubusercontent.com/elnewfie/lslforge/27eb84231c53fffba6bdb0db67bde81c1c12dbb9/lslforge/haskell/src/Language/Lsl/Internal/XmlCreate.hs | haskell | module Language.Lsl.Internal.XmlCreate(emit,emitSimple,xmlEscape,emitList) where
emit :: String -> [(String,String)] -> [(String -> String)] -> String -> String
emit name attrs body =
showString "<" . showString name .
foldl (.) (id) (map (\ (n,v) -> showString " " . showString n . showString "=" . shows v) attrs) .
showString ">" . (foldl (.) id body) . showString "</" . showString name . showString ">"
x = emit "root" [("id","one")] [
emit "child" [] [showString "hello"],
emit "child" [] [showString "world"]
]
emitSimple :: String -> [(String,String)] -> String -> String -> String
emitSimple name attrs body =
emit name attrs [showString (xmlEscape body)]
emitList tag f list = emit tag [] (map f list)
xmlEscape [] = []
xmlEscape ('<':cs) = ('&':'l':'t':';':(xmlEscape cs))
xmlEscape ('>':cs) = ('&':'g':'t':';':(xmlEscape cs))
xmlEscape ('\"':cs) = ('&':'q':'u':'o':'t':';':(xmlEscape cs))
xmlEscape ('&':cs) = ('&':'a':'m':'p':';':(xmlEscape cs))
xmlEscape ('\'':cs) = ('&':'a':'p':'o':'s':';':(xmlEscape cs))
xmlEscape (c:cs) = c:(xmlEscape cs)
|
|
66b5df5c0787b7c99e5ae6902d0da235e0e5d35208a3d2c1fc373eb32250c112 | eval/deps-try | sexp.clj | (ns rebel-readline.clojure.sexp
(:require
[clojure.string :as string]
[rebel-readline.clojure.tokenizer :as tokenize])
(:import
[java.util.regex Pattern]))
(defn position-in-range? [s pos]
(<= 0 pos (dec (count s))))
(defn blank-at-position? [s pos]
(or (not (position-in-range? s pos))
(Character/isWhitespace (.charAt s pos))))
(defn non-interp-bounds [code-str]
(map rest
(tokenize/tag-non-interp code-str)))
(defn in-non-interp-bounds? [code-str pos] ;; position of insertion not before
(or (some #(and (< (first %) pos (second %)) %)
(non-interp-bounds code-str))
(and (<= 0 pos (dec (count code-str)))
(= (.charAt code-str pos) \\)
[pos (inc pos) :character])))
(def delims #{:bracket :brace :paren :quote})
(def openers (set (map #(->> % name (str "open-") keyword) delims)))
(def closers (set (map #(->> % name (str "close-") keyword) delims)))
(def flip-it
(->> openers
(map
(juxt identity #(as-> % x
(name x)
(string/split x #"-")
(str "close-" (second x))
(keyword x))))
((juxt identity (partial map (comp vec reverse))))
(apply concat)
(into {})))
(def delim-key->delim
{:open-paren \(
:close-paren \)
:open-brace \{
:close-brace \}
:open-bracket \[
:close-bracket \]
:open-quote \"
:close-quote \"})
(def flip-delimiter-char
(into {} (map (partial mapv delim-key->delim)) flip-it))
(defn scan-builder [open-test close-test]
(fn [specific-test stack x]
(cond
(open-test x)
(cons x stack)
(close-test x)
(cond
(and (empty? stack) (specific-test x))
(reduced [:finished x])
(empty? stack) (reduced [:finished nil]) ;; found closing bracket of wrong type
(= (-> stack first last) (flip-it (last x)))
(rest stack)
;; unbalanced
:else (reduced [:finished nil]))
:else stack)))
(def end-scan (scan-builder (comp openers last) (comp closers last)))
(def start-scan (scan-builder (comp closers last) (comp openers last)))
(declare in-quote?)
(defn find-open-sexp-end
([tokens pos]
(find-open-sexp-end tokens pos nil))
([tokens pos final-delim-pred]
(let [res (reduce
(partial end-scan (or final-delim-pred identity))
nil
(drop-while
#(<= (nth % 2) pos)
tokens))]
(when (= :finished (first res))
(second res)))))
(defn find-open-sexp-ends [tokens pos]
(when-let [[_ _ end _ :as res] (find-open-sexp-end tokens pos)]
(cons res
(lazy-seq
(find-open-sexp-ends tokens end)))))
(defn find-open-sexp-start
([tokens pos]
(find-open-sexp-start tokens pos nil))
([tokens pos final-delim-pred]
(let [res (reduce
(partial start-scan (or final-delim-pred identity))
nil
(reverse (take-while
#(<= (nth % 2) pos)
tokens)))]
(when (= :finished (first res))
(second res)))))
(defn find-open-sexp-starts [tokens pos]
(when-let [[_ start _ :as res] (find-open-sexp-start tokens pos)]
(cons res
(lazy-seq
(find-open-sexp-starts tokens start)))))
;; TODO :character should not be in in-quote?
(defn in-quote? [tokens pos]
(->> tokens
(filter #(#{:string-literal-body
:unterm-string-literal-body
:character} (last %)))
(filter (fn [[_ start end typ]]
(if (= :character typ)
(< start pos (inc end))
(<= start pos end))))
first))
(defn in-line-comment? [tokens pos]
(->> tokens
(filter #(#{:end-line-comment} (last %)))
(filter (fn [[_ start end _]]
(< start pos (inc end))))
first))
(defn search-for-line-start [s pos]
(loop [p pos]
(cond
(<= p 0) 0
(= (.charAt ^String s p) \newline)
(inc p)
:else (recur (dec p)))))
(defn count-leading-white-space [s] (count (re-find #"^[^\S\n]+" s)))
(defn delims-outward-from-pos [tokens pos]
(map vector
(find-open-sexp-starts tokens pos)
(concat (find-open-sexp-ends tokens pos)
(repeat nil))))
(defn valid-sexp-from-point [s pos]
(let [tokens (tokenize/tag-sexp-traversal s)
delims (take-while
(fn [[a b]]
(or (= (last a) (flip-it (last b)))
(nil? (last b))))
(delims-outward-from-pos tokens pos))
max-exist (last (take-while some? (map second delims)))
end (max (nth max-exist 2 0) pos)
need-repairs (filter (complement second) delims)
[_ start _ _] (first (last delims))]
(when (not-empty delims)
(->> need-repairs
(map (comp delim-key->delim flip-it last first))
(apply str (subs s start end))))))
(defn word-at-position [s pos]
(->> (tokenize/tag-words s)
(filter #(= :word (last %)))
(filter #(<= (second %) pos (nth % 2)))
first))
(defn whitespace? [c]
(re-matches #"[\s,]+" (str c)))
(defn scan-back-from [pred s pos]
(first (filter #(pred (.charAt s %))
(range (min (dec (count s)) pos) -1 -1))))
(defn first-non-whitespace-char-backwards-from [s pos]
(scan-back-from (complement whitespace?) s pos))
(defn sexp-ending-at-position [s pos]
(let [c (try (.charAt s pos) (catch Exception e nil))]
(when (#{ \" \) \} \] } c)
(let [sexp-tokens (tokenize/tag-sexp-traversal s)]
(when-let [[_ start] (find-open-sexp-start sexp-tokens pos)]
[(subs s start (inc pos)) start (inc pos) :sexp])))))
(defn sexp-or-word-ending-at-position [s pos]
(or (sexp-ending-at-position s pos)
(word-at-position s (inc pos))))
(defn funcall-word
"Given a string with sexps an a position into that string that
points to an open paren, return the first token that is the function
call word"
[code-str open-paren-pos]
(some->>
(tokenize/tag-matches (subs code-str open-paren-pos)
matches first word after paren
(Pattern/compile (str "(\\()\\s*(" tokenize/not-delimiter-exp "+)"))
:open-paren
:word)
not-empty
(take 2)
((fn [[a b]]
(when (= a ["(" 0 1 :open-paren])
b)))))
| null | https://raw.githubusercontent.com/eval/deps-try/da691c68b527ad5f9e770dbad82cce6cbbe16fb4/vendor/rebel-readline/rebel-readline/src/rebel_readline/clojure/sexp.clj | clojure | position of insertion not before
found closing bracket of wrong type
unbalanced
TODO :character should not be in in-quote? | (ns rebel-readline.clojure.sexp
(:require
[clojure.string :as string]
[rebel-readline.clojure.tokenizer :as tokenize])
(:import
[java.util.regex Pattern]))
(defn position-in-range? [s pos]
(<= 0 pos (dec (count s))))
(defn blank-at-position? [s pos]
(or (not (position-in-range? s pos))
(Character/isWhitespace (.charAt s pos))))
(defn non-interp-bounds [code-str]
(map rest
(tokenize/tag-non-interp code-str)))
(or (some #(and (< (first %) pos (second %)) %)
(non-interp-bounds code-str))
(and (<= 0 pos (dec (count code-str)))
(= (.charAt code-str pos) \\)
[pos (inc pos) :character])))
(def delims #{:bracket :brace :paren :quote})
(def openers (set (map #(->> % name (str "open-") keyword) delims)))
(def closers (set (map #(->> % name (str "close-") keyword) delims)))
(def flip-it
(->> openers
(map
(juxt identity #(as-> % x
(name x)
(string/split x #"-")
(str "close-" (second x))
(keyword x))))
((juxt identity (partial map (comp vec reverse))))
(apply concat)
(into {})))
(def delim-key->delim
{:open-paren \(
:close-paren \)
:open-brace \{
:close-brace \}
:open-bracket \[
:close-bracket \]
:open-quote \"
:close-quote \"})
(def flip-delimiter-char
(into {} (map (partial mapv delim-key->delim)) flip-it))
(defn scan-builder [open-test close-test]
(fn [specific-test stack x]
(cond
(open-test x)
(cons x stack)
(close-test x)
(cond
(and (empty? stack) (specific-test x))
(reduced [:finished x])
(= (-> stack first last) (flip-it (last x)))
(rest stack)
:else (reduced [:finished nil]))
:else stack)))
(def end-scan (scan-builder (comp openers last) (comp closers last)))
(def start-scan (scan-builder (comp closers last) (comp openers last)))
(declare in-quote?)
(defn find-open-sexp-end
([tokens pos]
(find-open-sexp-end tokens pos nil))
([tokens pos final-delim-pred]
(let [res (reduce
(partial end-scan (or final-delim-pred identity))
nil
(drop-while
#(<= (nth % 2) pos)
tokens))]
(when (= :finished (first res))
(second res)))))
(defn find-open-sexp-ends [tokens pos]
(when-let [[_ _ end _ :as res] (find-open-sexp-end tokens pos)]
(cons res
(lazy-seq
(find-open-sexp-ends tokens end)))))
(defn find-open-sexp-start
([tokens pos]
(find-open-sexp-start tokens pos nil))
([tokens pos final-delim-pred]
(let [res (reduce
(partial start-scan (or final-delim-pred identity))
nil
(reverse (take-while
#(<= (nth % 2) pos)
tokens)))]
(when (= :finished (first res))
(second res)))))
(defn find-open-sexp-starts [tokens pos]
(when-let [[_ start _ :as res] (find-open-sexp-start tokens pos)]
(cons res
(lazy-seq
(find-open-sexp-starts tokens start)))))
(defn in-quote? [tokens pos]
(->> tokens
(filter #(#{:string-literal-body
:unterm-string-literal-body
:character} (last %)))
(filter (fn [[_ start end typ]]
(if (= :character typ)
(< start pos (inc end))
(<= start pos end))))
first))
(defn in-line-comment? [tokens pos]
(->> tokens
(filter #(#{:end-line-comment} (last %)))
(filter (fn [[_ start end _]]
(< start pos (inc end))))
first))
(defn search-for-line-start [s pos]
(loop [p pos]
(cond
(<= p 0) 0
(= (.charAt ^String s p) \newline)
(inc p)
:else (recur (dec p)))))
(defn count-leading-white-space [s] (count (re-find #"^[^\S\n]+" s)))
(defn delims-outward-from-pos [tokens pos]
(map vector
(find-open-sexp-starts tokens pos)
(concat (find-open-sexp-ends tokens pos)
(repeat nil))))
(defn valid-sexp-from-point [s pos]
(let [tokens (tokenize/tag-sexp-traversal s)
delims (take-while
(fn [[a b]]
(or (= (last a) (flip-it (last b)))
(nil? (last b))))
(delims-outward-from-pos tokens pos))
max-exist (last (take-while some? (map second delims)))
end (max (nth max-exist 2 0) pos)
need-repairs (filter (complement second) delims)
[_ start _ _] (first (last delims))]
(when (not-empty delims)
(->> need-repairs
(map (comp delim-key->delim flip-it last first))
(apply str (subs s start end))))))
(defn word-at-position [s pos]
(->> (tokenize/tag-words s)
(filter #(= :word (last %)))
(filter #(<= (second %) pos (nth % 2)))
first))
(defn whitespace? [c]
(re-matches #"[\s,]+" (str c)))
(defn scan-back-from [pred s pos]
(first (filter #(pred (.charAt s %))
(range (min (dec (count s)) pos) -1 -1))))
(defn first-non-whitespace-char-backwards-from [s pos]
(scan-back-from (complement whitespace?) s pos))
(defn sexp-ending-at-position [s pos]
(let [c (try (.charAt s pos) (catch Exception e nil))]
(when (#{ \" \) \} \] } c)
(let [sexp-tokens (tokenize/tag-sexp-traversal s)]
(when-let [[_ start] (find-open-sexp-start sexp-tokens pos)]
[(subs s start (inc pos)) start (inc pos) :sexp])))))
(defn sexp-or-word-ending-at-position [s pos]
(or (sexp-ending-at-position s pos)
(word-at-position s (inc pos))))
(defn funcall-word
"Given a string with sexps an a position into that string that
points to an open paren, return the first token that is the function
call word"
[code-str open-paren-pos]
(some->>
(tokenize/tag-matches (subs code-str open-paren-pos)
matches first word after paren
(Pattern/compile (str "(\\()\\s*(" tokenize/not-delimiter-exp "+)"))
:open-paren
:word)
not-empty
(take 2)
((fn [[a b]]
(when (= a ["(" 0 1 :open-paren])
b)))))
|
1fb4ee0dfc1971b1b2b84890ec3d83e177d33e5cf7b9cc276aaa181a7e8ca371 | metosin/vega-tools | expr_test.cljs | (ns vega-tools.expr-test
(:require [cljs.test :refer-macros [deftest is testing]]
[vega-tools.expr :refer [compile-expr] :refer-macros [expr]]))
(deftest test-compile-expr
(testing "Primitive expressions"
(is (= (compile-expr :datum) "datum"))
(is (= (compile-expr {}) "{}"))
(is (= (compile-expr 123) "123")))
(testing "Functions"
(is (= (compile-expr [:if :datum.x 1 0]) "if(datum.x,1,0)"))
(is (= (compile-expr [:< 1 2 3]) "1<2<3"))))
(deftest test-expr
(testing "Primitive expressions"
(is (= "123" (expr 123)))
(is (= "{}" (expr {})))
(is (= "datum" (expr :datum)))
(testing "Functions"
(is (= "if(datum.x,1,0)" (expr (if :datum.x 1 0))))
(is (= "if(a<1,x,y)" (expr (if (< :a 1) :x :y)))))
(testing "Variables"
(let [x 5]
(is (= "5" (expr x)))
(is (= "sin(5)" (expr (sin x))))))))
| null | https://raw.githubusercontent.com/metosin/vega-tools/9530b7514fc24ba3b918d69d78c6ab31971e91d7/test/cljs/vega_tools/expr_test.cljs | clojure | (ns vega-tools.expr-test
(:require [cljs.test :refer-macros [deftest is testing]]
[vega-tools.expr :refer [compile-expr] :refer-macros [expr]]))
(deftest test-compile-expr
(testing "Primitive expressions"
(is (= (compile-expr :datum) "datum"))
(is (= (compile-expr {}) "{}"))
(is (= (compile-expr 123) "123")))
(testing "Functions"
(is (= (compile-expr [:if :datum.x 1 0]) "if(datum.x,1,0)"))
(is (= (compile-expr [:< 1 2 3]) "1<2<3"))))
(deftest test-expr
(testing "Primitive expressions"
(is (= "123" (expr 123)))
(is (= "{}" (expr {})))
(is (= "datum" (expr :datum)))
(testing "Functions"
(is (= "if(datum.x,1,0)" (expr (if :datum.x 1 0))))
(is (= "if(a<1,x,y)" (expr (if (< :a 1) :x :y)))))
(testing "Variables"
(let [x 5]
(is (= "5" (expr x)))
(is (= "sin(5)" (expr (sin x))))))))
|
|
06333643b735cd61148b60f2c947585dec1ca56c9eb88be373500cf868fdf56f | emotiq/emotiq | genesis.lisp | (in-package :emotiq-config-generate-test)
(defun create-genesis-block ()
(let ((d (emotiq/filesystem:new-temporary-directory)))
(let* ((nodes
(emotiq/config/generate::generate-keys
emotiq/config/generate::*eg-config-zerotier*))
(stakes
(emotiq/config/generate::generate-stakes
(mapcar (lambda (plist)
(getf plist :public))
nodes)))
(public-key-for-coins
(getf (first nodes) :public))
(coinbase-keypair
(pbc:make-keying-triple
public-key-for-coins (getf (first nodes) :private)))
(configuration
(emotiq/config/generate::make-configuration
(first nodes)
:address-for-coins public-key-for-coins
:stakes stakes)))
(emotiq/config/generate::generate-node d configuration
:key-records nodes)
(let* ((genesis-block
(emotiq/config:get-genesis-block
:root d))
(keypair
(emotiq/config:get-nth-key 0 :root d)))
(values
(cosi-simgen:with-block-list ((list genesis-block))
(cosi/proofs/newtx:get-balance (emotiq/txn:address keypair)))
d
coinbase-keypair)))))
(defun verify-genesis-block (&key (root (emotiq/fs:etc/)))
(let* ((genesis-block
(emotiq/config:get-genesis-block
:root root))
(keypair
(emotiq/config:get-nth-key 0 :root root)))
(values
(cosi-simgen:with-block-list ((list genesis-block))
(cosi/proofs/newtx:get-balance (emotiq/txn:address keypair)))
root)))
(define-test genesis-block ()
(multiple-value-bind (coinbase-amount directory coinbase-paid-to-keypair)
(create-genesis-block)
(emotiq:note "Created genesis block with coinbase paid ~a EMTQ to ~a~%~tin '~a'."
coinbase-amount
(emotiq/txn:address coinbase-paid-to-keypair)
directory)
(assert-true (equal coinbase-amount
(cosi/proofs/newtx:initial-total-coin-amount)))))
| null | https://raw.githubusercontent.com/emotiq/emotiq/9af78023f670777895a3dac29a2bbe98e19b6249/src/test/genesis.lisp | lisp | (in-package :emotiq-config-generate-test)
(defun create-genesis-block ()
(let ((d (emotiq/filesystem:new-temporary-directory)))
(let* ((nodes
(emotiq/config/generate::generate-keys
emotiq/config/generate::*eg-config-zerotier*))
(stakes
(emotiq/config/generate::generate-stakes
(mapcar (lambda (plist)
(getf plist :public))
nodes)))
(public-key-for-coins
(getf (first nodes) :public))
(coinbase-keypair
(pbc:make-keying-triple
public-key-for-coins (getf (first nodes) :private)))
(configuration
(emotiq/config/generate::make-configuration
(first nodes)
:address-for-coins public-key-for-coins
:stakes stakes)))
(emotiq/config/generate::generate-node d configuration
:key-records nodes)
(let* ((genesis-block
(emotiq/config:get-genesis-block
:root d))
(keypair
(emotiq/config:get-nth-key 0 :root d)))
(values
(cosi-simgen:with-block-list ((list genesis-block))
(cosi/proofs/newtx:get-balance (emotiq/txn:address keypair)))
d
coinbase-keypair)))))
(defun verify-genesis-block (&key (root (emotiq/fs:etc/)))
(let* ((genesis-block
(emotiq/config:get-genesis-block
:root root))
(keypair
(emotiq/config:get-nth-key 0 :root root)))
(values
(cosi-simgen:with-block-list ((list genesis-block))
(cosi/proofs/newtx:get-balance (emotiq/txn:address keypair)))
root)))
(define-test genesis-block ()
(multiple-value-bind (coinbase-amount directory coinbase-paid-to-keypair)
(create-genesis-block)
(emotiq:note "Created genesis block with coinbase paid ~a EMTQ to ~a~%~tin '~a'."
coinbase-amount
(emotiq/txn:address coinbase-paid-to-keypair)
directory)
(assert-true (equal coinbase-amount
(cosi/proofs/newtx:initial-total-coin-amount)))))
|
|
42adb5c7aa50a8a1be74b228515b2b97451560ff07592a4fe095c66d39c3da57 | liebke/avout | atom.clj | (ns avout.sdb.atom
(:use avout.state)
(:require [simpledb.core :as sdb]
[avout.atoms :as atoms])
(:import clojure.lang.IRef))
(deftype SDBStateContainer [client domainName name]
StateContainer
(initStateContainer [this]
(when-not (seq (sdb/get-attributes client domainName name))
(sdb/put-attributes client domainName name [{:name "value" :value (pr-str nil)}])))
(destroyStateContainer [this]
(sdb/delete-attributes client domainName name [{:name "value"}]))
(getState [this]
(let [data (sdb/get-attributes client domainName name)]
(if (contains? data "value")
(read-string (get data "value"))
(throw (RuntimeException. "sdb-atom unbound")))))
(setState [this newValue]
(sdb/put-attributes client domainName name [{:name "value" :value (pr-str newValue)}]))
(compareAndSwap [this oldValue newValue]
(sdb/put-attributes client domainName name
[{:name "value" :value (pr-str newValue)}]
{:name "value" :value (pr-str oldValue)})))
(defn sdb-atom
([sdb-client domain-name name init-value & {:keys [validator]}]
(doto (avout.atoms.DistributedAtom. sdb-client domain-name name
(SDBStateContainer. sdb-client domain-name name)
(atom validator))
.init
(.reset init-value)))
([sdb-client domain-name name]
(doto (avout.atoms.DistributedAtom. sdb-client domain-name name
(SDBStateContainer. sdb-client domain-name name)
(atom nil))
.init)))
(defn sdb-initializer
([name {:keys [sdb-client domain-name]}]
(sdb-atom sdb-client domain-name name))
([name init-value {:keys [sdb-client domain-name]}]
(sdb-atom sdb-client domain-name name init-value)))
(comment
(use 'simpledb.core)
(use 'avout.core)
(use 'avout.sdb.atom)
(def ACCESS-KEY (get (System/getenv) "AWS_ACCESS_KEY"))
(def SECRET-KEY (get (System/getenv) "AWS_SECRET_KEY"))
(def sdb (sdb-client ACCESS-KEY SECRET-KEY))
(def a0 (sdb-atom sdb "test-domain" "atest" 0))
@a
(swap!! a0 inc)
(def a1 (sdb-atom sdb "test-domain" "atest" []))
@a1
(swap!! a1 conj 0)
(swap!! a1 conj 1)
) | null | https://raw.githubusercontent.com/liebke/avout/06f3e00d63f487ebd01581343302e96b915f5b03/experimental/orolo/plugins/avout-sdb/src/avout/sdb/atom.clj | clojure | (ns avout.sdb.atom
(:use avout.state)
(:require [simpledb.core :as sdb]
[avout.atoms :as atoms])
(:import clojure.lang.IRef))
(deftype SDBStateContainer [client domainName name]
StateContainer
(initStateContainer [this]
(when-not (seq (sdb/get-attributes client domainName name))
(sdb/put-attributes client domainName name [{:name "value" :value (pr-str nil)}])))
(destroyStateContainer [this]
(sdb/delete-attributes client domainName name [{:name "value"}]))
(getState [this]
(let [data (sdb/get-attributes client domainName name)]
(if (contains? data "value")
(read-string (get data "value"))
(throw (RuntimeException. "sdb-atom unbound")))))
(setState [this newValue]
(sdb/put-attributes client domainName name [{:name "value" :value (pr-str newValue)}]))
(compareAndSwap [this oldValue newValue]
(sdb/put-attributes client domainName name
[{:name "value" :value (pr-str newValue)}]
{:name "value" :value (pr-str oldValue)})))
(defn sdb-atom
([sdb-client domain-name name init-value & {:keys [validator]}]
(doto (avout.atoms.DistributedAtom. sdb-client domain-name name
(SDBStateContainer. sdb-client domain-name name)
(atom validator))
.init
(.reset init-value)))
([sdb-client domain-name name]
(doto (avout.atoms.DistributedAtom. sdb-client domain-name name
(SDBStateContainer. sdb-client domain-name name)
(atom nil))
.init)))
(defn sdb-initializer
([name {:keys [sdb-client domain-name]}]
(sdb-atom sdb-client domain-name name))
([name init-value {:keys [sdb-client domain-name]}]
(sdb-atom sdb-client domain-name name init-value)))
(comment
(use 'simpledb.core)
(use 'avout.core)
(use 'avout.sdb.atom)
(def ACCESS-KEY (get (System/getenv) "AWS_ACCESS_KEY"))
(def SECRET-KEY (get (System/getenv) "AWS_SECRET_KEY"))
(def sdb (sdb-client ACCESS-KEY SECRET-KEY))
(def a0 (sdb-atom sdb "test-domain" "atest" 0))
@a
(swap!! a0 inc)
(def a1 (sdb-atom sdb "test-domain" "atest" []))
@a1
(swap!! a1 conj 0)
(swap!! a1 conj 1)
) |
|
288eb7a72e5a7d89f2fe8ed3aa5cfbd394d43c5a737464a45c670f548e44c925 | SamB/coq | showproof.ml |
# use " /cygdrive / D / Tools / coq-7avril / dev / base_include " ; ;
open Coqast ; ;
#use "/cygdrive/D/Tools/coq-7avril/dev/base_include";;
open Coqast;;
*)
open Environ
open Evd
open Names
open Nameops
open Libnames
open Term
open Termops
open Util
open Proof_type
open Pfedit
open Translate
open Term
open Reductionops
open Clenv
open Typing
open Inductive
open Inductiveops
open Vernacinterp
open Declarations
open Showproof_ct
open Proof_trees
open Sign
open Pp
open Printer
open Rawterm
open Tacexpr
open Genarg
(*****************************************************************************)
:
Arbre de preuve maison:
*)
(* hypotheses *)
type nhyp = {hyp_name : identifier;
hyp_type : Term.constr;
hyp_full_type: Term.constr}
;;
type ntactic = tactic_expr
;;
type nproof =
Notproved
| Proof of ntactic * (ntree list)
and ngoal=
{newhyp : nhyp list;
t_concl : Term.constr;
t_full_concl: Term.constr;
t_full_env: Environ.named_context_val}
and ntree=
{t_info:string;
t_goal:ngoal;
t_proof : nproof}
;;
let hyps {t_info=info;
t_goal={newhyp=lh;t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p} = lh
;;
let concl {t_info=info;
t_goal={newhyp=lh;t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p} = g
;;
let proof {t_info=info;
t_goal={newhyp=lh;t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p} = p
;;
let g_env {t_info=info;
t_goal={newhyp=lh;t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p} = ge
;;
let sub_ntrees t =
match (proof t) with
Notproved -> []
| Proof (_,l) -> l
;;
let tactic t =
match (proof t) with
Notproved -> failwith "no tactic applied"
| Proof (t,_) -> t
;;
un arbre est contient pas de sous - but non prouves ,
ou bien s'il a un cousin pas a au plus but non clos , le premier sous - but .
un arbre est clos s'il ne contient pas de sous-but non prouves,
ou bien s'il a un cousin gauche qui n'est pas clos
ce qui fait qu'on a au plus un sous-but non clos, le premier sous-but.
*)
let update_closed nt =
let found_not_closed=ref false in
let rec update {t_info=b; t_goal=g; t_proof =p} =
if !found_not_closed
then {t_info="to_prove"; t_goal=g; t_proof =p}
else
match p with
Notproved -> found_not_closed:=true;
{t_info="not_proved"; t_goal=g; t_proof =p}
| Proof(tac,lt) ->
let lt1=List.map update lt in
let b=ref "proved" in
(List.iter
(fun x ->
if x.t_info ="not_proved" then b:="not_proved") lt1;
{t_info=(!b);
t_goal=g;
t_proof=Proof(tac,lt1)})
in update nt
;;
(*
type complet avec les hypotheses.
*)
let long_type_hyp lh t=
let t=ref t in
List.iter (fun (n,th) ->
let ni = match n with Name ni -> ni | _ -> assert false in
t:= mkProd(n,th,subst_term (mkVar ni) !t))
(List.rev lh);
!t
;;
(* let long_type_hyp x y = y;; *)
(* Expansion des tactikelles *)
let seq_to_lnhyp sign sign' cl =
let lh= ref (List.map (fun (x,c,t) -> (Name x, t)) sign) in
let nh=List.map (fun (id,c,ty) ->
{hyp_name=id;
hyp_type=ty;
hyp_full_type=
let res= long_type_hyp !lh ty in
lh:=(!lh)@[(Name id,ty)];
res})
sign'
in
{newhyp=nh;
t_concl=cl;
t_full_concl=long_type_hyp !lh cl;
t_full_env = Environ.val_of_named_context (sign@sign')}
;;
let rule_is_complex r =
match r with
Nested (Tactic
((TacArg (Tacexp _)
|TacAtom (_,(TacAuto _|TacSymmetry _))),_),_) -> true
|_ -> false
;;
let rule_to_ntactic r =
let rt =
(match r with
Nested(Tactic (t,_),_) -> t
| Prim (Refine h) -> TacAtom (dummy_loc,TacExact (Tactics.inj_open h))
| _ -> TacAtom (dummy_loc, TacIntroPattern [])) in
if rule_is_complex r
then (match rt with
TacArg (Tacexp _) as t -> t
| _ -> assert false)
else rt
;;
Attribue les preuves de la liste l aux sous - buts
let fill_unproved nt l =
let lnt = ref l in
let rec fill nt =
let {t_goal=g;t_proof=p}=nt in
match p with
Notproved -> let p=List.hd (!lnt) in
lnt:=List.tl (!lnt);
{t_info="to_prove";t_goal=g;t_proof=p}
|Proof(tac,lt) ->
{t_info="to_prove";t_goal=g;
t_proof=Proof(tac,List.map fill lt)}
in fill nt
;;
(* Differences entre signatures *)
let new_sign osign sign =
let res=ref [] in
List.iter (fun (id,c,ty) ->
try (let (_,_,_ty1)= (lookup_named id osign) in
())
with Not_found -> res:=(id,c,ty)::(!res))
sign;
!res
;;
let old_sign osign sign =
let res=ref [] in
List.iter (fun (id,c,ty) ->
try (let (_,_,ty1) = (lookup_named id osign) in
if ty1 = ty then res:=(id,c,ty)::(!res))
with Not_found -> ())
sign;
!res
;;
convertit l'arbre de preuve courant en
let to_nproof sigma osign pf =
let rec to_nproof_rec sigma osign pf =
let {evar_hyps=sign;evar_concl=cl} = pf.goal in
let sign = Environ.named_context_of_val sign in
let nsign = new_sign osign sign in
let oldsign = old_sign osign sign in
match pf.ref with
None -> {t_info="to_prove";
t_goal=(seq_to_lnhyp oldsign nsign cl);
t_proof=Notproved}
| Some(r,spfl) ->
if rule_is_complex r
then (
let p1= to_nproof_rec sigma sign (subproof_of_proof pf) in
let ntree= fill_unproved p1
(List.map (fun x -> (to_nproof_rec sigma sign x).t_proof)
spfl) in
(match r with
Nested(Tactic (TacAtom (_, TacAuto _),_),_) ->
if spfl=[]
then
{t_info="to_prove";
t_goal= {newhyp=[];
t_concl=concl ntree;
t_full_concl=ntree.t_goal.t_full_concl;
t_full_env=ntree.t_goal.t_full_env};
t_proof= Proof (TacAtom (dummy_loc,TacExtend (dummy_loc,"InfoAuto",[])), [ntree])}
else ntree
| _ -> ntree))
else
{t_info="to_prove";
t_goal=(seq_to_lnhyp oldsign nsign cl);
t_proof=(Proof (rule_to_ntactic r,
List.map (fun x -> to_nproof_rec sigma sign x) spfl))}
in update_closed (to_nproof_rec sigma osign pf)
;;
(*
recupere l'arbre de preuve courant.
*)
let get_nproof () =
to_nproof (Global.env()) []
(Tacmach.proof_of_pftreestate (get_pftreestate()))
;;
(*****************************************************************************)
Pprinter
*)
let pr_void () = sphs "";;
let list_rem l = match l with [] -> [] |x::l1->l1;;
liste
let prls l =
let res = ref (sps (List.hd l)) in
List.iter (fun s ->
res:= sphv [ !res; spb; sps s]) (list_rem l);
!res
;;
let prphrases f l =
spv (List.map (fun s -> sphv [f s; sps ","]) l)
;;
(* indentation *)
let spi = spnb 3;;
(* en colonne *)
let prl f l =
if l=[] then spe else spv (List.map f l);;
(*en colonne, avec indentation *)
let prli f l =
if l=[] then spe else sph [spi; spv (List.map f l)];;
(*
Langues.
*)
let rand l =
List.nth l (Random.int (List.length l))
;;
type natural_languages = French | English;;
let natural_language = ref French;;
(*****************************************************************************)
liens html pour proof - by - pointing
Les liens html pour proof-by-pointing
*)
(* le path du but en cours. *)
let path=ref[1];;
let ftag_apply =ref (fun (n:string) t -> spt t);;
let ftag_case =ref (fun n -> sps n);;
let ftag_elim =ref (fun n -> sps n);;
let ftag_hypt =ref (fun h t -> sphypt (translate_path !path) h t);;
let ftag_hyp =ref (fun h t -> sphyp (translate_path !path) h t);;
let ftag_uselemma =ref (fun h t ->
let intro = match !natural_language with
French -> "par"
| English -> "by"
in
spuselemma intro h t);;
let ftag_toprove =ref (fun t -> sptoprove (translate_path !path) t);;
let tag_apply = !ftag_apply;;
let tag_case = !ftag_case;;
let tag_elim = !ftag_elim;;
let tag_uselemma = !ftag_uselemma;;
let tag_hyp = !ftag_hyp;;
let tag_hypt = !ftag_hypt;;
let tag_toprove = !ftag_toprove;;
(*****************************************************************************)
(* pluriel *)
let txtn n s =
if n=1 then s
else match s with
|"un" -> "des"
|"a" -> ""
|"an" -> ""
|"une" -> "des"
|"Soit" -> "Soient"
|"Let" -> "Let"
| s -> s^"s"
;;
let _et () = match !natural_language with
French -> sps "et"
| English -> sps "and"
;;
let name_count = ref 0;;
let new_name () =
name_count:=(!name_count)+1;
string_of_int !name_count
;;
let enumerate f ln =
match ln with
[] -> []
| [x] -> [f x]
|ln ->
let rec enum_rec f ln =
(match ln with
[x;y] -> [f x; spb; sph [_et ();spb;f y]]
|x::l -> [sph [(f x);sps ","];spb]@(enum_rec f l)
| _ -> assert false)
in enum_rec f ln
;;
let constr_of_ast = Constrintern.interp_constr Evd.empty (Global.env());;
let sp_tac tac = failwith "TODO"
let soit_A_une_proposition nh ln t= match !natural_language with
French ->
sphv ([sps (txtn nh "Soit");spb]@(enumerate (fun x -> tag_hyp x t) ln)
@[spb; prls [txtn nh "une";txtn nh "proposition"]])
| English ->
sphv ([sps "Let";spb]@(enumerate (fun x -> tag_hyp x t) ln)
@[spb; prls ["be"; txtn nh "a";txtn nh "proposition"]])
;;
let on_a ()= match !natural_language with
French -> rand ["on a "]
| English ->rand ["we have "]
;;
let bon_a ()= match !natural_language with
French -> rand ["On a "]
| English ->rand ["We have "]
;;
(* "Soit x un élément de T" / "Let x be an element of T": introduce the
   variables named in [ln] as elements of type [t] ([nh] = number of names). *)
let soit_X_un_element_de_T nh ln t =
  let names = enumerate (fun x -> tag_hyp x t) ln in
  match !natural_language with
  | French ->
      sphv ([sps (txtn nh "Soit"); spb] @ names
            @ [spb; prls [txtn nh "un"; txtn nh "élément"; "de"]]
            @ [spb; spt t])
  | English ->
      sphv ([sps (txtn nh "Let"); spb] @ names
            @ [spb; prls ["be"; txtn nh "an"; txtn nh "element"; "of"]]
            @ [spb; spt t])
;;

(* "Soit f une fonction de type T" / "Let f be a function of type T". *)
let soit_F_une_fonction_de_type_T nh ln t =
  let names = enumerate (fun x -> tag_hyp x t) ln in
  match !natural_language with
  | French ->
      sphv ([sps (txtn nh "Soit"); spb] @ names
            @ [spb; prls [txtn nh "une"; txtn nh "fonction"; "de"; "type"]]
            @ [spb; spt t])
  | English ->
      sphv ([sps (txtn nh "Let"); spb] @ names
            @ [spb; prls ["be"; txtn nh "a"; txtn nh "function"; "of"; "type"]]
            @ [spb; spt t])
;;
(* " telle que " (feminine) — number agreement handled by [txtn nh]. *)
let telle_que nh =
  match !natural_language with
  | French -> [prls [" "; txtn nh "telle"; "que"; " "]]
  | English -> [prls [" "; "such"; "that"; " "]]
;;
(* " tel que " (masculine). *)
let tel_que nh =
  match !natural_language with
  | French -> [prls [" "; txtn nh "tel"; "que"; " "]]
  | English -> [prls [" "; "such"; "that"; " "]]
;;
(* "Supposons " / "Suppose " — introduces an assumed formula. *)
let supposons () =
  match !natural_language with
  | French -> "Supposons "
  | English -> "Suppose "
;;
(* "Cas" / "Case" — header word for a case split. *)
let cas () =
  match !natural_language with
  | French -> "Cas"
  | English -> "Case"
;;
(* "Donnons une proposition" / "Let us give a proposition". *)
let donnons_une_proposition () =
  match !natural_language with
  | French -> sph [prls ["Donnons"; "une"; "proposition"]]
  | English -> sph [prls ["Let us give"; "a"; "proposition"]]
;;
(* Announce the formula [g] to prove, with a randomly chosen verb. *)
let montrons g =
  match !natural_language with
  | French ->
      sph [sps (rand ["Prouvons"; "Montrons"; "Démontrons"]);
           spb; spt g; sps ". "]
  | English ->
      sph [sps (rand ["Let us"; "Now"]); spb;
           sps (rand ["prove"; "show"]);
           spb; spt g; sps ". "]
;;
(* "Calculons un élément de g." — goal is an element of a data type. *)
let calculons_un_element_de g =
  match !natural_language with
  | French ->
      sph [prls ["Calculons"; "un"; "élément"; "de"]; spb; spt g; sps ". "]
  | English ->
      sph [prls ["Let us"; "compute"; "an"; "element"; "of"];
           spb; spt g; sps ". "]
;;
(* "Calculons une fonction de type g." — goal is a function type. *)
let calculons_une_fonction_de_type g =
  match !natural_language with
  | French ->
      sphv [prls ["Calculons"; "une"; "fonction"; "de"; "type"];
            spb; spt g; sps ". "]
  | English ->
      sphv [prls ["Let"; "us"; "compute"; "a"; "function"; "of"; "type"];
            spb; spt g; sps ". "]
;;
(* "Après simplification, on doit prouver g." — goal after a Simpl step. *)
let en_simplifiant_on_obtient g =
  match !natural_language with
  | French ->
      sphv [prls [rand ["Après simplification,"; "En simplifiant,"];
                  rand ["on doit"; "il reste à"];
                  rand ["prouver"; "montrer"; "démontrer"]];
            spb; spt g; sps ". "]
  | English ->
      sphv [prls [rand ["After simplification,"; "Simplifying,"];
                  rand ["we must"; "it remains to"];
                  rand ["prove"; "show"]];
            spb; spt g; sps ". "]
;;
(* "on doit prouver g." — remaining goal (e.g. after a rewrite). *)
let on_obtient g =
  match !natural_language with
  | French ->
      sph [prls [rand ["on doit"; "il reste à"];
                 rand ["prouver"; "montrer"; "démontrer"]];
           spb; spt g; sps ". "]
  | English ->
      sph [prls [rand ["we must"; "it remains to"];
                 rand ["prove"; "show"]];
           spb; spt g; sps ". "]
;;
(* "Reste à montrer g." / "It remains to show g." *)
let reste_a_montrer g =
  match !natural_language with
  | French ->
      sph [prls ["Reste"; "à"; rand ["prouver"; "montrer"; "démontrer"]];
           spb; spt g; sps ". "]
  | English ->
      sph [prls ["It remains"; "to"; rand ["prove"; "show"]];
           spb; spt g; sps ". "]
;;
(* "Discutons avec T:" — case analysis on a proposition. *)
let discutons_avec_A type_arg =
  match !natural_language with
  | French ->
      sphv [sps "Discutons"; spb; sps "avec"; spb; spt type_arg; sps ":"]
  | English ->
      sphv [sps "Let us discuss"; spb; sps "with"; spb; spt type_arg; sps ":"]
;;
(* "Utilisons a:" / "Let us use a:" (randomized opener). *)
let utilisons_A arg1 =
  match !natural_language with
  | French ->
      sphv [sps (rand ["Utilisons"; "Avec"; "A l'aide de"]);
            spb; spt arg1; sps ":"]
  | English ->
      sphv [sps (rand ["Let us use"; "With"; "With the help of"]);
            spb; spt arg1; sps ":"]
;;
(* "Selon les valeurs de a:" — case split on a data value. *)
let selon_les_valeurs_de_A arg1 =
  match !natural_language with
  | French ->
      sphv [prls ["Selon"; "les"; "valeurs"; "de"]; spb; spt arg1; sps ":"]
  | English ->
      sphv [prls ["According"; "values"; "of"]; spb; spt arg1; sps ":"]
;;
(* "De a on déduit:" / "From a we deduce:" (randomized wording). *)
let de_A_on_a arg1 =
  match !natural_language with
  | French ->
      sphv [sps (rand ["De"; "Avec"; "Grâce à"]); spb; spt arg1; spb;
            sps (rand ["on a:"; "on déduit:"; "on obtient:"])]
  | English ->
      sphv [sps (rand ["From"; "With"; "Thanks to"]); spb;
            spt arg1; spb;
            sps (rand ["we have:"; "we deduce:"; "we obtain:"])]
;;
(* "Procédons par récurrence sur a:" / "By induction on a:". *)
let procedons_par_recurrence_sur_A arg1 =
  match !natural_language with
  | French ->
      sphv [prls ["Procédons"; "par"; "récurrence"; "sur"];
            spb; spt arg1; sps ":"]
  | English ->
      sphv [prls ["By"; "induction"; "on"]; spb; spt arg1; sps ":"]
;;
(* "Calculons la fonction [nfun] de type [tfun] par récurrence sur son
   argument [narg]:" — header for a Fix-style definition by recursion. *)
let calculons_la_fonction_F_de_type_T_par_recurrence_sur_son_argument_A
    nfun tfun narg =
  match !natural_language with
  | French ->
      sphv [sphv [prls ["Calculons"; "la"; "fonction"];
                  spb; sps (string_of_id nfun); spb;
                  prls ["de"; "type"];
                  spb; spt tfun; spb;
                  prls ["par"; "récurrence"; "sur"; "son"; "argument"];
                  spb; sps (string_of_int narg); sps ":"]]
  | English ->
      sphv [sphv [prls ["Let us compute"; "the"; "function"];
                  spb; sps (string_of_id nfun); spb;
                  prls ["of"; "type"];
                  spb; spt tfun; spb;
                  prls ["by"; "induction"; "on"; "its"; "argument"];
                  spb; sps (string_of_int narg); sps ":"]]
;;
(* "Pour montrer g, choisissons a." — witness choice for an existential. *)
let pour_montrer_G_la_valeur_recherchee_est_A g arg1 =
  match !natural_language with
  | French ->
      sph [sps "Pour"; spb; sps "montrer"; spt g; spb;
           sps ","; spb; sps "choisissons"; spb;
           spt arg1; sps ". "]
  | English ->
      sph [sps "In order to"; spb; sps "show"; spt g; spb;
           sps ","; spb; sps "let us choose"; spb;
           spt arg1; sps ". "]
;;
(* "On se sert de a:" / "We use a:". *)
let on_se_sert_de_A arg1 =
  match !natural_language with
  | French -> sph [sps "On se sert de"; spb; spt arg1; sps ":"]
  | English -> sph [sps "We use"; spb; spt arg1; sps ":"]
;;
(* "d'où g." / "then g." — indented consequence line. *)
let d_ou_A g =
  match !natural_language with
  | French -> sph [spi; sps "d'où"; spb; spt g; sps ". "]
  | English -> sph [spi; sps "then"; spb; spt g; sps ". "]
;;
(* "Coq le démontre seul." — the system closed the goal automatically
   (randomly picks among a few informal variants). *)
let coq_le_demontre_seul () =
  match !natural_language with
  | French ->
      rand [prls ["Coq"; "le"; "démontre"; "seul."];
            sps "Fastoche.";
            sps "Trop cool"]
  | English ->
      rand [prls ["Coq"; "shows"; "it"; "alone."];
            sps "Fingers in the nose."]
;;
(* "De a on déduit donc g" / "From a we deduce then g". *)
let de_A_on_deduit_donc_B arg g =
  match !natural_language with
  | French ->
      sph [sps "De"; spb; spt arg; spb; sps "on"; spb;
           sps "déduit"; spb; sps "donc"; spb; spt g]
  | English ->
      sph [sps "From"; spb; spt arg; spb; sps "we"; spb;
           sps "deduce"; spb; sps "then"; spb; spt g]
;;
(* "g est immédiat par a" / "g is immediate from a". *)
let _A_est_immediat_par_B g arg =
  match !natural_language with
  | French -> sph [spt g; spb; prls ["est"; "immédiat"; "par"]; spb; spt arg]
  | English -> sph [spt g; spb; prls ["is"; "immediate"; "from"]; spb; spt arg]
;;
(* "le résultat est a" / "the result is a". *)
let le_resultat_est arg =
  match !natural_language with
  | French -> sph [prls ["le"; "résultat"; "est"]; spb; spt arg]
  | English -> sph [prls ["the"; "result"; "is"]; spb; spt arg]
;;
(* "on applique la tactique <tactic> <tac>" — generic fallback sentence. *)
let on_applique_la_tactique tactic tac =
  match !natural_language with
  | French ->
      sphv [sps "on applique"; spb; sps "la tactique"; spb; tactic; spb; tac]
  | English ->
      sphv [sps "we apply"; spb; sps "the tactic"; spb; tactic; spb; tac]
;;
(* "De a il vient g." / "From a it comes g." *)
let de_A_il_vient_B arg g =
  match !natural_language with
  | French ->
      sph [sps "De"; spb; spt arg; spb;
           sps "il"; spb; sps "vient"; spb; spt g; sps ". "]
  | English ->
      sph [sps "From"; spb; spt arg; spb;
           sps "it"; spb; sps "comes"; spb; spt g; sps ". "]
;;
(* "Trivial." in both languages. *)
let ce_qui_est_trivial () =
  match !natural_language with
  | French -> sps "Trivial."
  | English -> sps "Trivial."
;;
(* "En utilisant l'egalite a," / "Using the equality a,". *)
let en_utilisant_l_egalite_A arg =
  match !natural_language with
  | French ->
      sphv [sps "En"; spb; sps "utilisant"; spb;
            sps "l'egalite"; spb; spt arg; sps ","]
  | English ->
      sphv [sps "Using"; spb; sps "the equality"; spb; spt arg; sps ","]
;;
(* "En simplifiant [hyp] on obtient: [thyp]." — hypothesis simplification. *)
let simplifions_H_T hyp thyp =
  match !natural_language with
  | French ->
      sphv [sps "En simplifiant"; spb; sps hyp; spb; sps "on obtient:";
            spb; spt thyp; sps "."]
  | English ->
      sphv [sps "Simplifying"; spb; sps hyp; spb; sps "we get:";
            spb; spt thyp; sps "."]
;;
(* "Grâce à a, il suffit de montrer <goals>" — apply-style reduction of
   the goal to the subgoals [lg]. *)
let grace_a_A_il_suffit_de_montrer_LA arg lg =
  match !natural_language with
  | French ->
      sphv ([sps (rand ["Grâce à"; "Avec"; "A l'aide de"]); spb;
             spt arg; sps ","; spb;
             sps "il suffit"; spb; sps "de"; spb;
             sps (rand ["prouver"; "montrer"; "démontrer"]); spb]
            @ [spv (enumerate (fun x -> x) lg)])
  | English ->
      sphv ([sps (rand ["Thanks to"; "With"]); spb;
             spt arg; sps ","; spb;
             sps "it suffices"; spb; sps "to"; spb;
             sps (rand ["prove"; "show"]); spb]
            @ [spv (enumerate (fun x -> x) lg)])
;;
(* "Il reste à montrer <goals>" / "It remains to show <goals>". *)
let reste_a_montrer_LA lg =
  match !natural_language with
  | French ->
      sphv ([sps "Il reste"; spb; sps "à"; spb;
             sps (rand ["prouver"; "montrer"; "démontrer"]); spb]
            @ [spv (enumerate (fun x -> x) lg)])
  | English ->
      sphv ([sps "It remains"; spb; sps "to"; spb;
             sps (rand ["prove"; "show"]); spb]
            @ [spv (enumerate (fun x -> x) lg)])
;;
(*****************************************************************************)
(*
Traduction des hypothèses.
*)
(* Classification of a hypothesis or goal, used to pick the sentence form. *)
type n_sort =
    Nprop       (* a proposition (sort Prop itself) *)
  | Nformula    (* a logical formula to assume/prove *)
  | Ntype       (* an element of a data type *)
  | Nfunction   (* a function (product type) *)
;;
(* Classify the type [t] (whose own sort is [ts]) as one of the [n_sort]
   categories: Prop itself, a formula, a function, or a data-type element. *)
let sort_of_type t ts =
  let t = strip_outer_cast t in
  if is_Prop t then Nprop
  else
    match ts with
    | Prop(Null) -> Nformula
    | _ ->
        (match kind_of_term t with
         | Prod(_,_,_) -> Nfunction
         | _ -> Ntype)
;;
(* Push the binder [x] with type [t] (and no body) onto environment [e].
   Named and anonymous binders are pushed identically, so the original
   two-branch match — whose [Name(xid)] branch bound [xid] without using
   it — is collapsed into a single direct call. *)
let adrel (x,t) e = Environ.push_rel (x,None,t) e
(* Compute the sort of term [x] in environment [vl], descending through
   binders (pushing them onto the environment), applications, casts and
   case expressions, and consulting the global data for inductives,
   constructors and constants.  Falls back to the sort of the inferred
   type in the default case. *)
let rec nsortrec vl x =
     match (kind_of_term x) with
     Prod(n,t,c)->
       let vl = (adrel (n,t) vl) in nsortrec vl c
   | Lambda(n,t,c) ->
       let vl = (adrel (n,t) vl) in nsortrec vl c
   | App(f,args) -> nsortrec vl f
   | Sort(Prop(Null)) -> Prop(Null)
   | Sort(c) -> c
   | Ind(ind) ->
       let (mib,mip) = lookup_mind_specif vl ind in
       new_sort_in_family (inductive_sort_family mip)
   | Construct(c) ->
       nsortrec vl (mkInd (inductive_of_constructor c))
   | Case(_,x,t,a)
       -> nsortrec vl x
   | Cast(x,_, t)-> nsortrec vl t
   | Const c -> nsortrec vl (Typeops.type_of_constant vl c)
   | _ -> nsortrec vl (type_of vl Evd.empty x)
;;
(* Sort of [x] in the current global environment (outer casts stripped). *)
let nsort x =
  nsortrec (Global.env()) (strip_outer_cast x)
;;
(* [n_sort] classification of a hypothesis record. *)
let sort_of_hyp h =
  (sort_of_type h.hyp_type (nsort h.hyp_full_type))
;;
(* grouper les hypotheses successives de meme type, ou logiques.
   liste de liste
   donne une liste de liste *)
(* Group consecutive hypotheses that share the same type, or that are
   both logical formulas, into sublists (so each group can be verbalized
   in a single sentence).  Returns a list of non-empty groups. *)
let rec group_lhyp lh =
  match lh with
  | [] -> []
  | [h] -> [[h]]
  | h :: rest ->
      (match group_lhyp rest with
       | (h1 :: _ as grp) :: grps ->
           if h.hyp_type = h1.hyp_type
              || (sort_of_hyp h = sort_of_hyp h1
                  && sort_of_hyp h1 = Nformula)
           then (h :: grp) :: grps
           else [h] :: (grp :: grps)
       | _ -> assert false)
;;
(* ln noms des hypotheses, lt leurs types *)
(* Verbalize one *group* of hypotheses sharing a single classification:
   [sort] is their common [n_sort], [ln] their names, [lt] their types
   (same length, assumed non-empty); [intro] is the introductory word
   used only for formulas ("Supposons"/"Suppose"). *)
let natural_ghyp (sort,ln,lt) intro =
  let t=List.hd lt in
  let nh=List.length ln in
  (* Kept (unused) from the original code. *)
  let _ns=List.hd ln in
  match sort with
    Nprop -> soit_A_une_proposition nh ln t
  | Ntype -> soit_X_un_element_de_T nh ln t
  | Nfunction -> soit_F_une_fonction_de_type_T nh ln t
  | Nformula ->
      sphv ((sps intro)::(enumerate (fun (n,t) -> tag_hypt n t)
                            (List.combine ln lt)))
;;
(* Cas d'une hypothese. *)
(* Verbalize a single hypothesis [h], introduced by "Supposons"/"Suppose"
   when it is a formula. *)
let natural_hyp h =
  let name = string_of_id h.hyp_name in
  let ty = h.hyp_type in
  let sort = nsort h.hyp_full_type in
  natural_ghyp (sort_of_type ty sort, [name], [ty]) (supposons ())
;;
(* Render a list of hypothesis groups (as produced for [natural_ghyp]).
   [intro] is used only for the first formula group; between a non-formula
   group and a following formula group, a "tel que"/"telle que"
   ("such that") connector is inserted instead of a full stop. *)
let rec pr_ghyp lh intro=
  match lh with
    [] -> []
  | [(sort,ln,t)]->
      (* Last group: close the sentence with ". ". *)
      (match sort with
        Nformula -> [natural_ghyp(sort,ln,t) intro; sps ". "]
      | _ -> [natural_ghyp(sort,ln,t) ""; sps ". "])
  | (sort,ln,t)::lh ->
      let hp=
        ([natural_ghyp(sort,ln,t) intro]
         @(match lh with
             [] -> [sps ". "]
           |(sort1,ln1,t1)::lh1 ->
              (* Choose the connector from this group to the next one. *)
              match sort1 with
                Nformula ->
                  (let nh=List.length ln in
                   match sort with
                     Nprop -> telle_que nh
                   |Nfunction -> telle_que nh
                   |Ntype -> tel_que nh
                   |Nformula -> [sps ". "])
              | _ -> [sps ". "])) in
      (sphv hp)::(pr_ghyp lh "")
;;
(* traduction d'une liste d'hypotheses groupees. *)
(* Verbalize a list of hypothesis groups [llh] (as built by [group_lhyp]):
   each group is summarized as (sort, names, types) and handed to
   [pr_ghyp]; [intro] is the introductory word for the first formula. *)
let prnatural_ghyp llh intro =
  if llh = [] then spe
  else
    let summarize grp =
      let h = List.hd grp in
      (sort_of_hyp h,
       List.map (fun h -> string_of_id h.hyp_name) grp,
       List.map (fun h -> h.hyp_type) grp)
    in
    sphv (pr_ghyp (List.map summarize llh) intro)
;;
(*****************************************************************************)
(*
Liste des hypotheses.
*)
(* How the hypotheses of a subgoal must be verbalized.
   NOTE(review): the "(string list) (* rec hyp *)" components of the two
   Elim constructors had been mangled by a mechanical transformation of
   this file (only the bare words "rec hyp" remained); they are restored
   here from the use sites ([Elim_subgoals_hyp (var,c,a,lhci,ncase)] in
   [natural_lhyp], where [lhci] is traversed as a list of the strings
   "arg"/"argrec"/"hyprec"). *)
type type_info_subgoals_hyp=
    All_subgoals_hyp
  | Reduce_hyp
  | No_subgoals_hyp
  | Case_subgoals_hyp of string (* word for introduction *)
                       * Term.constr (* variable *)
                       * string (* constructor *)
                       * int (* arity *)
                       * int (* number of constructors *)
  | Case_prop_subgoals_hyp of string (* word for introduction *)
                       * Term.constr (* variable *)
                       * int (* index of constructor *)
                       * int (* arity *)
                       * int (* number of constructors *)
  | Elim_subgoals_hyp of Term.constr (* variable *)
                       * string (* constructor *)
                       * int (* arity *)
                       * (string list) (* rec hyp *)
                       * int (* number of constructors *)
  | Elim_prop_subgoals_hyp of Term.constr (* variable *)
                       * int (* index of constructor *)
                       * int (* arity *)
                       * (string list) (* rec hyp *)
                       * int (* number of constructors *)
;;
(* [nrem l n]: drop the first [n] elements of [l] ([l] itself when n <= 0). *)
let rec nrem l n =
  if n<=0 then l else nrem (list_rem l) (n-1)
;;
(* [nhd l n]: take the first [n] elements of [l] ([] when n <= 0). *)
let rec nhd l n =
  if n<=0 then [] else (List.hd l)::(nhd (list_rem l) (n-1))
;;
(* "par hypothèse de récurrence," / "by induction hypothesis,". *)
let par_hypothese_de_recurrence () =
  match !natural_language with
  | French -> sphv [prls ["par"; "hypothèse"; "de"; "récurrence"; ","]]
  | English -> sphv [prls ["by"; "induction"; "hypothesis"; ","]]
;;
(* Verbalize the list [lh] of (new) hypotheses of a subgoal according to
   the mode [hi] (see [type_info_subgoals_hyp]).
   NOTE(review): the pattern line for [Case_subgoals_hyp] had been lost
   in a mechanical transformation of this file — only its body and the
   trailing comment "sintro pas encore utilisee" survived.  It is
   restored below from the type declaration and the parallel
   [Case_prop_subgoals_hyp] branch; TODO confirm against the upstream
   source. *)
let natural_lhyp lh hi =
  match hi with
    All_subgoals_hyp ->
      (* Plain introduction: group similar hypotheses, then verbalize. *)
      ( match lh with
          [] -> spe
        |_-> prnatural_ghyp (group_lhyp lh) (supposons ()))
  | Reduce_hyp ->
      (match lh with
          [h] -> simplifions_H_T (string_of_id h.hyp_name) h.hyp_type
        | _-> spe)
  | No_subgoals_hyp -> spe
  |Case_subgoals_hyp (sintro,var,c,a,ncase) -> (* sintro pas encore utilisee *)
      (* Build "(c h1 ... ha)": constructor applied to the hypothesis
         names standing for its arguments. *)
      let s=ref c in
      for i=1 to a do
        let nh=(List.nth lh (i-1)) in
        s:=(!s)^" "^(string_of_id nh.hyp_name);
      done;
      if a>0 then s:="("^(!s)^")";
      sphv [ (if ncase>1
              then sph[ sps ("-"^(cas ()));spb]
              else spe);
             (* spt var;sps "="; *) sps !s; sps ":";
             (prphrases (natural_hyp) (nrem lh a))]
  |Case_prop_subgoals_hyp (sintro,var,c,a,ncase) ->
      prnatural_ghyp (group_lhyp lh) sintro
  |Elim_subgoals_hyp (var,c,a,lhci,ncase) ->
      (* Split [lh]: constructor arguments (tags "arg"/"argrec") go to
         [lh0]; induction hypotheses themselves go to [lh1]. *)
      let nlh = List.length lh in
      let nlhci = List.length lhci in
      let lh0 = ref [] in
      for i=1 to (nlh-nlhci) do
        lh0:=(!lh0)@[List.nth lh (i-1)];
      done;
      let lh=nrem lh (nlh-nlhci) in
      let s=ref c in
      let lh1=ref [] in
      for i=1 to nlhci do
        let targ=(List.nth lhci (i-1)) in
        let nh=(List.nth lh (i-1)) in
        if targ="arg" || targ="argrec"
        then
          (s:=(!s)^" "^(string_of_id nh.hyp_name);
           lh0:=(!lh0)@[nh])
        else lh1:=(!lh1)@[nh];
      done;
      let introhyprec=
        (if (!lh1)=[] then spe
         else par_hypothese_de_recurrence () )
      in
      if a>0 then s:="("^(!s)^")";
      spv [sphv [(if ncase>1
                  then sph[ sps ("-"^(cas ()));spb]
                  else spe);
                 sps !s; sps ":"];
           prnatural_ghyp (group_lhyp !lh0) (supposons ());
           introhyprec;
           prl (natural_hyp) !lh1]
  |Elim_prop_subgoals_hyp (var,c,a,lhci,ncase) ->
      sphv [ (if ncase>1
              then sph[ sps ("-"^(cas ()));spb;sps (string_of_int c);
                        sps ":";spb]
              else spe);
             (prphrases (natural_hyp) lh )]
;;
(*****************************************************************************)
(*
Analyse des tactiques.
*)
(* Printable name of a (base) tactic; only the cases needed so far are
   implemented, everything else fails. *)
let name_tactic = function
  | TacIntroPattern _ -> "Intro"
  | TacAssumption -> "Assumption"
  | _ -> failwith "TODO"
;;
(* A historical implementation of [arg1_tactic] (written for the old,
   pre-V8 tactic AST) was kept here commented out; the opening comment
   delimiter had been lost in a mechanical transformation, leaving the
   dead code exposed.  It is re-commented below.
let arg1_tactic tac =
  match tac with
    (Node(_,"Interp",
          (Node(_,_,
                (Node(_,_,x::_))::_))::_))::_ -> x
  | (Node(_,_,x::_))::_ -> x
  | x::_ -> x
  | _ -> assert false
;;
*)
(* Extract the first argument of a tactic expression: not yet
   reimplemented for the current tactic AST. *)
let arg1_tactic tac = failwith "TODO";;
(* How to verbalize a subgoal: its hypothesis mode plus the keyword
   selecting the goal-introduction sentence (see [show_goal]). *)
type type_info_subgoals =
    {ihsg: type_info_subgoals_hyp;
     isgintro : string}
;;
(* Render the sentence announcing goal [g] (of sort [gs]) according to
   the introduction mode [ig]; [lh] is the list of freshly introduced
   hypotheses (only used to silence "intros" on an empty list). *)
let rec show_goal lh ig g gs =
  match ig with
  | "intros" -> if lh = [] then spe else show_goal lh "standard" g gs
  | "standard" ->
      (match sort_of_type g gs with
       | Nprop -> donnons_une_proposition ()
       | Nformula -> montrons g
       | Ntype -> calculons_un_element_de g
       | Nfunction -> calculons_une_fonction_de_type g)
  | "apply" -> show_goal lh "" g gs
  | "simpl" -> en_simplifiant_on_obtient g
  | "rewrite" -> on_obtient g
  | "equality" | "trivial_equality" -> reste_a_montrer g
  | "" -> spe
  | _ -> sph [sps "A faire ..."; spb; spt g; sps ". "]
;;
(* Like [show_goal], but takes the packed [type_info_subgoals] record and
   a trailing string [s]; prints nothing when there is nothing to say. *)
let show_goal2 lh {ihsg=_hi; isgintro=intro_kind} g gs s =
  match intro_kind, lh with
  | "", [] -> spe
  | _ -> sphv [show_goal lh intro_kind g gs; sps s]
;;
(* "Imaginez une preuve de" / "Imagine a proof of". *)
let imaginez_une_preuve_de () =
  match !natural_language with
  | French -> "Imaginez une preuve de"
  | English -> "Imagine a proof of"
;;
(* "Donnez un element de" / "Give an element of". *)
let donnez_un_element_de () =
  match !natural_language with
  | French -> "Donnez un element de"
  | English -> "Give an element of"
;;
(* Opening sentence for a goal left unproved: ask for a proof when the
   goal lives in Prop, for an element otherwise. *)
let intro_not_proved_goal gs =
  match gs with
  | Prop(Null) -> imaginez_une_preuve_de ()
  | _ -> donnez_un_element_de ()
;;
(* Name of the first (new) hypothesis of a proof-tree node; fails when
   the node introduces no hypothesis. *)
let first_name_hyp_of_ntree {t_goal={newhyp=lh}} =
  match lh with
  | {hyp_name=n} :: _ -> n
  | _ -> assert false
;;
(* In the product type [t], find the type of the bound variable whose
   printed name is [x]; fails if [t] runs out of products first. *)
let rec find_type x t =
  match kind_of_term (strip_outer_cast t) with
  | Prod(binder, ty, body) ->
      (match binder with
       | Name y when x = string_of_id y -> ty
       | _ -> find_type x body)
  | _ -> assert false
;;
(***********************************************************************
Traitement des égalités
*)
(*
let is_equality e =
match (kind_of_term e) with
AppL args ->
(match (kind_of_term args.(0)) with
Const (c,_) ->
(match (string_of_sp c) with
"Equal" -> true
| "eq" -> true
| "eqT" -> true
| "identityT" -> true
| _ -> false)
| _ -> false)
| _ -> false
;;
*)
(* Crude syntactic test for "this statement is an equality": after
   stripping outer casts, any application with at least three arguments
   counts.  (A precise head-constant test is kept, disabled, above.) *)
let is_equality e =
  match kind_of_term (strip_outer_cast e) with
  | App (_, args) -> Array.length args >= 3
  | _ -> false
;;
(* Both sides of an (assumed) equality statement: arguments 1 and 2 of
   the application, argument 0 being the carrier type. *)
let terms_of_equality e =
  match kind_of_term (strip_outer_cast e) with
  | App (_, args) -> (args.(1), args.(2))
  | _ -> assert false
;;
(* Term equality: alias for Coq's [eq_constr]. *)
let eq_term = eq_constr;;
(* Tactics that keep an equational chain going: rewrites, replace,
   reductions, symmetry/reflexivity and a few closers.  Used by
   [equalities_ntree] to group successive equality goals into one
   displayed chain of "=" steps. *)
let is_equality_tac = function
  | TacAtom (_,
      (TacExtend
         (_,("ERewriteLR"|"ERewriteRL"|"ERewriteLRocc"|"ERewriteRLocc"
            |"ERewriteParallel"|"ERewriteNormal"
            |"RewriteLR"|"RewriteRL"|"Replace"),_)
      | TacReduce _
      | TacSymmetry _ | TacReflexivity
      | TacExact _ | TacIntroPattern _ | TacIntroMove _ | TacAuto _)) -> true
  | _ -> false
(* Collect the maximal chain of equality goals starting at [ntree]:
   follow the first subtree while the tactic is equality-preserving,
   recording one [(ig,node)] pair per *distinct* equality conclusion
   (consecutive identical conclusions are collapsed).  Returns [] when
   the root conclusion is not an equality. *)
let equalities_ntree ig ntree =
  let rec equalities_ntree ig ntree =
    if not (is_equality (concl ntree))
    then []
    else
      match (proof ntree) with
        Notproved -> [(ig,ntree)]
      | Proof (tac,ltree) ->
          if is_equality_tac tac
          then (match ltree with
                  [] -> [(ig,ntree)]
                | t::_ -> let res=(equalities_ntree ig t) in
                          (* Skip nodes whose conclusion did not change. *)
                          if eq_term (concl ntree) (concl t)
                          then res
                          else (ig,ntree)::res)
          else [(ig,ntree)]
  in
  equalities_ntree ig ntree
;;
(* Collapse maximal runs of adjacent pairs whose first components are
   equal terms, keeping only the last pair of each run. *)
let rec remove_seq_of_terms l =
  match l with
  | a :: (b :: _ as rest) ->
      if eq_term (fst a) (fst b)
      then remove_seq_of_terms rest
      else a :: remove_seq_of_terms rest
  | _ -> l
;;
(* Render a chain of (term, lemma) pairs as a vertical chain of
   equalities "t1 = t2 = t3 ...", each step tagged with the lemma that
   justifies it.  [o] selects which of the two lemmas attached to a step
   is displayed ([switch] alternates them down the chain).  Fails on an
   empty list. *)
let list_to_eq l o=
  let switch = fun h h' -> (if o then h else h') in
  match l with
    [a] -> spt (fst a)
  | (a,h)::(b,h')::l ->
      let rec list_to_eq h l =
        match l with
          [] -> []
        | (b,h')::l ->
            (sph [sps "="; spb; spt b; spb;tag_uselemma (switch h h') spe])
            :: (list_to_eq (switch h' h) l)
      in sph [spt a; spb;
              spv ((sph [sps "="; spb; spt b; spb;
                         tag_uselemma (switch h h') spe])
                   ::(list_to_eq (switch h' h) l))]
  | _ -> assert false
;;
(* Shorthand for the current global environment. *)
let stde = Global.env;;
(* Interpret a concrete-syntax term in environment [env] (empty evar map). *)
let dbize env = Constrintern.interp_constr Evd.empty env;;
(**********************************************************************)
let rec natural_ntree ig ntree =
let {t_info=info;
t_goal={newhyp=lh;t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p} = ntree in
let leq = List.rev (equalities_ntree ig ntree) in
if List.length leq > 1
then (* Several equalities to treate ... *)
(
print_string("Several equalities to treate ...\n");
let l1 = ref [] in
let l2 = ref [] in
List.iter
(fun (_,ntree) ->
let lemma = match (proof ntree) with
Proof (tac,ltree) ->
TODO
(match ltree with
[] ->spe
| [_] -> spe
| _::l -> sphv[sps ": ";
prli (natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="standard"})
l])])
with _ -> sps "simplification" )
| Notproved -> spe
in
let (t1,t2)= terms_of_equality (concl ntree) in
l2:=(t2,lemma)::(!l2);
l1:=(t1,lemma)::(!l1))
leq;
l1:=remove_seq_of_terms !l1;
l2:=remove_seq_of_terms !l2;
l2:=List.rev !l2;
let ltext=ref [] in
if List.length !l1 > 1
then (ltext:=(!ltext)@[list_to_eq !l1 true];
if List.length !l2 > 1 then
(ltext:=(!ltext)@[_et()];
ltext:=(!ltext)@[list_to_eq !l2 false]))
else if List.length !l2 > 1 then ltext:=(!ltext)@[list_to_eq !l2 false];
if !ltext<>[] then ltext:=[sps (bon_a ()); spv !ltext];
let (ig,ntree)=(List.hd leq) in
spv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g (nsort gf) "");
sph !ltext;
natural_ntree {ihsg=All_subgoals_hyp;
isgintro=
let (t1,t2)= terms_of_equality (concl ntree) in
if eq_term t1 t2
then "trivial_equality"
else "equality"}
ntree]
)
else
let ntext =
let gs=nsort gf in
match p with
Notproved -> spv [ (natural_lhyp lh ig.ihsg);
sph [spi; sps (intro_not_proved_goal gs); spb;
tag_toprove g ]
]
| Proof (TacId _,ltree) -> natural_ntree ig (List.hd ltree)
| Proof (TacAtom (_,tac),ltree) ->
(let ntext =
match tac with
(* Pas besoin de l'argument éventuel de la tactique *)
TacIntroPattern _ -> natural_intros ig lh g gs ltree
| TacIntroMove _ -> natural_intros ig lh g gs ltree
| TacFix (_,n) -> natural_fix ig lh g gs n ltree
| TacSplit (_,_,NoBindings) -> natural_split ig lh g gs ge [] ltree
| TacSplit(_,_,ImplicitBindings l) -> natural_split ig lh g gs ge (List.map snd l) ltree
| TacGeneralize l -> natural_generalize ig lh g gs ge l ltree
| TacRight _ -> natural_right ig lh g gs ltree
| TacLeft _ -> natural_left ig lh g gs ltree
| (* "Simpl" *)TacReduce (r,cl) ->
natural_reduce ig lh g gs ge r cl ltree
| TacExtend (_,"InfoAuto",[]) -> natural_infoauto ig lh g gs ltree
| TacAuto _ -> natural_auto ig lh g gs ltree
| TacExtend (_,"EAuto",_) -> natural_auto ig lh g gs ltree
| TacTrivial _ -> natural_trivial ig lh g gs ltree
| TacAssumption -> natural_trivial ig lh g gs ltree
| TacClear _ -> natural_clear ig lh g gs ltree
(* Besoin de l'argument de la tactique *)
| TacSimpleInduction (NamedHyp id) ->
natural_induction ig lh g gs ge id ltree false
| TacExtend (_,"InductionIntro",[a]) ->
let id=(out_gen wit_ident a) in
natural_induction ig lh g gs ge id ltree true
| TacApply (_,false,(c,_)) -> natural_apply ig lh g gs (snd c) ltree
| TacExact c -> natural_exact ig lh g gs (snd c) ltree
| TacCut c -> natural_cut ig lh g gs (snd c) ltree
| TacExtend (_,"CutIntro",[a]) ->
let _c = out_gen wit_constr a in
natural_cutintro ig lh g gs a ltree
| TacCase (_,(c,_)) -> natural_case ig lh g gs ge (snd c) ltree false
| TacExtend (_,"CaseIntro",[a]) ->
let c = out_gen wit_constr a in
natural_case ig lh g gs ge c ltree true
| TacElim (_,(c,_),_) ->
natural_elim ig lh g gs ge (snd c) ltree false
| TacExtend (_,"ElimIntro",[a]) ->
let c = out_gen wit_constr a in
natural_elim ig lh g gs ge c ltree true
| TacExtend (_,"Rewrite",[_;a]) ->
let (c,_) = out_gen wit_constr_with_bindings a in
natural_rewrite ig lh g gs c ltree
| TacExtend (_,"ERewriteRL",[a]) ->
TODO
natural_rewrite ig lh g gs c ltree
| TacExtend (_,"ERewriteLR",[a]) ->
TODO
natural_rewrite ig lh g gs c ltree
|_ -> natural_generic ig lh g gs (sps (name_tactic tac)) (prl sp_tac [tac]) ltree
in
ntext (* spwithtac ntext tactic*)
)
| Proof _ -> failwith "Don't know what to do with that"
in
if info<>"not_proved"
then spshrink info ntext
else ntext
and natural_generic ig lh g gs tactic tac ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
on_applique_la_tactique tactic tac ;
(prli(natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="standard"})
ltree)
]
and natural_clear ig lh g gs ltree = natural_ntree ig (List.hd ltree)
spv
[ ( natural_lhyp lh ig.ihsg ) ;
( show_goal2 lh ig g gs " " ) ;
( prl ( natural_ntree ig ) ltree )
]
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prl (natural_ntree ig) ltree)
]
*)
and natural_intros ig lh g gs ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prl (natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="intros"})
ltree)
]
and natural_apply ig lh g gs arg ltree =
let lg = List.map concl ltree in
match lg with
[] ->
spv
[ (natural_lhyp lh ig.ihsg);
de_A_il_vient_B arg g
]
| [sg]->
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh
{ihsg=ig.ihsg; isgintro= if ig.isgintro<>"apply"
then "standard"
else ""}
g gs "");
grace_a_A_il_suffit_de_montrer_LA arg [spt sg];
sph [spi ; natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="apply"} (List.hd ltree)]
]
| _ ->
let ln = List.map (fun _ -> new_name()) lg in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh
{ihsg=ig.ihsg; isgintro= if ig.isgintro<>"apply"
then "standard"
else ""}
g gs "");
grace_a_A_il_suffit_de_montrer_LA arg
(List.map2 (fun g n -> sph [sps ("("^n^")"); spb; spt g])
lg ln);
sph [spi; spv (List.map2
(fun x n -> sph [sps ("("^n^"):"); spb;
natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="apply"} x])
ltree ln)]
]
and natural_rem_goals ltree =
let lg = List.map concl ltree in
match lg with
[] -> spe
| [sg]->
spv
[ reste_a_montrer_LA [spt sg];
sph [spi ; natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="apply"} (List.hd ltree)]
]
| _ ->
let ln = List.map (fun _ -> new_name()) lg in
spv
[ reste_a_montrer_LA
(List.map2 (fun g n -> sph [sps ("("^n^")"); spb; spt g])
lg ln);
sph [spi; spv (List.map2
(fun x n -> sph [sps ("("^n^"):"); spb;
natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="apply"} x])
ltree ln)]
]
and natural_exact ig lh g gs arg ltree =
spv
[
(natural_lhyp lh ig.ihsg);
(let {ihsg=pi;isgintro=ig}= ig in
(show_goal2 lh {ihsg=pi;isgintro=""}
g gs ""));
(match gs with
Prop(Null) -> _A_est_immediat_par_B g arg
|_ -> le_resultat_est arg)
]
and natural_cut ig lh g gs arg ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
(List.rev ltree));
de_A_on_deduit_donc_B arg g
]
and natural_cutintro ig lh g gs arg ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
sph [spi;
(natural_ntree
{ihsg=All_subgoals_hyp;isgintro=""}
(List.nth ltree 1))];
sph [spi;
(natural_ntree
{ihsg=No_subgoals_hyp;isgintro=""}
(List.nth ltree 0))]
]
and whd_betadeltaiota x = whd_betaiotaevar (Global.env()) Evd.empty x
and type_of_ast s c = type_of (Global.env()) Evd.empty (constr_of_ast c)
and prod_head t =
match (kind_of_term (strip_outer_cast t)) with
Prod(_,_,c) -> prod_head c
(* |App(f,a) -> f *)
| _ -> t
and string_of_sp sp = string_of_id (basename sp)
and constr_of_mind mip i =
(string_of_id mip.mind_consnames.(i-1))
and arity_of_constr_of_mind env indf i =
(get_constructors env indf).(i-1).cs_nargs
and gLOB ge = Global.env_of_context ge (* (Global.env()) *)
and natural_case ig lh g gs ge arg1 ltree with_intros =
let env= (gLOB ge) in
let targ1 = prod_head (type_of env Evd.empty arg1) in
let IndType (indf,targ) = find_rectype env Evd.empty targ1 in
let ncti= Array.length(get_constructors env indf) in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
let ti =(string_of_id mip.mind_typename) in
let type_arg= targ1 (* List.nth targ (mis_index dmi)*) in
if ncti<>1
Zéro ou Plusieurs constructeurs
then (
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(match (nsort targ1) with
Prop(Null) ->
(match ti with
"or" -> discutons_avec_A type_arg
| _ -> utilisons_A arg1)
|_ -> selon_les_valeurs_de_A arg1);
(let ci=ref 0 in
(prli
(fun treearg -> ci:=!ci+1;
let nci=(constr_of_mind mip !ci) in
let aci=if with_intros
then (arity_of_constr_of_mind env indf !ci)
else 0 in
let ici= (!ci) in
sph[ (natural_ntree
{ihsg=
(match (nsort targ1) with
Prop(Null) ->
Case_prop_subgoals_hyp (supposons (),arg1,ici,aci,
(List.length ltree))
|_-> Case_subgoals_hyp ("",arg1,nci,aci,
(List.length ltree)));
isgintro= if with_intros then "" else "standard"}
treearg)
])
(nrem ltree ((List.length ltree)- ncti))));
(sph [spi; (natural_rem_goals
(nhd ltree ((List.length ltree)- ncti)))])
] )
Cas d'un seul constructeur
else (
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
de_A_on_a arg1;
(let treearg=List.hd ltree in
let nci=(constr_of_mind mip 1) in
let aci=
if with_intros
then (arity_of_constr_of_mind env indf 1)
else 0 in
let _ici= 1 in
sph[ (natural_ntree
{ihsg=
(match (nsort targ1) with
Prop(Null) ->
Case_prop_subgoals_hyp ("",arg1,1,aci,
(List.length ltree))
|_-> Case_subgoals_hyp ("",arg1,nci,aci,
(List.length ltree)));
isgintro=""}
treearg)
]);
(sph [spi; (natural_rem_goals
(nhd ltree ((List.length ltree)- 1)))])
]
)
with _ ->natural_generic ig lh g gs ( sps " Case " ) ( spt arg1 ) ltree
(*****************************************************************************)
(*
Elim
*)
and prod_list_var t =
match (kind_of_term (strip_outer_cast t)) with
Prod(_,t,c) -> t::(prod_list_var c)
|_ -> []
and hd_is_mind t ti =
try (let env = Global.env() in
let IndType (indf,targ) = find_rectype env Evd.empty t in
let _ncti= Array.length(get_constructors env indf) in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
(string_of_id mip.mind_typename) = ti)
with _ -> false
and mind_ind_info_hyp_constr indf c =
let env = Global.env() in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
let _p = mib.mind_nparams in
let a = arity_of_constr_of_mind env indf c in
let lp=ref (get_constructors env indf).(c).cs_args in
let lr=ref [] in
let ti = (string_of_id mip.mind_typename) in
for i=1 to a do
match !lp with
((_,_,t)::lp1)->
if hd_is_mind t ti
then (lr:=(!lr)@["argrec";"hyprec"]; lp:=List.tl lp1)
else (lr:=(!lr)@["arg"];lp:=lp1)
| _ -> raise (Failure "mind_ind_info_hyp_constr")
done;
!lr
mind_ind_info_hyp_constr " le " 2 ; ;
donne [ " arg " ; " argrec " ]
mind_ind_info_hyp_constr " le " 1 ; ;
donne [ ]
mind_ind_info_hyp_constr " nat " 2 ; ;
donne [ " argrec " ]
mind_ind_info_hyp_constr "le" 2;;
donne ["arg"; "argrec"]
mind_ind_info_hyp_constr "le" 1;;
donne []
mind_ind_info_hyp_constr "nat" 2;;
donne ["argrec"]
*)
and natural_elim ig lh g gs ge arg1 ltree with_intros=
let env= (gLOB ge) in
let targ1 = prod_head (type_of env Evd.empty arg1) in
let IndType (indf,targ) = find_rectype env Evd.empty targ1 in
let ncti= Array.length(get_constructors env indf) in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
let _ti =(string_of_id mip.mind_typename) in
let _type_arg=targ1 (* List.nth targ (mis_index dmi) *) in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(match (nsort targ1) with
Prop(Null) -> utilisons_A arg1
|_ ->procedons_par_recurrence_sur_A arg1);
(let ci=ref 0 in
(prli
(fun treearg -> ci:=!ci+1;
let nci=(constr_of_mind mip !ci) in
let aci=(arity_of_constr_of_mind env indf !ci) in
let hci=
if with_intros
then mind_ind_info_hyp_constr indf !ci
else [] in
let ici= (!ci) in
sph[ (natural_ntree
{ihsg=
(match (nsort targ1) with
Prop(Null) ->
Elim_prop_subgoals_hyp (arg1,ici,aci,hci,
(List.length ltree))
|_-> Elim_subgoals_hyp (arg1,nci,aci,hci,
(List.length ltree)));
isgintro= ""}
treearg)
])
(nhd ltree ncti)));
(sph [spi; (natural_rem_goals (nrem ltree ncti))])
]
)
with _ ->natural_generic ig lh g gs ( sps " Elim " ) ( spt arg1 ) ltree
with _ ->natural_generic ig lh g gs (sps "Elim") (spt arg1) ltree *)
(*****************************************************************************)
InductionIntro n
InductionIntro n
*)
and natural_induction ig lh g gs ge arg2 ltree with_intros=
let env = (gLOB (g_env (List.hd ltree))) in
let arg1= mkVar arg2 in
let targ1 = prod_head (type_of env Evd.empty arg1) in
let IndType (indf,targ) = find_rectype env Evd.empty targ1 in
let _ncti= Array.length(get_constructors env indf) in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
let _ti =(string_of_id mip.mind_typename) in
let _type_arg= targ1(*List.nth targ (mis_index dmi)*) in
let lh1= hyps (List.hd ltree) in (* la liste des hyp jusqu'a n *)
(* on les enleve des hypotheses des sous-buts *)
let ltree = List.map
(fun {t_info=info;
t_goal={newhyp=lh;t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p} ->
{t_info=info;
t_goal={newhyp=(nrem lh (List.length lh1));
t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p}) ltree in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(natural_lhyp lh1 All_subgoals_hyp);
(match (print_string "targ1------------\n";(nsort targ1)) with
Prop(Null) -> utilisons_A arg1
|_ -> procedons_par_recurrence_sur_A arg1);
(let ci=ref 0 in
(prli
(fun treearg -> ci:=!ci+1;
let nci=(constr_of_mind mip !ci) in
let aci=(arity_of_constr_of_mind env indf !ci) in
let hci=
if with_intros
then mind_ind_info_hyp_constr indf !ci
else [] in
let ici= (!ci) in
sph[ (natural_ntree
{ihsg=
(match (nsort targ1) with
Prop(Null) ->
Elim_prop_subgoals_hyp (arg1,ici,aci,hci,
(List.length ltree))
|_-> Elim_subgoals_hyp (arg1,nci,aci,hci,
(List.length ltree)));
isgintro= "standard"}
treearg)
])
ltree))
]
(************************************************************************)
(* Points fixes *)
and natural_fix ig lh g gs narg ltree =
let {t_info=info;
t_goal={newhyp=lh1;t_concl=g1;t_full_concl=gf1;
t_full_env=ge1};t_proof=p1}=(List.hd ltree) in
match lh1 with
{hyp_name=nfun;hyp_type=tfun}::lh2 ->
let ltree=[{t_info=info;
t_goal={newhyp=lh2;t_concl=g1;t_full_concl=gf1;
t_full_env=ge1};
t_proof=p1}] in
spv
[ (natural_lhyp lh ig.ihsg);
calculons_la_fonction_F_de_type_T_par_recurrence_sur_son_argument_A nfun tfun narg;
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro=""})
ltree)
]
| _ -> assert false
and natural_reduce ig lh g gs ge mode la ltree =
match la with
{onhyps=Some[]} when la.concl_occs <> no_occurrences_expr ->
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prl (natural_ntree
{ihsg=All_subgoals_hyp;isgintro="simpl"})
ltree)
]
| {onhyps=Some[hyp]} when la.concl_occs = no_occurrences_expr ->
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prl (natural_ntree
{ihsg=Reduce_hyp;isgintro=""})
ltree)
]
| _ -> assert false
(* Render an Exists/Split step.  With one argument, announce the witness
   ("the value we look for is A") before the subgoals; with no argument,
   just render the subgoals.  Other argument counts are unsupported. *)
and natural_split ig lh g gs ge la ltree =
match la with
[arg] ->
let _env= (gLOB ge) in
let arg1= (*dbize _env*) arg in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
pour_montrer_G_la_valeur_recherchee_est_A g arg1;
(prl (natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree)
]
| [] ->
spv
[ (natural_lhyp lh ig.ihsg);
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree)
]
| _ -> assert false
(* Render a Generalize step over a single argument: announce "we use A"
   and recurse on the subgoals.  [_env] and [_type_arg] are computed but
   unused (kept from an older version, see the commented line below). *)
and natural_generalize ig lh g gs ge la ltree =
match la with
[(_,(_,arg)),_] ->
let _env= (gLOB ge) in
let arg1= (*dbize env*) arg in
let _type_arg=type_of (Global.env()) Evd.empty arg in
(* let type_arg=type_of_ast ge arg in*)
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
on_se_sert_de_A arg1;
(prl (natural_ntree
{ihsg=All_subgoals_hyp;isgintro=""})
ltree)
]
| _ -> assert false
(* Render a Right step: hypotheses, then the subproofs, then conclude
   with "hence G" ([d_ou_A]).  Note: same rendering as [natural_left]. *)
and natural_right ig lh g gs ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree);
d_ou_A g
]
(* Render a Left step; rendering is identical to [natural_right]. *)
and natural_left ig lh g gs ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree);
d_ou_A g
]
(* Render an Auto step.  Inside a "trivial_equality" context nothing is
   printed; otherwise, if Auto closed the goal (no subtrees) print
   "Coq proves it alone", else recurse on the remaining subgoals. *)
and natural_auto ig lh g gs ltree =
match ig.isgintro with
"trivial_equality" -> spe
| _ ->
if ltree=[]
then sphv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
coq_le_demontre_seul ()]
else spv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prli (natural_ntree {ihsg=All_subgoals_hyp;isgintro=""}
)
ltree)]
(* Render an InfoAuto step: like [natural_auto] but the detail of the
   automatic proof is kept in a collapsible region ([spshrink]) so the
   reader can expand it on demand. *)
and natural_infoauto ig lh g gs ltree =
match ig.isgintro with
"trivial_equality" ->
spshrink "trivial_equality"
(natural_ntree {ihsg=All_subgoals_hyp;isgintro="standard"}
(List.hd ltree))
| _ -> sphv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
coq_le_demontre_seul ();
spshrink "auto"
(sph [spi;
(natural_ntree
{ihsg=All_subgoals_hyp;isgintro=""}
(List.hd ltree))])]
(* Render a Trivial step: "Trivial." when the tactic closed the goal,
   otherwise recurse on the remaining subgoals. *)
and natural_trivial ig lh g gs ltree =
if ltree=[]
then sphv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
ce_qui_est_trivial () ]
else spv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs ". ");
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree)]
(* Render a Rewrite step: "using the equality [arg]," then the subgoals
   with the "rewrite" introduction wording.  Closes the whole mutually
   recursive [natural_*] definition group. *)
and natural_rewrite ig lh g gs arg ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
en_utilisant_l_egalite_A arg;
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="rewrite"})
ltree)
]
;;
(* Entry point used by [show_proof]: reseed the PRNG with a fixed seed so
   that the pseudo-random wording variations picked by [rand] are
   reproducible from one run to the next, then render the tree. *)
let natural_ntree_path ig g =
  Random.init 0;
  natural_ntree ig g
;;
(* Build the natural-language text of the current proof.
   [lang] selects the output language ("fr" for French, anything else
   English); [gpath] is the goal path (stored reversed in [path] for the
   proof-by-pointing hyperlinks).  Resets the fresh-name counter, converts
   the current proof tree with [get_nproof], and renders it — the goal is
   re-tagged "not_proved" so rendering starts in the unproved state. *)
let show_proof lang gpath =
(match lang with
"fr" -> natural_language:=French
|"en" -> natural_language:=English
| _ -> natural_language:=English);
path:=List.rev gpath;
name_count:=0;
let ntree=(get_nproof ()) in
let {t_info=i;t_goal=g;t_proof=p} =ntree in
root_of_text_proof
(sph [(natural_ntree_path {ihsg=All_subgoals_hyp;
isgintro="standard"}
{t_info="not_proved";t_goal=g;t_proof=p});
spr])
;;
(* Print the French natural-language proof for [path] on standard output. *)
let show_nproof path =
pp (sp_print (sph [spi; show_proof "fr" path]));;
(* Register the "ShowNaturalProof" vernacular command: it prints the
   natural-language rendering of the whole current proof (empty path). *)
vinterp_add "ShowNaturalProof"
(fun _ ->
(fun () ->show_nproof[];()));;
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
debug :
PATH=/usr/local/bin:/usr/bin:$PATH
COQTOP=d:/Tools/coq-7avril
CAMLLIB=/usr/local/lib/ocaml
CAMLP4LIB=/usr/local/lib/camlp4
export CAMLLIB
export COQTOP
export CAMLP4LIB
cd d:/Tools/pcoq/src/text
d:/Tools/coq-7avril/bin/coqtop.byte.exe -I /cygdrive/D/Tools/pcoq/src/abs_syntax -I /cygdrive/D/Tools/pcoq/src/text -I /cygdrive/D/Tools/pcoq/src/coq -I /cygdrive/D/Tools/pcoq/src/pbp -I /cygdrive/D/Tools/pcoq/src/dad -I /cygdrive/D/Tools/pcoq/src/history
l1 : ( A , B : Prop ) A \/ B - > B - > A.
Intros .
.
Qed .
Drop .
# use " /cygdrive / D / Tools / coq-7avril / dev / base_include " ; ;
# load " xlate.cmo " ; ;
# load " translate.cmo " ; ;
# load " showproof_ct.cmo " ; ;
# load " showproof.cmo " ; ;
# load " pbp.cmo " ; ;
# load " debug_tac.cmo " ; ;
# load " name_to_ast.cmo " ; ;
# load " paths.cmo " ; ;
# load " dad.cmo " ; ;
# load " vtp.cmo " ; ;
# load " history.cmo " ; ;
# load " centaur.cmo " ; ;
Xlate.set_xlate_mut_stuff Centaur.globcv ; ;
Xlate.declare_in_coq ( ) ; ;
# use " showproof.ml " ; ;
let pproof x = pP ( sp_print x ) ; ;
Pp_control.set_depth_boxes 100 ; ;
# install_printer pproof ; ;
ep ( ) ; ;
let ( constr_of_string " O " ) ; ;
# trace to_nproof ; ;
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
debug sous cygwin:
PATH=/usr/local/bin:/usr/bin:$PATH
COQTOP=d:/Tools/coq-7avril
CAMLLIB=/usr/local/lib/ocaml
CAMLP4LIB=/usr/local/lib/camlp4
export CAMLLIB
export COQTOP
export CAMLP4LIB
cd d:/Tools/pcoq/src/text
d:/Tools/coq-7avril/bin/coqtop.byte.exe -I /cygdrive/D/Tools/pcoq/src/abs_syntax -I /cygdrive/D/Tools/pcoq/src/text -I /cygdrive/D/Tools/pcoq/src/coq -I /cygdrive/D/Tools/pcoq/src/pbp -I /cygdrive/D/Tools/pcoq/src/dad -I /cygdrive/D/Tools/pcoq/src/history
Lemma l1: (A, B : Prop) A \/ B -> B -> A.
Intros.
Elim H.
Auto.
Qed.
Drop.
#use "/cygdrive/D/Tools/coq-7avril/dev/base_include";;
#load "xlate.cmo";;
#load "translate.cmo";;
#load "showproof_ct.cmo";;
#load "showproof.cmo";;
#load "pbp.cmo";;
#load "debug_tac.cmo";;
#load "name_to_ast.cmo";;
#load "paths.cmo";;
#load "dad.cmo";;
#load "vtp.cmo";;
#load "history.cmo";;
#load "centaur.cmo";;
Xlate.set_xlate_mut_stuff Centaur.globcv;;
Xlate.declare_in_coq();;
#use "showproof.ml";;
let pproof x = pP (sp_print x);;
Pp_control.set_depth_boxes 100;;
#install_printer pproof;;
ep();;
let bidon = ref (constr_of_string "O");;
#trace to_nproof;;
***********************************************************************)
let ep()=show_proof "fr" [];;
| null | https://raw.githubusercontent.com/SamB/coq/8f84aba9ae83a4dc43ea6e804227ae8cae8086b1/contrib/interface/showproof.ml | ocaml | ***************************************************************************
hypotheses
type complet avec les hypotheses.
let long_type_hyp x y = y;;
Expansion des tactikelles
Differences entre signatures
recupere l'arbre de preuve courant.
***************************************************************************
indentation
en colonne
en colonne, avec indentation
Langues.
***************************************************************************
le path du but en cours.
***************************************************************************
pluriel
***************************************************************************
Traduction des hypothèses.
***************************************************************************
Liste des hypotheses.
word for introduction
variable
constructor
arity
number of constructors
word for introduction
variable
index of constructor
arity
number of constructors
variable
constructor
arity
number of constructors
variable
index of constructor
arity
number of constructors
spt var;sps "=";
***************************************************************************
Analyse des tactiques.
**********************************************************************
Traitement des égalités
let is_equality e =
match (kind_of_term e) with
AppL args ->
(match (kind_of_term args.(0)) with
Const (c,_) ->
(match (string_of_sp c) with
"Equal" -> true
| "eq" -> true
| "eqT" -> true
| "identityT" -> true
| _ -> false)
| _ -> false)
| _ -> false
;;
********************************************************************
Several equalities to treate ...
Pas besoin de l'argument éventuel de la tactique
"Simpl"
Besoin de l'argument de la tactique
spwithtac ntext tactic
|App(f,a) -> f
(Global.env())
List.nth targ (mis_index dmi)
***************************************************************************
Elim
List.nth targ (mis_index dmi)
***************************************************************************
List.nth targ (mis_index dmi)
la liste des hyp jusqu'a n
on les enleve des hypotheses des sous-buts
**********************************************************************
Points fixes
dbize _env
dbize env
let type_arg=type_of_ast ge arg in |
# use " /cygdrive / D / Tools / coq-7avril / dev / base_include " ; ;
open Coqast ; ;
#use "/cygdrive/D/Tools/coq-7avril/dev/base_include";;
open Coqast;;
*)
open Environ
open Evd
open Names
open Nameops
open Libnames
open Term
open Termops
open Util
open Proof_type
open Pfedit
open Translate
open Term
open Reductionops
open Clenv
open Typing
open Inductive
open Inductiveops
open Vernacinterp
open Declarations
open Showproof_ct
open Proof_trees
open Sign
open Pp
open Printer
open Rawterm
open Tacexpr
open Genarg
:
Arbre de preuve maison:
*)
(* Home-made proof-tree representation used for natural-language output. *)
(* A named hypothesis: its short type plus the "full" type where all the
   preceding hypotheses are re-quantified (products prepended). *)
type nhyp = {hyp_name : identifier;
hyp_type : Term.constr;
hyp_full_type: Term.constr}
;;
(* A tactic is kept as its (untyped) AST. *)
type ntactic = tactic_expr
;;
(* A proof node is either unproved, or a tactic with its subproofs. *)
type nproof =
Notproved
| Proof of ntactic * (ntree list)
(* A goal: new hypotheses introduced at this node, the conclusion, the
   fully quantified conclusion, and the full typing environment. *)
and ngoal=
{newhyp : nhyp list;
t_concl : Term.constr;
t_full_concl: Term.constr;
t_full_env: Environ.named_context_val}
(* A tree node: a status string ("proved"/"not_proved"/"to_prove"),
   its goal, and its (possibly missing) proof. *)
and ntree=
{t_info:string;
t_goal:ngoal;
t_proof : nproof}
;;
(* Field accessors over [ntree]: hypotheses, conclusion, proof and full
   environment of a node.  Plain projections, no computation. *)
let hyps nt = nt.t_goal.newhyp
;;
let concl nt = nt.t_goal.t_concl
;;
let proof nt = nt.t_proof
;;
let g_env nt = nt.t_goal.t_full_env
;;
(* Subtrees of a node ([] when the node is unproved). *)
let sub_ntrees t =
match (proof t) with
Notproved -> []
| Proof (_,l) -> l
;;
(* Tactic applied at a node; fails on an unproved node. *)
let tactic t =
match (proof t) with
Notproved -> failwith "no tactic applied"
| Proof (t,_) -> t
;;
un arbre est contient pas de sous - but non prouves ,
ou bien s'il a un cousin pas a au plus but non clos , le premier sous - but .
un arbre est clos s'il ne contient pas de sous-but non prouves,
ou bien s'il a un cousin gauche qui n'est pas clos
ce qui fait qu'on a au plus un sous-but non clos, le premier sous-but.
*)
(* Recompute every node's [t_info] status.  Traversal is left-to-right and
   stateful: once the first unproved node is found ([found_not_closed]),
   every node visited afterwards is marked "to_prove", so at most one
   subgoal (the first open one) is considered "not_proved". *)
let update_closed nt =
let found_not_closed=ref false in
let rec update {t_info=b; t_goal=g; t_proof =p} =
if !found_not_closed
then {t_info="to_prove"; t_goal=g; t_proof =p}
else
match p with
Notproved -> found_not_closed:=true;
{t_info="not_proved"; t_goal=g; t_proof =p}
| Proof(tac,lt) ->
(* a node is "proved" only if all its (updated) children are *)
let lt1=List.map update lt in
let b=ref "proved" in
(List.iter
(fun x ->
if x.t_info ="not_proved" then b:="not_proved") lt1;
{t_info=(!b);
t_goal=g;
t_proof=Proof(tac,lt1)})
in update nt
;;
(* Quantify [t] over the hypotheses [lh] (innermost first in [lh], hence
   the [List.rev]): each named hypothesis becomes a product, with its
   occurrences in the accumulated type abstracted via [subst_term].
   Anonymous hypotheses are not supported (assert false). *)
let long_type_hyp lh t=
let t=ref t in
List.iter (fun (n,th) ->
let ni = match n with Name ni -> ni | _ -> assert false in
t:= mkProd(n,th,subst_term (mkVar ni) !t))
(List.rev lh);
!t
;;
(* Convert a sequent into an [ngoal]: [sign] are the old (inherited)
   hypotheses, [sign'] the new ones, [cl] the conclusion.  Each new
   hypothesis gets a full type quantified over all hypotheses seen so far
   (the [lh] accumulator grows as we fold over [sign']). *)
let seq_to_lnhyp sign sign' cl =
let lh= ref (List.map (fun (x,c,t) -> (Name x, t)) sign) in
let nh=List.map (fun (id,c,ty) ->
{hyp_name=id;
hyp_type=ty;
hyp_full_type=
let res= long_type_hyp !lh ty in
lh:=(!lh)@[(Name id,ty)];
res})
sign'
in
{newhyp=nh;
t_concl=cl;
t_full_concl=long_type_hyp !lh cl;
t_full_env = Environ.val_of_named_context (sign@sign')}
;;
(* A rule is "complex" when it is a nested tactic expression, Auto or
   Symmetry: those are expanded into their sub-derivation by [to_nproof]. *)
let rule_is_complex r =
match r with
Nested (Tactic
((TacArg (Tacexp _)
|TacAtom (_,(TacAuto _|TacSymmetry _))),_),_) -> true
|_ -> false
;;
(* Extract the tactic AST of a proof rule; Refine becomes Exact, anything
   else defaults to an (empty) intro pattern.  For complex rules only a
   genuine tactic expression is accepted. *)
let rule_to_ntactic r =
let rt =
(match r with
Nested(Tactic (t,_),_) -> t
| Prim (Refine h) -> TacAtom (dummy_loc,TacExact (Tactics.inj_open h))
| _ -> TacAtom (dummy_loc, TacIntroPattern [])) in
if rule_is_complex r
then (match rt with
TacArg (Tacexp _) as t -> t
| _ -> assert false)
else rt
;;
Attribue les preuves de la liste l aux sous - buts
(* Graft the proofs in [l] onto the unproved leaves of [nt], in
   left-to-right order (consumed destructively from the [lnt] ref).
   Assumes [l] has at least as many elements as [nt] has open leaves. *)
let fill_unproved nt l =
let lnt = ref l in
let rec fill nt =
let {t_goal=g;t_proof=p}=nt in
match p with
Notproved -> let p=List.hd (!lnt) in
lnt:=List.tl (!lnt);
{t_info="to_prove";t_goal=g;t_proof=p}
|Proof(tac,lt) ->
{t_info="to_prove";t_goal=g;
t_proof=Proof(tac,List.map fill lt)}
in fill nt
;;
(* Hypotheses of [sign] absent from the old signature [osign]
   (result order is reversed w.r.t. [sign]). *)
let new_sign osign sign =
let res=ref [] in
List.iter (fun (id,c,ty) ->
try (let (_,_,_ty1)= (lookup_named id osign) in
())
with Not_found -> res:=(id,c,ty)::(!res))
sign;
!res
;;
(* Hypotheses of [sign] already present in [osign] with the same type
   (same name but changed type is dropped). *)
let old_sign osign sign =
let res=ref [] in
List.iter (fun (id,c,ty) ->
try (let (_,_,ty1) = (lookup_named id osign) in
if ty1 = ty then res:=(id,c,ty)::(!res))
with Not_found -> ())
sign;
!res
;;
convertit l'arbre de preuve courant en
(* Convert Coq's current proof tree [pf] into our [ntree].  At each node
   the hypotheses are split into old/new relative to the parent signature
   [osign].  "Complex" rules (Auto, Symmetry, nested tactic expressions)
   are expanded: their internal derivation is converted and the subproofs
   of the original node are grafted back onto its open leaves.  A closing
   Auto is wrapped in a special "InfoAuto" node so the renderer can show
   its detail on demand.  Statuses are finalized by [update_closed]. *)
let to_nproof sigma osign pf =
let rec to_nproof_rec sigma osign pf =
let {evar_hyps=sign;evar_concl=cl} = pf.goal in
let sign = Environ.named_context_of_val sign in
let nsign = new_sign osign sign in
let oldsign = old_sign osign sign in
match pf.ref with
None -> {t_info="to_prove";
t_goal=(seq_to_lnhyp oldsign nsign cl);
t_proof=Notproved}
| Some(r,spfl) ->
if rule_is_complex r
then (
(* expand the rule's internal derivation, then re-attach subproofs *)
let p1= to_nproof_rec sigma sign (subproof_of_proof pf) in
let ntree= fill_unproved p1
(List.map (fun x -> (to_nproof_rec sigma sign x).t_proof)
spfl) in
(match r with
Nested(Tactic (TacAtom (_, TacAuto _),_),_) ->
if spfl=[]
then
{t_info="to_prove";
t_goal= {newhyp=[];
t_concl=concl ntree;
t_full_concl=ntree.t_goal.t_full_concl;
t_full_env=ntree.t_goal.t_full_env};
t_proof= Proof (TacAtom (dummy_loc,TacExtend (dummy_loc,"InfoAuto",[])), [ntree])}
else ntree
| _ -> ntree))
else
{t_info="to_prove";
t_goal=(seq_to_lnhyp oldsign nsign cl);
t_proof=(Proof (rule_to_ntactic r,
List.map (fun x -> to_nproof_rec sigma sign x) spfl))}
in update_closed (to_nproof_rec sigma osign pf)
;;
(* Convert the global current proof state. *)
let get_nproof () =
to_nproof (Global.env()) []
(Tacmach.proof_of_pftreestate (get_pftreestate()))
;;
Pprinter
*)
(* Empty pretty-printing element, and tail of a list ([] on empty). *)
let pr_void () = sphs "";;
let list_rem l = match l with [] -> [] |x::l1->l1;;
liste
(* Join a non-empty list of words with breakable spaces. *)
let prls l =
let res = ref (sps (List.hd l)) in
List.iter (fun s ->
res:= sphv [ !res; spb; sps s]) (list_rem l);
!res
;;
(* Print each item followed by a comma, stacked vertically. *)
let prphrases f l =
spv (List.map (fun s -> sphv [f s; sps ","]) l)
;;
(* Standard indentation: three non-breakable spaces. *)
let spi = spnb 3;;
(* Vertical list, or nothing when empty. *)
let prl f l =
if l=[] then spe else spv (List.map f l);;
(* Indented vertical list, or nothing when empty. *)
let prli f l =
if l=[] then spe else sph [spi; spv (List.map f l)];;
(* Pick a random element; wording variation for the generated text. *)
let rand l =
List.nth l (Random.int (List.length l))
;;
type natural_languages = French | English;;
let natural_language = ref French;;
liens html pour proof - by - pointing
Les liens html pour proof-by-pointing
*)
let path=ref[1];;
let ftag_apply =ref (fun (n:string) t -> spt t);;
let ftag_case =ref (fun n -> sps n);;
let ftag_elim =ref (fun n -> sps n);;
let ftag_hypt =ref (fun h t -> sphypt (translate_path !path) h t);;
let ftag_hyp =ref (fun h t -> sphyp (translate_path !path) h t);;
let ftag_uselemma =ref (fun h t ->
let intro = match !natural_language with
French -> "par"
| English -> "by"
in
spuselemma intro h t);;
let ftag_toprove =ref (fun t -> sptoprove (translate_path !path) t);;
let tag_apply = !ftag_apply;;
let tag_case = !ftag_case;;
let tag_elim = !ftag_elim;;
let tag_uselemma = !ftag_uselemma;;
let tag_hyp = !ftag_hyp;;
let tag_hypt = !ftag_hypt;;
let tag_toprove = !ftag_toprove;;
let txtn n s =
if n=1 then s
else match s with
|"un" -> "des"
|"a" -> ""
|"an" -> ""
|"une" -> "des"
|"Soit" -> "Soient"
|"Let" -> "Let"
| s -> s^"s"
;;
let _et () = match !natural_language with
French -> sps "et"
| English -> sps "and"
;;
let name_count = ref 0;;
let new_name () =
name_count:=(!name_count)+1;
string_of_int !name_count
;;
let enumerate f ln =
match ln with
[] -> []
| [x] -> [f x]
|ln ->
let rec enum_rec f ln =
(match ln with
[x;y] -> [f x; spb; sph [_et ();spb;f y]]
|x::l -> [sph [(f x);sps ","];spb]@(enum_rec f l)
| _ -> assert false)
in enum_rec f ln
;;
let constr_of_ast = Constrintern.interp_constr Evd.empty (Global.env());;
let sp_tac tac = failwith "TODO"
let soit_A_une_proposition nh ln t= match !natural_language with
French ->
sphv ([sps (txtn nh "Soit");spb]@(enumerate (fun x -> tag_hyp x t) ln)
@[spb; prls [txtn nh "une";txtn nh "proposition"]])
| English ->
sphv ([sps "Let";spb]@(enumerate (fun x -> tag_hyp x t) ln)
@[spb; prls ["be"; txtn nh "a";txtn nh "proposition"]])
;;
let on_a ()= match !natural_language with
French -> rand ["on a "]
| English ->rand ["we have "]
;;
let bon_a ()= match !natural_language with
French -> rand ["On a "]
| English ->rand ["We have "]
;;
let soit_X_un_element_de_T nh ln t = match !natural_language with
French ->
sphv ([sps (txtn nh "Soit");spb]@(enumerate (fun x -> tag_hyp x t) ln)
@[spb; prls [txtn nh "un";txtn nh "élément";"de"]]
@[spb; spt t])
| English ->
sphv ([sps (txtn nh "Let");spb]@(enumerate (fun x -> tag_hyp x t) ln)
@[spb; prls ["be";txtn nh "an";txtn nh "element";"of"]]
@[spb; spt t])
;;
let soit_F_une_fonction_de_type_T nh ln t = match !natural_language with
French ->
sphv ([sps (txtn nh "Soit");spb]@(enumerate (fun x -> tag_hyp x t) ln)
@[spb; prls [txtn nh "une";txtn nh "fonction";"de";"type"]]
@[spb; spt t])
| English ->
sphv ([sps (txtn nh "Let");spb]@(enumerate (fun x -> tag_hyp x t) ln)
@[spb; prls ["be";txtn nh "a";txtn nh "function";"of";"type"]]
@[spb; spt t])
;;
let telle_que nh = match !natural_language with
French -> [prls [" ";txtn nh "telle";"que";" "]]
| English -> [prls [" "; "such";"that";" "]]
;;
let tel_que nh = match !natural_language with
French -> [prls [" ";txtn nh "tel";"que";" "]]
| English -> [prls [" ";"such";"that";" "]]
;;
let supposons () = match !natural_language with
French -> "Supposons "
| English -> "Suppose "
;;
let cas () = match !natural_language with
French -> "Cas"
| English -> "Case"
;;
let donnons_une_proposition () = match !natural_language with
French -> sph[ (prls ["Donnons";"une";"proposition"])]
| English -> sph[ (prls ["Let us give";"a";"proposition"])]
;;
let montrons g = match !natural_language with
French -> sph[ sps (rand ["Prouvons";"Montrons";"Démontrons"]);
spb; spt g; sps ". "]
| English -> sph[ sps (rand ["Let us";"Now"]);spb;
sps (rand ["prove";"show"]);
spb; spt g; sps ". "]
;;
let calculons_un_element_de g = match !natural_language with
French -> sph[ (prls ["Calculons";"un";"élément";"de"]);
spb; spt g; sps ". "]
| English -> sph[ (prls ["Let us";"compute";"an";"element";"of"]);
spb; spt g; sps ". "]
;;
let calculons_une_fonction_de_type g = match !natural_language with
French -> sphv [ (prls ["Calculons";"une";"fonction";"de";"type"]);
spb; spt g; sps ". "]
| English -> sphv [ (prls ["Let";"us";"compute";"a";"function";"of";"type"]);
spb; spt g; sps ". "];;
let en_simplifiant_on_obtient g = match !natural_language with
French ->
sphv [ (prls [rand ["Après simplification,"; "En simplifiant,"];
rand ["on doit";"il reste à"];
rand ["prouver";"montrer";"démontrer"]]);
spb; spt g; sps ". "]
| English ->
sphv [ (prls [rand ["After simplification,"; "Simplifying,"];
rand ["we must";"it remains to"];
rand ["prove";"show"]]);
spb; spt g; sps ". "] ;;
let on_obtient g = match !natural_language with
French -> sph[ (prls [rand ["on doit";"il reste à"];
rand ["prouver";"montrer";"démontrer"]]);
spb; spt g; sps ". "]
| English ->sph[ (prls [rand ["we must";"it remains to"];
rand ["prove";"show"]]);
spb; spt g; sps ". "]
;;
let reste_a_montrer g = match !natural_language with
French -> sph[ (prls ["Reste";"à";
rand ["prouver";"montrer";"démontrer"]]);
spb; spt g; sps ". "]
| English -> sph[ (prls ["It remains";"to";
rand ["prove";"show"]]);
spb; spt g; sps ". "]
;;
let discutons_avec_A type_arg = match !natural_language with
French -> sphv [sps "Discutons"; spb; sps "avec"; spb;
spt type_arg; sps ":"]
| English -> sphv [sps "Let us discuss"; spb; sps "with"; spb;
spt type_arg; sps ":"]
;;
let utilisons_A arg1 = match !natural_language with
French -> sphv [sps (rand ["Utilisons";"Avec";"A l'aide de"]);
spb; spt arg1; sps ":"]
| English -> sphv [sps (rand ["Let us use";"With";"With the help of"]);
spb; spt arg1; sps ":"]
;;
let selon_les_valeurs_de_A arg1 = match !natural_language with
French -> sphv [ (prls ["Selon";"les";"valeurs";"de"]);
spb; spt arg1; sps ":"]
| English -> sphv [ (prls ["According";"values";"of"]);
spb; spt arg1; sps ":"]
;;
let de_A_on_a arg1 = match !natural_language with
French -> sphv [ sps (rand ["De";"Avec";"Grâce à"]); spb; spt arg1; spb;
sps (rand ["on a:";"on déduit:";"on obtient:"])]
| English -> sphv [ sps (rand ["From";"With";"Thanks to"]); spb;
spt arg1; spb;
sps (rand ["we have:";"we deduce:";"we obtain:"])]
;;
let procedons_par_recurrence_sur_A arg1 = match !natural_language with
French -> sphv [ (prls ["Procédons";"par";"récurrence";"sur"]);
spb; spt arg1; sps ":"]
| English -> sphv [ (prls ["By";"induction";"on"]);
spb; spt arg1; sps ":"]
;;
let calculons_la_fonction_F_de_type_T_par_recurrence_sur_son_argument_A
nfun tfun narg = match !natural_language with
French -> sphv [
sphv [ prls ["Calculons";"la";"fonction"];
spb; sps (string_of_id nfun);spb;
prls ["de";"type"];
spb; spt tfun;spb;
prls ["par";"récurrence";"sur";"son";"argument"];
spb; sps (string_of_int narg); sps ":"]
]
| English -> sphv [
sphv [ prls ["Let us compute";"the";"function"];
spb; sps (string_of_id nfun);spb;
prls ["of";"type"];
spb; spt tfun;spb;
prls ["by";"induction";"on";"its";"argument"];
spb; sps (string_of_int narg); sps ":"]
]
;;
let pour_montrer_G_la_valeur_recherchee_est_A g arg1 =
match !natural_language with
French -> sph [sps "Pour";spb;sps "montrer"; spt g; spb;
sps ","; spb; sps "choisissons";spb;
spt arg1;sps ". " ]
| English -> sph [sps "In order to";spb;sps "show"; spt g; spb;
sps ","; spb; sps "let us choose";spb;
spt arg1;sps ". " ]
;;
let on_se_sert_de_A arg1 = match !natural_language with
French -> sph [sps "On se sert de";spb ;spt arg1;sps ":" ]
| English -> sph [sps "We use";spb ;spt arg1;sps ":" ]
;;
let d_ou_A g = match !natural_language with
French -> sph [spi; sps "d'où";spb ;spt g;sps ". " ]
| English -> sph [spi; sps "then";spb ;spt g;sps ". " ]
;;
let coq_le_demontre_seul () = match !natural_language with
French -> rand [prls ["Coq";"le";"démontre"; "seul."];
sps "Fastoche.";
sps "Trop cool"]
| English -> rand [prls ["Coq";"shows";"it"; "alone."];
sps "Fingers in the nose."]
;;
let de_A_on_deduit_donc_B arg g = match !natural_language with
French -> sph
[ sps "De"; spb; spt arg; spb; sps "on";spb;
sps "déduit";spb; sps "donc";spb; spt g ]
| English -> sph
[ sps "From"; spb; spt arg; spb; sps "we";spb;
sps "deduce";spb; sps "then";spb; spt g ]
;;
let _A_est_immediat_par_B g arg = match !natural_language with
French -> sph [ spt g; spb; (prls ["est";"immédiat";"par"]);
spb; spt arg ]
| English -> sph [ spt g; spb; (prls ["is";"immediate";"from"]);
spb; spt arg ]
;;
let le_resultat_est arg = match !natural_language with
French -> sph [ (prls ["le";"résultat";"est"]);
spb; spt arg ]
| English -> sph [ (prls ["the";"result";"is"]);
spb; spt arg ];;
let on_applique_la_tactique tactic tac = match !natural_language with
French -> sphv
[ sps "on applique";spb;sps "la tactique"; spb;tactic;spb;tac]
| English -> sphv
[ sps "we apply";spb;sps "the tactic"; spb;tactic;spb;tac]
;;
let de_A_il_vient_B arg g = match !natural_language with
French -> sph
[ sps "De"; spb; spt arg; spb;
sps "il";spb; sps "vient";spb; spt g; sps ". " ]
| English -> sph
[ sps "From"; spb; spt arg; spb;
sps "it";spb; sps "comes";spb; spt g; sps ". " ]
;;
let ce_qui_est_trivial () = match !natural_language with
French -> sps "Trivial."
| English -> sps "Trivial."
;;
let en_utilisant_l_egalite_A arg = match !natural_language with
French -> sphv [ sps "En"; spb;sps "utilisant"; spb;
sps "l'egalite"; spb; spt arg; sps ","
]
| English -> sphv [ sps "Using"; spb;
sps "the equality"; spb; spt arg; sps ","
]
;;
let simplifions_H_T hyp thyp = match !natural_language with
French -> sphv [sps"En simplifiant";spb;sps hyp;spb;sps "on obtient:";
spb;spt thyp;sps "."]
| English -> sphv [sps"Simplifying";spb;sps hyp;spb;sps "we get:";
spb;spt thyp;sps "."]
;;
let grace_a_A_il_suffit_de_montrer_LA arg lg=
match !natural_language with
French -> sphv ([sps (rand ["Grâce à";"Avec";"A l'aide de"]);spb;
spt arg;sps ",";spb;
sps "il suffit";spb; sps "de"; spb;
sps (rand["prouver";"montrer";"démontrer"]); spb]
@[spv (enumerate (fun x->x) lg)])
| English -> sphv ([sps (rand ["Thanks to";"With"]);spb;
spt arg;sps ",";spb;
sps "it suffices";spb; sps "to"; spb;
sps (rand["prove";"show"]); spb]
@[spv (enumerate (fun x->x) lg)])
;;
let reste_a_montrer_LA lg=
match !natural_language with
French -> sphv ([ sps "Il reste";spb; sps "à"; spb;
sps (rand["prouver";"montrer";"démontrer"]); spb]
@[spv (enumerate (fun x->x) lg)])
| English -> sphv ([ sps "It remains";spb; sps "to"; spb;
sps (rand["prove";"show"]); spb]
@[spv (enumerate (fun x->x) lg)])
;;
type n_sort=
Nprop
| Nformula
| Ntype
| Nfunction
;;
let sort_of_type t ts =
let t=(strip_outer_cast t) in
if is_Prop t
then Nprop
else
match ts with
Prop(Null) -> Nformula
|_ -> (match (kind_of_term t) with
Prod(_,_,_) -> Nfunction
|_ -> Ntype)
;;
let adrel (x,t) e =
match x with
Name(xid) -> Environ.push_rel (x,None,t) e
| Anonymous -> Environ.push_rel (x,None,t) e
let rec nsortrec vl x =
match (kind_of_term x) with
Prod(n,t,c)->
let vl = (adrel (n,t) vl) in nsortrec vl c
| Lambda(n,t,c) ->
let vl = (adrel (n,t) vl) in nsortrec vl c
| App(f,args) -> nsortrec vl f
| Sort(Prop(Null)) -> Prop(Null)
| Sort(c) -> c
| Ind(ind) ->
let (mib,mip) = lookup_mind_specif vl ind in
new_sort_in_family (inductive_sort_family mip)
| Construct(c) ->
nsortrec vl (mkInd (inductive_of_constructor c))
| Case(_,x,t,a)
-> nsortrec vl x
| Cast(x,_, t)-> nsortrec vl t
| Const c -> nsortrec vl (Typeops.type_of_constant vl c)
| _ -> nsortrec vl (type_of vl Evd.empty x)
;;
let nsort x =
nsortrec (Global.env()) (strip_outer_cast x)
;;
let sort_of_hyp h =
(sort_of_type h.hyp_type (nsort h.hyp_full_type))
;;
grouper les hypotheses successives de meme type , ou logiques .
liste de liste
donne une liste de liste *)
let rec group_lhyp lh =
match lh with
[] -> []
|[h] -> [[h]]
|h::lh ->
match group_lhyp lh with
(h1::lh1)::lh2 ->
if h.hyp_type=h1.hyp_type
|| ((sort_of_hyp h)=(sort_of_hyp h1) && (sort_of_hyp h1)=Nformula)
then (h::(h1::lh1))::lh2
else [h]::((h1::lh1)::lh2)
|_-> assert false
;;
ln noms des hypotheses , lt leurs types
let natural_ghyp (sort,ln,lt) intro =
let t=List.hd lt in
let nh=List.length ln in
let _ns=List.hd ln in
match sort with
Nprop -> soit_A_une_proposition nh ln t
| Ntype -> soit_X_un_element_de_T nh ln t
| Nfunction -> soit_F_une_fonction_de_type_T nh ln t
| Nformula ->
sphv ((sps intro)::(enumerate (fun (n,t) -> tag_hypt n t)
(List.combine ln lt)))
;;
Cas d'une
let natural_hyp h =
let ns= string_of_id h.hyp_name in
let t=h.hyp_type in
let ts= (nsort h.hyp_full_type) in
natural_ghyp ((sort_of_type t ts),[ns],[t]) (supposons ())
;;
let rec pr_ghyp lh intro=
match lh with
[] -> []
| [(sort,ln,t)]->
(match sort with
Nformula -> [natural_ghyp(sort,ln,t) intro; sps ". "]
| _ -> [natural_ghyp(sort,ln,t) ""; sps ". "])
| (sort,ln,t)::lh ->
let hp=
([natural_ghyp(sort,ln,t) intro]
@(match lh with
[] -> [sps ". "]
|(sort1,ln1,t1)::lh1 ->
match sort1 with
Nformula ->
(let nh=List.length ln in
match sort with
Nprop -> telle_que nh
|Nfunction -> telle_que nh
|Ntype -> tel_que nh
|Nformula -> [sps ". "])
| _ -> [sps ". "])) in
(sphv hp)::(pr_ghyp lh "")
;;
traduction d'une liste d'hypotheses groupees .
let prnatural_ghyp llh intro=
if llh=[]
then spe
else
sphv (pr_ghyp (List.map
(fun lh ->
let h=(List.hd lh) in
let sh = sort_of_hyp h in
let lhname = (List.map (fun h ->
string_of_id h.hyp_name) lh) in
let lhtype = (List.map (fun h -> h.hyp_type) lh) in
(sh,lhname,lhtype))
llh) intro)
;;
type type_info_subgoals_hyp=
All_subgoals_hyp
| Reduce_hyp
| No_subgoals_hyp
rec hyp
rec hyp
;;
let rec nrem l n =
if n<=0 then l else nrem (list_rem l) (n-1)
;;
let rec nhd l n =
if n<=0 then [] else (List.hd l)::(nhd (list_rem l) (n-1))
;;
let par_hypothese_de_recurrence () = match !natural_language with
French -> sphv [(prls ["par";"hypothèse";"de";"récurrence";","])]
| English -> sphv [(prls ["by";"induction";"hypothesis";","])]
;;
let natural_lhyp lh hi =
match hi with
All_subgoals_hyp ->
( match lh with
[] -> spe
|_-> prnatural_ghyp (group_lhyp lh) (supposons ()))
| Reduce_hyp ->
(match lh with
[h] -> simplifions_H_T (string_of_id h.hyp_name) h.hyp_type
| _-> spe)
| No_subgoals_hyp -> spe
sintro pas encore utilisee
let s=ref c in
for i=1 to a do
let nh=(List.nth lh (i-1)) in
s:=(!s)^" "^(string_of_id nh.hyp_name);
done;
if a>0 then s:="("^(!s)^")";
sphv [ (if ncase>1
then sph[ sps ("-"^(cas ()));spb]
else spe);
(prphrases (natural_hyp) (nrem lh a))]
|Case_prop_subgoals_hyp (sintro,var,c,a,ncase) ->
prnatural_ghyp (group_lhyp lh) sintro
|Elim_subgoals_hyp (var,c,a,lhci,ncase) ->
let nlh = List.length lh in
let nlhci = List.length lhci in
let lh0 = ref [] in
for i=1 to (nlh-nlhci) do
lh0:=(!lh0)@[List.nth lh (i-1)];
done;
let lh=nrem lh (nlh-nlhci) in
let s=ref c in
let lh1=ref [] in
for i=1 to nlhci do
let targ=(List.nth lhci (i-1))in
let nh=(List.nth lh (i-1)) in
if targ="arg" || targ="argrec"
then
(s:=(!s)^" "^(string_of_id nh.hyp_name);
lh0:=(!lh0)@[nh])
else lh1:=(!lh1)@[nh];
done;
let introhyprec=
(if (!lh1)=[] then spe
else par_hypothese_de_recurrence () )
in
if a>0 then s:="("^(!s)^")";
spv [sphv [(if ncase>1
then sph[ sps ("-"^(cas ()));spb]
else spe);
sps !s; sps ":"];
prnatural_ghyp (group_lhyp !lh0) (supposons ());
introhyprec;
prl (natural_hyp) !lh1]
|Elim_prop_subgoals_hyp (var,c,a,lhci,ncase) ->
sphv [ (if ncase>1
then sph[ sps ("-"^(cas ()));spb;sps (string_of_int c);
sps ":";spb]
else spe);
(prphrases (natural_hyp) lh )]
;;
let name_tactic = function
| TacIntroPattern _ -> "Intro"
| TacAssumption -> "Assumption"
| _ -> failwith "TODO"
;;
let arg1_tactic tac =
match tac with
( Node(_,"Interp " ,
( Node ( _ , _ ,
( : : _ ) ): : _ ) ): : _ ) ): : _ ->x
| ( : : _ ) ): : _ - > x
| x : : _ - > x
| _ - > assert false
; ;
let arg1_tactic tac =
match tac with
(Node(_,"Interp",
(Node(_,_,
(Node(_,_,x::_))::_))::_))::_ ->x
| (Node(_,_,x::_))::_ -> x
| x::_ -> x
| _ -> assert false
;;
*)
let arg1_tactic tac = failwith "TODO";;
type type_info_subgoals =
{ihsg: type_info_subgoals_hyp;
isgintro : string}
;;
let rec show_goal lh ig g gs =
match ig with
"intros" ->
if lh = []
then spe
else show_goal lh "standard" g gs
|"standard" ->
(match (sort_of_type g gs) with
Nprop -> donnons_une_proposition ()
| Nformula -> montrons g
| Ntype -> calculons_un_element_de g
| Nfunction ->calculons_une_fonction_de_type g)
| "apply" -> show_goal lh "" g gs
| "simpl" ->en_simplifiant_on_obtient g
| "rewrite" -> on_obtient g
| "equality" -> reste_a_montrer g
| "trivial_equality" -> reste_a_montrer g
| "" -> spe
|_ -> sph[ sps "A faire ..."; spb; spt g; sps ". " ]
;;
let show_goal2 lh {ihsg=hi;isgintro=ig} g gs s =
if ig="" && lh = []
then spe
else sphv [ show_goal lh ig g gs; sps s]
;;
let imaginez_une_preuve_de () = match !natural_language with
French -> "Imaginez une preuve de"
| English -> "Imagine a proof of"
;;
let donnez_un_element_de () = match !natural_language with
French -> "Donnez un element de"
| English -> "Give an element of";;
let intro_not_proved_goal gs =
match gs with
Prop(Null) -> imaginez_une_preuve_de ()
|_ -> donnez_un_element_de ()
;;
let first_name_hyp_of_ntree {t_goal={newhyp=lh}}=
match lh with
{hyp_name=n}::_ -> n
| _ -> assert false
;;
let rec find_type x t=
match (kind_of_term (strip_outer_cast t)) with
Prod(y,ty,t) ->
(match y with
Name y ->
if x=(string_of_id y) then ty
else find_type x t
| _ -> find_type x t)
|_-> assert false
;;
let is_equality e =
let e= (strip_outer_cast e) in
match (kind_of_term e) with
App (f,args) -> (Array.length args) >= 3
| _ -> false
;;
let terms_of_equality e =
let e= (strip_outer_cast e) in
match (kind_of_term e) with
App (f,args) -> (args.(1) , args.(2))
| _ -> assert false
;;
let eq_term = eq_constr;;
let is_equality_tac = function
| TacAtom (_,
(TacExtend
(_,("ERewriteLR"|"ERewriteRL"|"ERewriteLRocc"|"ERewriteRLocc"
|"ERewriteParallel"|"ERewriteNormal"
|"RewriteLR"|"RewriteRL"|"Replace"),_)
| TacReduce _
| TacSymmetry _ | TacReflexivity
| TacExact _ | TacIntroPattern _ | TacIntroMove _ | TacAuto _)) -> true
| _ -> false
let equalities_ntree ig ntree =
let rec equalities_ntree ig ntree =
if not (is_equality (concl ntree))
then []
else
match (proof ntree) with
Notproved -> [(ig,ntree)]
| Proof (tac,ltree) ->
if is_equality_tac tac
then (match ltree with
[] -> [(ig,ntree)]
| t::_ -> let res=(equalities_ntree ig t) in
if eq_term (concl ntree) (concl t)
then res
else (ig,ntree)::res)
else [(ig,ntree)]
in
equalities_ntree ig ntree
;;
(* Collapse adjacent pairs whose first components are syntactically equal,
   keeping the last element of each run of duplicates. *)
let remove_seq_of_terms l =
  let rec dedup = function
    | x :: (y :: _ as rest) ->
        if eq_term (fst x) (fst y)
        then dedup rest
        else x :: dedup rest
    | short -> short
  in dedup l
;;
(* Lay out a chain of equalities [t0 = t1 = t2 = ...] from the list of
   (term, justification) pairs [l].  The flag [o] selects which of the
   two adjacent justifications labels each "=" step ([switch] picks the
   first when [o] is true, otherwise the second, alternating as the
   chain is traversed).  A singleton renders as the bare term; asserts
   on the empty list. *)
let list_to_eq l o=
  let switch = fun h h' -> (if o then h else h') in
  match l with
    [a] -> spt (fst a)
  | (a,h)::(b,h')::l ->
      let rec list_to_eq h l =
        match l with
          [] -> []
        | (b,h')::l ->
            (sph [sps "="; spb; spt b; spb;tag_uselemma (switch h h') spe])
            :: (list_to_eq (switch h' h) l)
      in sph [spt a; spb;
              spv ((sph [sps "="; spb; spt b; spb;
                         tag_uselemma (switch h h') spe])
                   ::(list_to_eq (switch h' h) l))]
  | _ -> assert false
;;
(* Accessor for the current global environment. *)
let stde = Global.env;;
(* Interpret a concrete-syntax term in environment [env] (empty evar map). *)
let dbize env = Constrintern.interp_constr Evd.empty env;;
let rec natural_ntree ig ntree =
let {t_info=info;
t_goal={newhyp=lh;t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p} = ntree in
let leq = List.rev (equalities_ntree ig ntree) in
if List.length leq > 1
(
print_string("Several equalities to treate ...\n");
let l1 = ref [] in
let l2 = ref [] in
List.iter
(fun (_,ntree) ->
let lemma = match (proof ntree) with
Proof (tac,ltree) ->
TODO
(match ltree with
[] ->spe
| [_] -> spe
| _::l -> sphv[sps ": ";
prli (natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="standard"})
l])])
with _ -> sps "simplification" )
| Notproved -> spe
in
let (t1,t2)= terms_of_equality (concl ntree) in
l2:=(t2,lemma)::(!l2);
l1:=(t1,lemma)::(!l1))
leq;
l1:=remove_seq_of_terms !l1;
l2:=remove_seq_of_terms !l2;
l2:=List.rev !l2;
let ltext=ref [] in
if List.length !l1 > 1
then (ltext:=(!ltext)@[list_to_eq !l1 true];
if List.length !l2 > 1 then
(ltext:=(!ltext)@[_et()];
ltext:=(!ltext)@[list_to_eq !l2 false]))
else if List.length !l2 > 1 then ltext:=(!ltext)@[list_to_eq !l2 false];
if !ltext<>[] then ltext:=[sps (bon_a ()); spv !ltext];
let (ig,ntree)=(List.hd leq) in
spv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g (nsort gf) "");
sph !ltext;
natural_ntree {ihsg=All_subgoals_hyp;
isgintro=
let (t1,t2)= terms_of_equality (concl ntree) in
if eq_term t1 t2
then "trivial_equality"
else "equality"}
ntree]
)
else
let ntext =
let gs=nsort gf in
match p with
Notproved -> spv [ (natural_lhyp lh ig.ihsg);
sph [spi; sps (intro_not_proved_goal gs); spb;
tag_toprove g ]
]
| Proof (TacId _,ltree) -> natural_ntree ig (List.hd ltree)
| Proof (TacAtom (_,tac),ltree) ->
(let ntext =
match tac with
TacIntroPattern _ -> natural_intros ig lh g gs ltree
| TacIntroMove _ -> natural_intros ig lh g gs ltree
| TacFix (_,n) -> natural_fix ig lh g gs n ltree
| TacSplit (_,_,NoBindings) -> natural_split ig lh g gs ge [] ltree
| TacSplit(_,_,ImplicitBindings l) -> natural_split ig lh g gs ge (List.map snd l) ltree
| TacGeneralize l -> natural_generalize ig lh g gs ge l ltree
| TacRight _ -> natural_right ig lh g gs ltree
| TacLeft _ -> natural_left ig lh g gs ltree
natural_reduce ig lh g gs ge r cl ltree
| TacExtend (_,"InfoAuto",[]) -> natural_infoauto ig lh g gs ltree
| TacAuto _ -> natural_auto ig lh g gs ltree
| TacExtend (_,"EAuto",_) -> natural_auto ig lh g gs ltree
| TacTrivial _ -> natural_trivial ig lh g gs ltree
| TacAssumption -> natural_trivial ig lh g gs ltree
| TacClear _ -> natural_clear ig lh g gs ltree
| TacSimpleInduction (NamedHyp id) ->
natural_induction ig lh g gs ge id ltree false
| TacExtend (_,"InductionIntro",[a]) ->
let id=(out_gen wit_ident a) in
natural_induction ig lh g gs ge id ltree true
| TacApply (_,false,(c,_)) -> natural_apply ig lh g gs (snd c) ltree
| TacExact c -> natural_exact ig lh g gs (snd c) ltree
| TacCut c -> natural_cut ig lh g gs (snd c) ltree
| TacExtend (_,"CutIntro",[a]) ->
let _c = out_gen wit_constr a in
natural_cutintro ig lh g gs a ltree
| TacCase (_,(c,_)) -> natural_case ig lh g gs ge (snd c) ltree false
| TacExtend (_,"CaseIntro",[a]) ->
let c = out_gen wit_constr a in
natural_case ig lh g gs ge c ltree true
| TacElim (_,(c,_),_) ->
natural_elim ig lh g gs ge (snd c) ltree false
| TacExtend (_,"ElimIntro",[a]) ->
let c = out_gen wit_constr a in
natural_elim ig lh g gs ge c ltree true
| TacExtend (_,"Rewrite",[_;a]) ->
let (c,_) = out_gen wit_constr_with_bindings a in
natural_rewrite ig lh g gs c ltree
| TacExtend (_,"ERewriteRL",[a]) ->
TODO
natural_rewrite ig lh g gs c ltree
| TacExtend (_,"ERewriteLR",[a]) ->
TODO
natural_rewrite ig lh g gs c ltree
|_ -> natural_generic ig lh g gs (sps (name_tactic tac)) (prl sp_tac [tac]) ltree
in
)
| Proof _ -> failwith "Don't know what to do with that"
in
if info<>"not_proved"
then spshrink info ntext
else ntext
and natural_generic ig lh g gs tactic tac ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
on_applique_la_tactique tactic tac ;
(prli(natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="standard"})
ltree)
]
and natural_clear ig lh g gs ltree = natural_ntree ig (List.hd ltree)
spv
[ ( natural_lhyp lh ig.ihsg ) ;
( show_goal2 lh ig g gs " " ) ;
( prl ( natural_ntree ig ) ltree )
]
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prl (natural_ntree ig) ltree)
]
*)
and natural_intros ig lh g gs ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prl (natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="intros"})
ltree)
]
and natural_apply ig lh g gs arg ltree =
let lg = List.map concl ltree in
match lg with
[] ->
spv
[ (natural_lhyp lh ig.ihsg);
de_A_il_vient_B arg g
]
| [sg]->
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh
{ihsg=ig.ihsg; isgintro= if ig.isgintro<>"apply"
then "standard"
else ""}
g gs "");
grace_a_A_il_suffit_de_montrer_LA arg [spt sg];
sph [spi ; natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="apply"} (List.hd ltree)]
]
| _ ->
let ln = List.map (fun _ -> new_name()) lg in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh
{ihsg=ig.ihsg; isgintro= if ig.isgintro<>"apply"
then "standard"
else ""}
g gs "");
grace_a_A_il_suffit_de_montrer_LA arg
(List.map2 (fun g n -> sph [sps ("("^n^")"); spb; spt g])
lg ln);
sph [spi; spv (List.map2
(fun x n -> sph [sps ("("^n^"):"); spb;
natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="apply"} x])
ltree ln)]
]
and natural_rem_goals ltree =
let lg = List.map concl ltree in
match lg with
[] -> spe
| [sg]->
spv
[ reste_a_montrer_LA [spt sg];
sph [spi ; natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="apply"} (List.hd ltree)]
]
| _ ->
let ln = List.map (fun _ -> new_name()) lg in
spv
[ reste_a_montrer_LA
(List.map2 (fun g n -> sph [sps ("("^n^")"); spb; spt g])
lg ln);
sph [spi; spv (List.map2
(fun x n -> sph [sps ("("^n^"):"); spb;
natural_ntree
{ihsg=All_subgoals_hyp;
isgintro="apply"} x])
ltree ln)]
]
and natural_exact ig lh g gs arg ltree =
spv
[
(natural_lhyp lh ig.ihsg);
(let {ihsg=pi;isgintro=ig}= ig in
(show_goal2 lh {ihsg=pi;isgintro=""}
g gs ""));
(match gs with
Prop(Null) -> _A_est_immediat_par_B g arg
|_ -> le_resultat_est arg)
]
and natural_cut ig lh g gs arg ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
(List.rev ltree));
de_A_on_deduit_donc_B arg g
]
and natural_cutintro ig lh g gs arg ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
sph [spi;
(natural_ntree
{ihsg=All_subgoals_hyp;isgintro=""}
(List.nth ltree 1))];
sph [spi;
(natural_ntree
{ihsg=No_subgoals_hyp;isgintro=""}
(List.nth ltree 0))]
]
and whd_betadeltaiota x = whd_betaiotaevar (Global.env()) Evd.empty x
and type_of_ast s c = type_of (Global.env()) Evd.empty (constr_of_ast c)
and prod_head t =
match (kind_of_term (strip_outer_cast t)) with
Prod(_,_,c) -> prod_head c
| _ -> t
and string_of_sp sp = string_of_id (basename sp)
and constr_of_mind mip i =
(string_of_id mip.mind_consnames.(i-1))
and arity_of_constr_of_mind env indf i =
(get_constructors env indf).(i-1).cs_nargs
and natural_case ig lh g gs ge arg1 ltree with_intros =
let env= (gLOB ge) in
let targ1 = prod_head (type_of env Evd.empty arg1) in
let IndType (indf,targ) = find_rectype env Evd.empty targ1 in
let ncti= Array.length(get_constructors env indf) in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
let ti =(string_of_id mip.mind_typename) in
if ncti<>1
Zéro ou Plusieurs constructeurs
then (
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(match (nsort targ1) with
Prop(Null) ->
(match ti with
"or" -> discutons_avec_A type_arg
| _ -> utilisons_A arg1)
|_ -> selon_les_valeurs_de_A arg1);
(let ci=ref 0 in
(prli
(fun treearg -> ci:=!ci+1;
let nci=(constr_of_mind mip !ci) in
let aci=if with_intros
then (arity_of_constr_of_mind env indf !ci)
else 0 in
let ici= (!ci) in
sph[ (natural_ntree
{ihsg=
(match (nsort targ1) with
Prop(Null) ->
Case_prop_subgoals_hyp (supposons (),arg1,ici,aci,
(List.length ltree))
|_-> Case_subgoals_hyp ("",arg1,nci,aci,
(List.length ltree)));
isgintro= if with_intros then "" else "standard"}
treearg)
])
(nrem ltree ((List.length ltree)- ncti))));
(sph [spi; (natural_rem_goals
(nhd ltree ((List.length ltree)- ncti)))])
] )
Cas d'un seul constructeur
else (
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
de_A_on_a arg1;
(let treearg=List.hd ltree in
let nci=(constr_of_mind mip 1) in
let aci=
if with_intros
then (arity_of_constr_of_mind env indf 1)
else 0 in
let _ici= 1 in
sph[ (natural_ntree
{ihsg=
(match (nsort targ1) with
Prop(Null) ->
Case_prop_subgoals_hyp ("",arg1,1,aci,
(List.length ltree))
|_-> Case_subgoals_hyp ("",arg1,nci,aci,
(List.length ltree)));
isgintro=""}
treearg)
]);
(sph [spi; (natural_rem_goals
(nhd ltree ((List.length ltree)- 1)))])
]
)
with _ ->natural_generic ig lh g gs ( sps " Case " ) ( spt arg1 ) ltree
and prod_list_var t =
match (kind_of_term (strip_outer_cast t)) with
Prod(_,t,c) -> t::(prod_list_var c)
|_ -> []
and hd_is_mind t ti =
try (let env = Global.env() in
let IndType (indf,targ) = find_rectype env Evd.empty t in
let _ncti= Array.length(get_constructors env indf) in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
(string_of_id mip.mind_typename) = ti)
with _ -> false
and mind_ind_info_hyp_constr indf c =
let env = Global.env() in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
let _p = mib.mind_nparams in
let a = arity_of_constr_of_mind env indf c in
let lp=ref (get_constructors env indf).(c).cs_args in
let lr=ref [] in
let ti = (string_of_id mip.mind_typename) in
for i=1 to a do
match !lp with
((_,_,t)::lp1)->
if hd_is_mind t ti
then (lr:=(!lr)@["argrec";"hyprec"]; lp:=List.tl lp1)
else (lr:=(!lr)@["arg"];lp:=lp1)
| _ -> raise (Failure "mind_ind_info_hyp_constr")
done;
!lr
mind_ind_info_hyp_constr " le " 2 ; ;
donne [ " arg " ; " argrec " ]
mind_ind_info_hyp_constr " le " 1 ; ;
donne [ ]
mind_ind_info_hyp_constr " nat " 2 ; ;
donne [ " argrec " ]
mind_ind_info_hyp_constr "le" 2;;
donne ["arg"; "argrec"]
mind_ind_info_hyp_constr "le" 1;;
donne []
mind_ind_info_hyp_constr "nat" 2;;
donne ["argrec"]
*)
and natural_elim ig lh g gs ge arg1 ltree with_intros=
let env= (gLOB ge) in
let targ1 = prod_head (type_of env Evd.empty arg1) in
let IndType (indf,targ) = find_rectype env Evd.empty targ1 in
let ncti= Array.length(get_constructors env indf) in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
let _ti =(string_of_id mip.mind_typename) in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(match (nsort targ1) with
Prop(Null) -> utilisons_A arg1
|_ ->procedons_par_recurrence_sur_A arg1);
(let ci=ref 0 in
(prli
(fun treearg -> ci:=!ci+1;
let nci=(constr_of_mind mip !ci) in
let aci=(arity_of_constr_of_mind env indf !ci) in
let hci=
if with_intros
then mind_ind_info_hyp_constr indf !ci
else [] in
let ici= (!ci) in
sph[ (natural_ntree
{ihsg=
(match (nsort targ1) with
Prop(Null) ->
Elim_prop_subgoals_hyp (arg1,ici,aci,hci,
(List.length ltree))
|_-> Elim_subgoals_hyp (arg1,nci,aci,hci,
(List.length ltree)));
isgintro= ""}
treearg)
])
(nhd ltree ncti)));
(sph [spi; (natural_rem_goals (nrem ltree ncti))])
]
)
with _ ->natural_generic ig lh g gs ( sps " Elim " ) ( spt arg1 ) ltree
with _ ->natural_generic ig lh g gs (sps "Elim") (spt arg1) ltree *)
InductionIntro n
InductionIntro n
*)
and natural_induction ig lh g gs ge arg2 ltree with_intros=
let env = (gLOB (g_env (List.hd ltree))) in
let arg1= mkVar arg2 in
let targ1 = prod_head (type_of env Evd.empty arg1) in
let IndType (indf,targ) = find_rectype env Evd.empty targ1 in
let _ncti= Array.length(get_constructors env indf) in
let (ind,_) = dest_ind_family indf in
let (mib,mip) = lookup_mind_specif env ind in
let _ti =(string_of_id mip.mind_typename) in
let ltree = List.map
(fun {t_info=info;
t_goal={newhyp=lh;t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p} ->
{t_info=info;
t_goal={newhyp=(nrem lh (List.length lh1));
t_concl=g;t_full_concl=gf;t_full_env=ge};
t_proof=p}) ltree in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(natural_lhyp lh1 All_subgoals_hyp);
(match (print_string "targ1------------\n";(nsort targ1)) with
Prop(Null) -> utilisons_A arg1
|_ -> procedons_par_recurrence_sur_A arg1);
(let ci=ref 0 in
(prli
(fun treearg -> ci:=!ci+1;
let nci=(constr_of_mind mip !ci) in
let aci=(arity_of_constr_of_mind env indf !ci) in
let hci=
if with_intros
then mind_ind_info_hyp_constr indf !ci
else [] in
let ici= (!ci) in
sph[ (natural_ntree
{ihsg=
(match (nsort targ1) with
Prop(Null) ->
Elim_prop_subgoals_hyp (arg1,ici,aci,hci,
(List.length ltree))
|_-> Elim_subgoals_hyp (arg1,nci,aci,hci,
(List.length ltree)));
isgintro= "standard"}
treearg)
])
ltree))
]
and natural_fix ig lh g gs narg ltree =
let {t_info=info;
t_goal={newhyp=lh1;t_concl=g1;t_full_concl=gf1;
t_full_env=ge1};t_proof=p1}=(List.hd ltree) in
match lh1 with
{hyp_name=nfun;hyp_type=tfun}::lh2 ->
let ltree=[{t_info=info;
t_goal={newhyp=lh2;t_concl=g1;t_full_concl=gf1;
t_full_env=ge1};
t_proof=p1}] in
spv
[ (natural_lhyp lh ig.ihsg);
calculons_la_fonction_F_de_type_T_par_recurrence_sur_son_argument_A nfun tfun narg;
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro=""})
ltree)
]
| _ -> assert false
and natural_reduce ig lh g gs ge mode la ltree =
match la with
{onhyps=Some[]} when la.concl_occs <> no_occurrences_expr ->
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prl (natural_ntree
{ihsg=All_subgoals_hyp;isgintro="simpl"})
ltree)
]
| {onhyps=Some[hyp]} when la.concl_occs = no_occurrences_expr ->
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prl (natural_ntree
{ihsg=Reduce_hyp;isgintro=""})
ltree)
]
| _ -> assert false
and natural_split ig lh g gs ge la ltree =
match la with
[arg] ->
let _env= (gLOB ge) in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
pour_montrer_G_la_valeur_recherchee_est_A g arg1;
(prl (natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree)
]
| [] ->
spv
[ (natural_lhyp lh ig.ihsg);
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree)
]
| _ -> assert false
and natural_generalize ig lh g gs ge la ltree =
match la with
[(_,(_,arg)),_] ->
let _env= (gLOB ge) in
let _type_arg=type_of (Global.env()) Evd.empty arg in
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
on_se_sert_de_A arg1;
(prl (natural_ntree
{ihsg=All_subgoals_hyp;isgintro=""})
ltree)
]
| _ -> assert false
and natural_right ig lh g gs ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree);
d_ou_A g
]
and natural_left ig lh g gs ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree);
d_ou_A g
]
and natural_auto ig lh g gs ltree =
match ig.isgintro with
"trivial_equality" -> spe
| _ ->
if ltree=[]
then sphv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
coq_le_demontre_seul ()]
else spv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
(prli (natural_ntree {ihsg=All_subgoals_hyp;isgintro=""}
)
ltree)]
and natural_infoauto ig lh g gs ltree =
match ig.isgintro with
"trivial_equality" ->
spshrink "trivial_equality"
(natural_ntree {ihsg=All_subgoals_hyp;isgintro="standard"}
(List.hd ltree))
| _ -> sphv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
coq_le_demontre_seul ();
spshrink "auto"
(sph [spi;
(natural_ntree
{ihsg=All_subgoals_hyp;isgintro=""}
(List.hd ltree))])]
and natural_trivial ig lh g gs ltree =
if ltree=[]
then sphv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
ce_qui_est_trivial () ]
else spv [(natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs ". ");
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="standard"})
ltree)]
and natural_rewrite ig lh g gs arg ltree =
spv
[ (natural_lhyp lh ig.ihsg);
(show_goal2 lh ig g gs "");
en_utilisant_l_egalite_A arg;
(prli(natural_ntree
{ihsg=All_subgoals_hyp;isgintro="rewrite"})
ltree)
]
;;
(* Entry point wrapping [natural_ntree] with a fixed RNG seed so that
   generated hypothesis names are reproducible across runs. *)
let natural_ntree_path ig g =
  Random.init(0);
  natural_ntree ig g
;;
(* Build the natural-language rendering of the current proof.
   [lang] selects the output language ("fr" for French; anything else,
   including "en", falls back to English).  [gpath] is the goal path,
   stored reversed in [path].  Resets the fresh-name counter before
   rendering the proof tree from its root. *)
let show_proof lang gpath =
  (match lang with
    "fr" -> natural_language:=French
  |"en" -> natural_language:=English
  | _ -> natural_language:=English);
  path:=List.rev gpath;
  name_count:=0;
  let ntree=(get_nproof ()) in
  let {t_info=i;t_goal=g;t_proof=p} =ntree in
  root_of_text_proof
    (sph [(natural_ntree_path {ihsg=All_subgoals_hyp;
                               isgintro="standard"}
             {t_info="not_proved";t_goal=g;t_proof=p});
          spr])
;;
(* Pretty-print the natural-language proof at [path] (French rendering). *)
let show_nproof path =
  pp (sp_print (sph [spi; show_proof "fr" path]));;
(* Register the vernacular command "ShowNaturalProof", which prints the
   natural-language proof of the whole current proof tree. *)
vinterp_add "ShowNaturalProof"
  (fun _ ->
    (fun () ->show_nproof[];()));;
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
debug :
PATH=/usr / local / bin:/usr / bin:$PATH
COQTOP = d:/Tools / coq-7avril
CAMLLIB=/usr / local / lib / ocaml
CAMLP4LIB=/usr / local / lib / camlp4
export CAMLLIB
export COQTOP
export CAMLP4LIB
cd d:/Tools / pcoq / src / text
d:/Tools / coq-7avril / bin / coqtop.byte.exe -I /cygdrive / D / Tools / pcoq / src / abs_syntax -I /cygdrive / D / Tools / pcoq / src / text -I /cygdrive / D / Tools / pcoq / src / coq -I /cygdrive / D / Tools / pcoq / src / pbp -I /cygdrive / D / Tools / pcoq / src / dad -I /cygdrive / D / Tools / pcoq / src / history
l1 : ( A , B : Prop ) A \/ B - > B - > A.
Intros .
.
Qed .
Drop .
# use " /cygdrive / D / Tools / coq-7avril / dev / base_include " ; ;
# load " xlate.cmo " ; ;
# load " translate.cmo " ; ;
# load " showproof_ct.cmo " ; ;
# load " showproof.cmo " ; ;
# load " pbp.cmo " ; ;
# load " debug_tac.cmo " ; ;
# load " name_to_ast.cmo " ; ;
# load " paths.cmo " ; ;
# load " dad.cmo " ; ;
# load " vtp.cmo " ; ;
# load " history.cmo " ; ;
# load " centaur.cmo " ; ;
Xlate.set_xlate_mut_stuff Centaur.globcv ; ;
Xlate.declare_in_coq ( ) ; ;
# use " showproof.ml " ; ;
let pproof x = pP ( sp_print x ) ; ;
Pp_control.set_depth_boxes 100 ; ;
# install_printer pproof ; ;
ep ( ) ; ;
let ( constr_of_string " O " ) ; ;
# trace to_nproof ; ;
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
debug sous cygwin:
PATH=/usr/local/bin:/usr/bin:$PATH
COQTOP=d:/Tools/coq-7avril
CAMLLIB=/usr/local/lib/ocaml
CAMLP4LIB=/usr/local/lib/camlp4
export CAMLLIB
export COQTOP
export CAMLP4LIB
cd d:/Tools/pcoq/src/text
d:/Tools/coq-7avril/bin/coqtop.byte.exe -I /cygdrive/D/Tools/pcoq/src/abs_syntax -I /cygdrive/D/Tools/pcoq/src/text -I /cygdrive/D/Tools/pcoq/src/coq -I /cygdrive/D/Tools/pcoq/src/pbp -I /cygdrive/D/Tools/pcoq/src/dad -I /cygdrive/D/Tools/pcoq/src/history
Lemma l1: (A, B : Prop) A \/ B -> B -> A.
Intros.
Elim H.
Auto.
Qed.
Drop.
#use "/cygdrive/D/Tools/coq-7avril/dev/base_include";;
#load "xlate.cmo";;
#load "translate.cmo";;
#load "showproof_ct.cmo";;
#load "showproof.cmo";;
#load "pbp.cmo";;
#load "debug_tac.cmo";;
#load "name_to_ast.cmo";;
#load "paths.cmo";;
#load "dad.cmo";;
#load "vtp.cmo";;
#load "history.cmo";;
#load "centaur.cmo";;
Xlate.set_xlate_mut_stuff Centaur.globcv;;
Xlate.declare_in_coq();;
#use "showproof.ml";;
let pproof x = pP (sp_print x);;
Pp_control.set_depth_boxes 100;;
#install_printer pproof;;
ep();;
let bidon = ref (constr_of_string "O");;
#trace to_nproof;;
***********************************************************************)
let ep()=show_proof "fr" [];;
|
4bfec827444f939f677d7a1b07b06bfbb536be13df4b608635d77017c134c76f | rvantonder/hack_parallel | daemon.mli | *
* Copyright ( c ) 2015 , Facebook , Inc.
* All rights reserved .
*
* This source code is licensed under the BSD - style license found in the
* LICENSE file in the root directory of this source tree . An additional grant
* of patent rights can be found in the PATENTS file in the same directory .
*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*)
(** Type-safe versions of the channels in Pervasives. *)
type 'a in_channel
type 'a out_channel
type ('in_, 'out) channel_pair = 'in_ in_channel * 'out out_channel
val to_channel :
  'a out_channel -> ?flags:Marshal.extern_flags list -> ?flush:bool ->
  'a -> unit
val from_channel : ?timeout:Timeout.t -> 'a in_channel -> 'a
val flush : 'a out_channel -> unit
(* This breaks the type safety, but is necessary in order to allow select() *)
val descr_of_in_channel : 'a in_channel -> Unix.file_descr
val descr_of_out_channel : 'a out_channel -> Unix.file_descr
val cast_in : 'a in_channel -> Timeout.in_channel
val cast_out : 'a out_channel -> Pervasives.out_channel
val close_out : 'a out_channel -> unit
val output_string : 'a out_channel -> string -> unit
val close_in : 'a in_channel -> unit
val input_char : 'a in_channel -> char
val input_value : 'a in_channel -> 'b
(** Spawning new process *)
(* In the absence of 'fork' on Windows, its usage must be restricted
   to Unix specifics parts.
   This module provides a mechanism to "spawn" new instance of the
   current program, but with a custom entry point (e.g. Slaves,
   DfindServer, ...). Then, alternate entry points should not depend
   on global references that may not have been (re)initialised in the
   new process.
   All required data must be passed through the typed channels
   associated to the spawned process. *)
(* Alternate entry points *)
type ('param, 'input, 'output) entry
(* Alternate entry points must be registered at toplevel, i.e.
   every call to `Daemon.register_entry_point` must have been
   evaluated when `Daemon.check_entry_point` is called at the
   beginning of `ServerMain.start`. *)
val register_entry_point :
  string -> ('param -> ('input, 'output) channel_pair -> unit) ->
  ('param, 'input, 'output) entry
(* Handler upon spawn and forked process. *)
type ('in_, 'out) handle = {
  channels : ('in_, 'out) channel_pair;
  pid : int;
}
(* for unit tests *)
val devnull : unit -> ('a, 'b) handle
val fd_of_path : string -> Unix.file_descr
val null_fd : unit -> Unix.file_descr
(* Fork and run a function that communicates via the typed channels *)
val fork :
  ?channel_mode:[ `pipe | `socket ] ->
  (* Where the daemon's output should go *)
  (Unix.file_descr * Unix.file_descr) ->
  ('param -> ('input, 'output) channel_pair -> unit) -> 'param ->
  ('output, 'input) handle
(* Spawn a new instance of the current process, and execute the
   alternate entry point. *)
val spawn :
  ?channel_mode:[ `pipe | `socket ] ->
  (* Where the daemon's input and output should go *)
  (Unix.file_descr * Unix.file_descr * Unix.file_descr) ->
  ('param, 'input, 'output) entry -> 'param -> ('output, 'input) handle
(* Close the typed channels associated to a 'spawned' child. *)
val close : ('a, 'b) handle -> unit
(* Kill a 'spawned' child and close the associated typed channels. *)
val kill : ('a, 'b) handle -> unit
(* Main function, that execute a alternate entry point.
   It should be called only once. Just before the main entry point.
   This function does not return when a custom entry point is selected. *)
val check_entry_point : unit -> unit
| null | https://raw.githubusercontent.com/rvantonder/hack_parallel/c9d0714785adc100345835c1989f7c657e01f629/src/utils/daemon.mli | ocaml | This breaks the type safety, but is necessary in order to allow select()
* Spawning new process
Alternate entry points
Handler upon spawn and forked process.
for unit tests
Fork and run a function that communicates via the typed channels
Where the daemon's output should go
Spawn a new instance of the current process, and execute the
alternate entry point.
Where the daemon's input and output should go
Close the typed channels associated to a 'spawned' child.
Kill a 'spawned' child and close the associated typed channels.
Main function, that execute a alternate entry point.
It should be called only once. Just before the main entry point.
This function does not return when a custom entry point is selected. | *
* Copyright ( c ) 2015 , Facebook , Inc.
* All rights reserved .
*
* This source code is licensed under the BSD - style license found in the
* LICENSE file in the root directory of this source tree . An additional grant
* of patent rights can be found in the PATENTS file in the same directory .
*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*)
* Type - safe versions of the channels in Pervasives .
type 'a in_channel
type 'a out_channel
type ('in_, 'out) channel_pair = 'in_ in_channel * 'out out_channel
val to_channel :
'a out_channel -> ?flags:Marshal.extern_flags list -> ?flush:bool ->
'a -> unit
val from_channel : ?timeout:Timeout.t -> 'a in_channel -> 'a
val flush : 'a out_channel -> unit
val descr_of_in_channel : 'a in_channel -> Unix.file_descr
val descr_of_out_channel : 'a out_channel -> Unix.file_descr
val cast_in : 'a in_channel -> Timeout.in_channel
val cast_out : 'a out_channel -> Pervasives.out_channel
val close_out : 'a out_channel -> unit
val output_string : 'a out_channel -> string -> unit
val close_in : 'a in_channel -> unit
val input_char : 'a in_channel -> char
val input_value : 'a in_channel -> 'b
In the absence of ' fork ' on Windows , its usage must be restricted
to Unix specifics parts .
This module provides a mechanism to " spawn " new instance of the
current program , but with a custom entry point ( e.g. Slaves ,
DfindServer , ... ) . Then , alternate entry points should not depend
on global references that may not have been ( re)initialised in the
new process .
All required data must be passed through the typed channels .
associated to the spawned process .
to Unix specifics parts.
This module provides a mechanism to "spawn" new instance of the
current program, but with a custom entry point (e.g. Slaves,
DfindServer, ...). Then, alternate entry points should not depend
on global references that may not have been (re)initialised in the
new process.
All required data must be passed through the typed channels.
associated to the spawned process.
*)
type ('param, 'input, 'output) entry
Alternate entry points must be registered at toplevel , i.e.
every call to ` Daemon.register_entry_point ` must have been
evaluated when ` Daemon.check_entry_point ` is called at the
beginning of ` ServerMain.start ` .
every call to `Daemon.register_entry_point` must have been
evaluated when `Daemon.check_entry_point` is called at the
beginning of `ServerMain.start`. *)
val register_entry_point :
string -> ('param -> ('input, 'output) channel_pair -> unit) ->
('param, 'input, 'output) entry
type ('in_, 'out) handle = {
channels : ('in_, 'out) channel_pair;
pid : int;
}
val devnull : unit -> ('a, 'b) handle
val fd_of_path : string -> Unix.file_descr
val null_fd : unit -> Unix.file_descr
val fork :
?channel_mode:[ `pipe | `socket ] ->
(Unix.file_descr * Unix.file_descr) ->
('param -> ('input, 'output) channel_pair -> unit) -> 'param ->
('output, 'input) handle
val spawn :
?channel_mode:[ `pipe | `socket ] ->
(Unix.file_descr * Unix.file_descr * Unix.file_descr) ->
('param, 'input, 'output) entry -> 'param -> ('output, 'input) handle
val close : ('a, 'b) handle -> unit
val kill : ('a, 'b) handle -> unit
val check_entry_point : unit -> unit
|
2d1e98e3b11ff8cbb14cd6dbe2371f8edf597829fbe05962a4b21d5d9cc9fcee | michalrus/intero-nix-shim | Main.hs | module InteroNixShim.Main where
import Control.Monad (when)
import Data.Foldable (find, traverse_)
import Data.List (stripPrefix)
import qualified Data.List.Split as S
import Data.Maybe (catMaybes, fromMaybe, maybe)
import Data.Semigroup ((<>))
import Options.Applicative
import System.Directory
import System.Environment (getExecutablePath)
import System.FilePath
import qualified System.Posix.Escape as Posix
import System.Posix.Process (executeFile)
foreign import ccall "silence_stderr" silenceStderr :: IO ()
-- | CLI commands accepted by the shim, mirroring the subset of the
-- @stack@ command-line interface that intero invokes.
data Command
  = Ghci GhciOpts  -- ^ @ghci@: open a REPL via @cabal repl@ inside nix-shell.
  | Exec [String]  -- ^ @exec CMD…@: run a command inside the nix-shell.
  | Path           -- ^ @path --project-root@: print the project root directory.
  | IdeTargets     -- ^ @ide targets@: list the cabal components of the project.
  deriving (Show)
-- | Options recognised for the @ghci@ subcommand (a subset of stack's).
data GhciOpts = GhciOpts
  { withGhc :: Maybe String  -- ^ @--with-ghc@; the value @"intero"@ is resolved to the bundled intero binary.
  , ghcOptions :: [String]   -- ^ Collected @--ghci-options@ and @--ghc-options@ flags.
  , targets :: [String]      -- ^ Positional stack-style build targets.
  } deriving (Show)
-- | Entry point: parse the command line and dispatch to 'run'.
main :: IO ()
main = execParser (info (parse <**> helper) fullDesc) >>= run
-- | Execute one parsed 'Command'.  Every branch that launches something
-- does so through 'nixExec', i.e. inside a pure @nix-shell@ at the
-- project root.
--
-- NOTE: several comment lines in this function had lost their @--@
-- markers (extraction damage), which made the file unparseable; they
-- are restored here with the code itself unchanged.
run :: Command -> IO ()
run (Exec cmd) = do
  intero <- findInteroExec
  -- A leading "intero" is replaced by the absolute path of the bundled binary.
  let absCmd =
        case cmd of
          "intero":t -> intero : t
          xs -> xs
  when
    (cmd == ["intero", "--version"])
    silenceStderr -- https://github.com/michalrus/intero-nix-shim/issues/1
  nixExec absCmd
run (Ghci opt) = do
  cabal <- findCabalExec
  intero <- findInteroExec
  let ghcSubst =
        maybe
          []
          (\p ->
             [ "--with-ghc"
             , if p == "intero"
                 then intero
                 else p
             ])
          (withGhc opt)
  let ghcOpts = (\o -> ["--ghc-options", o]) =<< ghcOptions opt
  -- Workaround for target handling in Cabal 2.×:
  (projectName, availableTargets) <- ideTargets'
  let libTarget = "lib:" ++ projectName
      defaultTargets =
        if libTarget `elem` availableTargets
          then [libTarget]
          else take 1 availableTargets
      targets' =
        case targets opt of
          [] -> defaultTargets
          [t] ->
            -- By default intero specifies just the package name as the target;
            -- stack handles this by loading the library and all executables, excluding
            -- tests and benchmarks. cabal repl can't handle multiple targets, so we
            -- can't do much better than just using the default target.
            if t == projectName
              then defaultTargets
              -- Strip project name prefix from stack target before using as cabal component.
              else [fromMaybe t $ stripPrefix (projectName ++ ":") t]
          _:_:_ ->
            error
              "intero does not support using multiple targets at once \
              \when using intero-nix-shim instead of stack"
  -- Important: do NOT pass `--verbose=0` to `cabal repl` or users’ errors won’t be shown in Flycheck.
  nixExec $ [cabal, "repl"] ++ ghcSubst ++ ghcOpts ++ targets'
run Path = putStrLn =<< rootDir
run IdeTargets = do
  (name, targets') <- ideTargets'
  -- Print each component as "package:component", one per line.
  let mix = (++) (name ++ ":") <$> targets'
  traverse_ putStrLn mix
-- | Re-exec the given command inside a pure @nix-shell@ rooted at the
-- project directory.  Does not return on success: 'executeFile'
-- replaces the current process image.
nixExec :: [String] -> IO ()
nixExec cmd = do
  setCurrentDirectory =<< rootDir
  executeFile
    "nix-shell"
    True
    [ "--pure"
    , "--no-build-output"
    , "--quiet"
    , "--run"
    , "exec " ++ Posix.escapeMany cmd
    ]
    Nothing
-- | Absolute path of the bundled @cabal@ binary under @../libexec@.
findCabalExec :: IO FilePath
findCabalExec = findInLibExec "cabal"
-- | Absolute path of the bundled @intero@ binary under @../libexec@.
findInteroExec :: IO FilePath
findInteroExec = findInLibExec "intero"
-- | Locate an executable named @name@ in the @libexec@ directory that
-- sits next to this program's own @bin@ directory; fail loudly when it
-- is missing.
findInLibExec :: String -> IO FilePath
findInLibExec name = do
  self <- canonicalizePath =<< getExecutablePath
  libexec <- canonicalizePath (takeDirectory self </> ".." </> "libexec")
  candidates <- findExecutablesInDirectories [libexec] name
  case candidates of
    hit:_ -> pure hit
    [] -> error $ "No ‘" ++ name ++ "’ found in ‘" ++ libexec ++ "’."
-- | Project root: the directory containing the project's @.cabal@ file.
rootDir :: IO FilePath
rootDir = takeDirectory <$> cabalFile
-- | Find the nearest @.cabal@ file, searching the current directory and
-- then each ancestor up to the filesystem root; errors if none exists.
cabalFile :: IO FilePath
cabalFile = do
  searchDirs <- ancestors <$> getCurrentDirectory
  results <- catMaybes <$> traverse findCabal searchDirs -- FIXME: suboptimal…
  case results of
    cabal:_ -> return cabal
    _ -> error "No *.cabal file found."
  where
    -- The directory itself followed by all its parents (root reached
    -- when 'takeDirectory' stops changing the path).
    ancestors d = d : iterateUntilRepeated takeDirectory d
-- | Return the first @.cabal@ file (with a non-empty base name) directly
-- inside @dir@, as an absolute-ish path joined onto @dir@.
findCabal :: FilePath -> IO (Maybe FilePath)
findCabal dir = do
  mf <-
    find
      (\f -> takeExtension f == ".cabal" && (not . null $ takeBaseName f)) <$>
    listDirectory dir
  return $ combine dir <$> mf
-- | Repeatedly apply @f@, collecting each successive value (the seed
-- itself is excluded), and stop as soon as applying @f@ no longer
-- changes the value.
iterateUntilRepeated :: Eq a => (a -> a) -> a -> [a]
iterateUntilRepeated f = go
  where
    go cur =
      let nxt = f cur
       in if nxt == cur
            then []
            else nxt : go nxt
ideTargets' :: IO (String, [String])
ideTargets' = ideTargets <$> (readFile =<< cabalFile)
FIXME : yaml / regex / attoparsec ?
ideTargets :: String -> (String, [String])
ideTargets cabal =
let lns = lines cabal
splits = S.split (S.condense . S.dropDelims $ S.oneOf " :") <$> lns
kvs =
splits >>= \case
k:v:_ -> [(k, v)]
_ -> []
name = fromMaybe "_" $ snd <$> find (\(k, _) -> k == "name") kvs
lib = ["lib:" ++ name | "library" `elem` lns]
tpe s l = (++) (s ++ ":") . snd <$> filter (\(k, _) -> k == l) kvs
exe = tpe "exe" "executable"
test = tpe "test" "test-suite"
in (name, lib ++ exe ++ test)
parse :: Parser Command
parse =
hsubparser
(command
"ghci"
(info
(Ghci <$>
(GhciOpts <$> optional (strOption (long "with-ghc")) <*>
((++) <$> many (strOption (long "ghci-options")) <*>
many (strOption (long "ghc-options"))) <*
optional (stringOption (long "docker-run-args")) <*
optional (switch (long "no-build")) <*
optional (switch (long "no-load")) <*
verbosity <*>
many (argument str (metavar "TARGET…"))))
fullDesc) <>
command
"exec"
(info
(Exec <$ verbosity <*> some (argument str (metavar "CMD…")))
fullDesc) <>
command
"path"
(info (Path <$ flag' () (long "project-root") <* verbosity) fullDesc) <>
command
"ghc"
(info
(Exec <$> ((:) "ghc" <$> many (argument str (metavar "ARG…"))) <*
verbosity)
fullDesc) <>
command
"ide"
(info
(hsubparser
(command "targets" (info (IdeTargets <$ verbosity) fullDesc)))
fullDesc) <>
command
"hoogle"
(info
(Exec <$>
((:) "hoogle" <$ verbosity <* optional (switch (long "no-setup")) <*>
((\xs ->
if null xs
then ["--help"]
else xs) <$>
many (argument str (metavar "ARG…")))))
fullDesc))
where
verbosity = optional (stringOption (long "verbosity"))
used to fix an ambiguous IsString type variable for optparse - applicative > = 0.14
stringOption :: Mod OptionFields String -> Parser String
stringOption = strOption
| null | https://raw.githubusercontent.com/michalrus/intero-nix-shim/59776d8b39eba7473bc3424eff5e391728911879/src/InteroNixShim/Main.hs | haskell | -nix-shim/issues/1
stack handles this by loading the library and all executables, excluding
can't do much better than just using the default target.
verbose=0 ` to ` cabal repl ` or users ’ errors wo n’t be shown in Flycheck .
FIXME: suboptimal… | module InteroNixShim.Main where
import Control.Monad (when)
import Data.Foldable (find, traverse_)
import Data.List (stripPrefix)
import qualified Data.List.Split as S
import Data.Maybe (catMaybes, fromMaybe, maybe)
import Data.Semigroup ((<>))
import Options.Applicative
import System.Directory
import System.Environment (getExecutablePath)
import System.FilePath
import qualified System.Posix.Escape as Posix
import System.Posix.Process (executeFile)
foreign import ccall "silence_stderr" silenceStderr :: IO ()
data Command
= Ghci GhciOpts
| Exec [String]
| Path
| IdeTargets
deriving (Show)
data GhciOpts = GhciOpts
{ withGhc :: Maybe String
, ghcOptions :: [String]
, targets :: [String]
} deriving (Show)
main :: IO ()
main = run =<< execParser (info (parse <**> helper) fullDesc)
run :: Command -> IO ()
run (Exec cmd) = do
intero <- findInteroExec
let absCmd =
case cmd of
"intero":t -> intero : t
xs -> xs
when
(cmd == ["intero", "--version"])
nixExec absCmd
run (Ghci opt) = do
cabal <- findCabalExec
intero <- findInteroExec
let ghcSubst =
maybe
[]
(\p ->
[ "--with-ghc"
, if p == "intero"
then intero
else p
])
(withGhc opt)
let ghcOpts = (\o -> ["--ghc-options", o]) =<< ghcOptions opt
Workaround for in Cabal 2.×
(projectName, availableTargets) <- ideTargets'
let libTarget = "lib:" ++ projectName
defaultTargets =
if libTarget `elem` availableTargets
then [libTarget]
else take 1 availableTargets
targets' =
case targets opt of
[] -> defaultTargets
[t]
By default intero specifies just the package name as the target ;
tests and benchmarks . cabal repl ca n't handle multiple targets , so we
->
if t == projectName
then defaultTargets
Strip project name prefix from stack target before using as cabal component
else [fromMaybe t $ stripPrefix (projectName ++ ":") t]
_:_:_ ->
error
"intero does not support using multiple targets at once \
\when using intero-nix-shim instead of stack"
nixExec $ [cabal, "repl"] ++ ghcSubst ++ ghcOpts ++ targets'
run Path = putStrLn =<< rootDir
run IdeTargets = do
(name, targets') <- ideTargets'
let mix = (++) (name ++ ":") <$> targets'
traverse_ putStrLn mix
nixExec :: [String] -> IO ()
nixExec cmd = do
setCurrentDirectory =<< rootDir
executeFile
"nix-shell"
True
[ "--pure"
, "--no-build-output"
, "--quiet"
, "--run"
, "exec " ++ Posix.escapeMany cmd
]
Nothing
findCabalExec :: IO FilePath
findCabalExec = findInLibExec "cabal"
findInteroExec :: IO FilePath
findInteroExec = findInLibExec "intero"
findInLibExec :: String -> IO FilePath
findInLibExec name = do
me <- canonicalizePath =<< getExecutablePath
libexec <- canonicalizePath $ takeDirectory me </> ".." </> "libexec"
x <- findExecutablesInDirectories [libexec] name
case x of
res:_ -> return res
_ -> error $ "No ‘" ++ name ++ "’ found in ‘" ++ libexec ++ "’."
rootDir :: IO FilePath
rootDir = takeDirectory <$> cabalFile
cabalFile :: IO FilePath
cabalFile = do
searchDirs <- ancestors <$> getCurrentDirectory
case results of
cabal:_ -> return cabal
_ -> error "No *.cabal file found."
where
ancestors d = d : iterateUntilRepeated takeDirectory d
findCabal :: FilePath -> IO (Maybe FilePath)
findCabal dir = do
mf <-
find
(\f -> takeExtension f == ".cabal" && (not . null $ takeBaseName f)) <$>
listDirectory dir
return $ combine dir <$> mf
iterateUntilRepeated :: Eq a => (a -> a) -> a -> [a]
iterateUntilRepeated f a0 = reverse $ loop a0 []
where
loop an acc =
let an1 = f an
in if an == an1
then acc
else loop an1 (an1 : acc)
ideTargets' :: IO (String, [String])
ideTargets' = ideTargets <$> (readFile =<< cabalFile)
FIXME : yaml / regex / attoparsec ?
ideTargets :: String -> (String, [String])
ideTargets cabal =
let lns = lines cabal
splits = S.split (S.condense . S.dropDelims $ S.oneOf " :") <$> lns
kvs =
splits >>= \case
k:v:_ -> [(k, v)]
_ -> []
name = fromMaybe "_" $ snd <$> find (\(k, _) -> k == "name") kvs
lib = ["lib:" ++ name | "library" `elem` lns]
tpe s l = (++) (s ++ ":") . snd <$> filter (\(k, _) -> k == l) kvs
exe = tpe "exe" "executable"
test = tpe "test" "test-suite"
in (name, lib ++ exe ++ test)
parse :: Parser Command
parse =
hsubparser
(command
"ghci"
(info
(Ghci <$>
(GhciOpts <$> optional (strOption (long "with-ghc")) <*>
((++) <$> many (strOption (long "ghci-options")) <*>
many (strOption (long "ghc-options"))) <*
optional (stringOption (long "docker-run-args")) <*
optional (switch (long "no-build")) <*
optional (switch (long "no-load")) <*
verbosity <*>
many (argument str (metavar "TARGET…"))))
fullDesc) <>
command
"exec"
(info
(Exec <$ verbosity <*> some (argument str (metavar "CMD…")))
fullDesc) <>
command
"path"
(info (Path <$ flag' () (long "project-root") <* verbosity) fullDesc) <>
command
"ghc"
(info
(Exec <$> ((:) "ghc" <$> many (argument str (metavar "ARG…"))) <*
verbosity)
fullDesc) <>
command
"ide"
(info
(hsubparser
(command "targets" (info (IdeTargets <$ verbosity) fullDesc)))
fullDesc) <>
command
"hoogle"
(info
(Exec <$>
((:) "hoogle" <$ verbosity <* optional (switch (long "no-setup")) <*>
((\xs ->
if null xs
then ["--help"]
else xs) <$>
many (argument str (metavar "ARG…")))))
fullDesc))
where
verbosity = optional (stringOption (long "verbosity"))
used to fix an ambiguous IsString type variable for optparse - applicative > = 0.14
stringOption :: Mod OptionFields String -> Parser String
stringOption = strOption
|
9d033c3fcc0c6fd2f7d2ffebc149c81e3b703be5ab36a09f7b0879c2a255aad0 | janestreet/hardcaml_circuits | lfsr.mli | (** Linear feedback shift registers *)
open Base
open! Hardcaml
module Config : sig
type t =
| Galois
| Fibonacci
[@@deriving enumerate, sexp_of]
end
module Op : sig
type t =
| Xor
| Xnor
[@@deriving enumerate, sexp_of]
end
* Create the update logic for a lfsr . Used in conjuction with [ reg_fb ] to construct
a complete [ lfsr ] .
- Shift register sizes can be between 2 and 168 bits .
- [ Galois ] or [ Fibonacci ] forms are supported - prefer [ Galois ] in general as it has a
shorter critical path .
- The basic gate can be [ ] or [ xnor ] . With [ ] the all 0 's state is invalid
while with [ xnor ] the all 1 's state is invalid .
- The lfsrs generated are complete according to xapp052 - this means they will
sequence through all possible states ( except the invalid one ) before repeating .
- All complete lfsr have a counterpart organisation of the taps which leads to
a second ( but still complete ) sequence .
a complete [lfsr].
- Shift register sizes can be between 2 and 168 bits.
- [Galois] or [Fibonacci] forms are supported - prefer [Galois] in general as it has a
shorter critical path.
- The basic gate can be [xor] or [xnor]. With [xor] the all 0's state is invalid
while with [xnor] the all 1's state is invalid.
- The lfsrs generated are complete according to xapp052 - this means they will
sequence through all possible states (except the invalid one) before repeating.
- All complete lfsr have a counterpart organisation of the taps which leads to
a second (but still complete) sequence. *)
val create
: ?config:Config.t (** default is [Galois]. *)
-> ?counterpart_taps:bool (** default is [false]. *)
-> ?op:Op.t (** default is [Xor] *)
-> (module Hardcaml.Comb.S with type t = 'a)
-> 'a
-> 'a
| null | https://raw.githubusercontent.com/janestreet/hardcaml_circuits/a2c2d1ea3e6957c3cda4767d519e94c20f1172b2/src/lfsr.mli | ocaml | * Linear feedback shift registers
* default is [Galois].
* default is [false].
* default is [Xor] |
open Base
open! Hardcaml
module Config : sig
type t =
| Galois
| Fibonacci
[@@deriving enumerate, sexp_of]
end
module Op : sig
type t =
| Xor
| Xnor
[@@deriving enumerate, sexp_of]
end
* Create the update logic for a lfsr . Used in conjuction with [ reg_fb ] to construct
a complete [ lfsr ] .
- Shift register sizes can be between 2 and 168 bits .
- [ Galois ] or [ Fibonacci ] forms are supported - prefer [ Galois ] in general as it has a
shorter critical path .
- The basic gate can be [ ] or [ xnor ] . With [ ] the all 0 's state is invalid
while with [ xnor ] the all 1 's state is invalid .
- The lfsrs generated are complete according to xapp052 - this means they will
sequence through all possible states ( except the invalid one ) before repeating .
- All complete lfsr have a counterpart organisation of the taps which leads to
a second ( but still complete ) sequence .
a complete [lfsr].
- Shift register sizes can be between 2 and 168 bits.
- [Galois] or [Fibonacci] forms are supported - prefer [Galois] in general as it has a
shorter critical path.
- The basic gate can be [xor] or [xnor]. With [xor] the all 0's state is invalid
while with [xnor] the all 1's state is invalid.
- The lfsrs generated are complete according to xapp052 - this means they will
sequence through all possible states (except the invalid one) before repeating.
- All complete lfsr have a counterpart organisation of the taps which leads to
a second (but still complete) sequence. *)
val create
-> (module Hardcaml.Comb.S with type t = 'a)
-> 'a
-> 'a
|
5f851d4d13dbaae635ea3c908f2350b03769c8e9287c23ce5e84762996f63962 | sgbj/MaximaSharp | ddot.lisp | ;;; Compiled by f2cl version:
( " f2cl1.l , v 1.221 2010/05/26 19:25:52 "
" f2cl2.l , v 1.37 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl3.l , v 1.6 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl4.l , v 1.7 2008/02/22 22:19:34 rtoy Exp $ "
" f2cl5.l , v 1.204 2010/02/23 05:21:30 "
" f2cl6.l , v 1.48 2008/08/24 00:56:27 rtoy Exp $ "
" macros.l , v 1.114 2010/05/17 01:42:14 " )
Using Lisp CMU Common Lisp CVS Head 2010 - 05 - 25 18:21:07 ( 20A Unicode )
;;;
;;; Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
;;; (:coerce-assigns :as-needed) (:array-type ':array)
;;; (:array-slicing t) (:declare-common nil)
;;; (:float-format double-float))
(in-package :colnew)
(defun ddot (n dx incx dy incy)
(declare (type (array double-float (*)) dy dx)
(type (f2cl-lib:integer4) incy incx n))
(f2cl-lib:with-multi-array-data
((dx double-float dx-%data% dx-%offset%)
(dy double-float dy-%data% dy-%offset%))
(prog ((i 0) (ix 0) (iy 0) (m 0) (mp1 0) (dtemp 0.0) (ddot 0.0))
(declare (type (double-float) ddot dtemp)
(type (f2cl-lib:integer4) mp1 m iy ix i))
(setf ddot 0.0)
(setf dtemp 0.0)
(if (<= n 0) (go end_label))
(if (and (= incx 1) (= incy 1)) (go label20))
(setf ix 1)
(setf iy 1)
(if (< incx 0)
(setf ix
(f2cl-lib:int-add
(f2cl-lib:int-mul (f2cl-lib:int-sub 1 n) incx)
1)))
(if (< incy 0)
(setf iy
(f2cl-lib:int-add
(f2cl-lib:int-mul (f2cl-lib:int-sub 1 n) incy)
1)))
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i n) nil)
(tagbody
(setf dtemp
(+ dtemp
(* (f2cl-lib:fref dx-%data% (ix) ((1 *)) dx-%offset%)
(f2cl-lib:fref dy-%data% (iy) ((1 *)) dy-%offset%))))
(setf ix (f2cl-lib:int-add ix incx))
(setf iy (f2cl-lib:int-add iy incy))
label10))
(setf ddot dtemp)
(go end_label)
label20
(setf m (mod n 5))
(if (= m 0) (go label40))
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i m) nil)
(tagbody
(setf dtemp
(+ dtemp
(* (f2cl-lib:fref dx-%data% (i) ((1 *)) dx-%offset%)
(f2cl-lib:fref dy-%data% (i) ((1 *)) dy-%offset%))))
label30))
(if (< n 5) (go label60))
label40
(setf mp1 (f2cl-lib:int-add m 1))
(f2cl-lib:fdo (i mp1 (f2cl-lib:int-add i 5))
((> i n) nil)
(tagbody
(setf dtemp
(+ dtemp
(* (f2cl-lib:fref dx-%data% (i) ((1 *)) dx-%offset%)
(f2cl-lib:fref dy-%data% (i) ((1 *)) dy-%offset%))
(*
(f2cl-lib:fref dx-%data%
((f2cl-lib:int-add i 1))
((1 *))
dx-%offset%)
(f2cl-lib:fref dy-%data%
((f2cl-lib:int-add i 1))
((1 *))
dy-%offset%))
(*
(f2cl-lib:fref dx-%data%
((f2cl-lib:int-add i 2))
((1 *))
dx-%offset%)
(f2cl-lib:fref dy-%data%
((f2cl-lib:int-add i 2))
((1 *))
dy-%offset%))
(*
(f2cl-lib:fref dx-%data%
((f2cl-lib:int-add i 3))
((1 *))
dx-%offset%)
(f2cl-lib:fref dy-%data%
((f2cl-lib:int-add i 3))
((1 *))
dy-%offset%))
(*
(f2cl-lib:fref dx-%data%
((f2cl-lib:int-add i 4))
((1 *))
dx-%offset%)
(f2cl-lib:fref dy-%data%
((f2cl-lib:int-add i 4))
((1 *))
dy-%offset%))))
label50))
label60
(setf ddot dtemp)
(go end_label)
end_label
(return (values ddot nil nil nil nil nil)))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::ddot fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((fortran-to-lisp::integer4) (array double-float (*))
(fortran-to-lisp::integer4) (array double-float (*))
(fortran-to-lisp::integer4))
:return-values '(nil nil nil nil nil)
:calls 'nil)))
| null | https://raw.githubusercontent.com/sgbj/MaximaSharp/75067d7e045b9ed50883b5eb09803b4c8f391059/Test/bin/Debug/Maxima-5.30.0/share/maxima/5.30.0/share/colnew/lisp/ddot.lisp | lisp | Compiled by f2cl version:
Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
(:coerce-assigns :as-needed) (:array-type ':array)
(:array-slicing t) (:declare-common nil)
(:float-format double-float)) | ( " f2cl1.l , v 1.221 2010/05/26 19:25:52 "
" f2cl2.l , v 1.37 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl3.l , v 1.6 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl4.l , v 1.7 2008/02/22 22:19:34 rtoy Exp $ "
" f2cl5.l , v 1.204 2010/02/23 05:21:30 "
" f2cl6.l , v 1.48 2008/08/24 00:56:27 rtoy Exp $ "
" macros.l , v 1.114 2010/05/17 01:42:14 " )
Using Lisp CMU Common Lisp CVS Head 2010 - 05 - 25 18:21:07 ( 20A Unicode )
(in-package :colnew)
(defun ddot (n dx incx dy incy)
(declare (type (array double-float (*)) dy dx)
(type (f2cl-lib:integer4) incy incx n))
(f2cl-lib:with-multi-array-data
((dx double-float dx-%data% dx-%offset%)
(dy double-float dy-%data% dy-%offset%))
(prog ((i 0) (ix 0) (iy 0) (m 0) (mp1 0) (dtemp 0.0) (ddot 0.0))
(declare (type (double-float) ddot dtemp)
(type (f2cl-lib:integer4) mp1 m iy ix i))
(setf ddot 0.0)
(setf dtemp 0.0)
(if (<= n 0) (go end_label))
(if (and (= incx 1) (= incy 1)) (go label20))
(setf ix 1)
(setf iy 1)
(if (< incx 0)
(setf ix
(f2cl-lib:int-add
(f2cl-lib:int-mul (f2cl-lib:int-sub 1 n) incx)
1)))
(if (< incy 0)
(setf iy
(f2cl-lib:int-add
(f2cl-lib:int-mul (f2cl-lib:int-sub 1 n) incy)
1)))
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i n) nil)
(tagbody
(setf dtemp
(+ dtemp
(* (f2cl-lib:fref dx-%data% (ix) ((1 *)) dx-%offset%)
(f2cl-lib:fref dy-%data% (iy) ((1 *)) dy-%offset%))))
(setf ix (f2cl-lib:int-add ix incx))
(setf iy (f2cl-lib:int-add iy incy))
label10))
(setf ddot dtemp)
(go end_label)
label20
(setf m (mod n 5))
(if (= m 0) (go label40))
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i m) nil)
(tagbody
(setf dtemp
(+ dtemp
(* (f2cl-lib:fref dx-%data% (i) ((1 *)) dx-%offset%)
(f2cl-lib:fref dy-%data% (i) ((1 *)) dy-%offset%))))
label30))
(if (< n 5) (go label60))
label40
(setf mp1 (f2cl-lib:int-add m 1))
(f2cl-lib:fdo (i mp1 (f2cl-lib:int-add i 5))
((> i n) nil)
(tagbody
(setf dtemp
(+ dtemp
(* (f2cl-lib:fref dx-%data% (i) ((1 *)) dx-%offset%)
(f2cl-lib:fref dy-%data% (i) ((1 *)) dy-%offset%))
(*
(f2cl-lib:fref dx-%data%
((f2cl-lib:int-add i 1))
((1 *))
dx-%offset%)
(f2cl-lib:fref dy-%data%
((f2cl-lib:int-add i 1))
((1 *))
dy-%offset%))
(*
(f2cl-lib:fref dx-%data%
((f2cl-lib:int-add i 2))
((1 *))
dx-%offset%)
(f2cl-lib:fref dy-%data%
((f2cl-lib:int-add i 2))
((1 *))
dy-%offset%))
(*
(f2cl-lib:fref dx-%data%
((f2cl-lib:int-add i 3))
((1 *))
dx-%offset%)
(f2cl-lib:fref dy-%data%
((f2cl-lib:int-add i 3))
((1 *))
dy-%offset%))
(*
(f2cl-lib:fref dx-%data%
((f2cl-lib:int-add i 4))
((1 *))
dx-%offset%)
(f2cl-lib:fref dy-%data%
((f2cl-lib:int-add i 4))
((1 *))
dy-%offset%))))
label50))
label60
(setf ddot dtemp)
(go end_label)
end_label
(return (values ddot nil nil nil nil nil)))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::ddot fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((fortran-to-lisp::integer4) (array double-float (*))
(fortran-to-lisp::integer4) (array double-float (*))
(fortran-to-lisp::integer4))
:return-values '(nil nil nil nil nil)
:calls 'nil)))
|
92712eabac625d4be335530fa3341e5e68221201261a953afd02d2418c479f9f | Haskell-Things/ImplicitCAD | Expr.hs | {- ORMOLU_DISABLE -}
Implicit CAD . Copyright ( C ) 2011 , ( )
Copyright ( C ) 2014 - 2017 , ( )
-- Released under the GNU AGPLV3+, see LICENSE
Allow us to use shorter forms of Var and Name .
# LANGUAGE PatternSynonyms #
-- Allow us to use string literals for Text
{-# LANGUAGE OverloadedStrings #-}
module ParserSpec.Expr (exprSpec) where
-- Be explicit about what we import.
import Prelude (Bool(True, False), ($))
Hspec , for writing specs .
import Test.Hspec (describe, Spec, it, specify)
import Data.Text.Lazy (Text)
expression components .
import Graphics.Implicit.ExtOpenScad.Definitions (Expr(ListE, (:$)), Symbol(Symbol))
import qualified Graphics.Implicit.ExtOpenScad.Definitions as GIED (Expr(Var), Pattern(Name))
The type used for variables , in ImplicitCAD .
import Graphics.Implicit.Definitions (ℝ)
-- Our utility library, for making these tests easier to read.
> ) , fapp , num , bool , stringLiteral , undefined , plus , minus , mult , power , divide , negate , and , or , not , gt , ternary , append , index , lambda )
Default all numbers in this file to being of the type ImplicitCAD uses for values .
default (ℝ)
Let us use the old syntax when defining and Names .
pattern Var :: Text -> Expr
pattern Var s = GIED.Var (Symbol s)
pattern Name :: Text -> GIED.Pattern
pattern Name n = GIED.Name (Symbol n)
logicalSpec :: Spec
logicalSpec = do
describe "not" $ do
> not [ Var " foo " ]
specify "double" $ "!!foo" --> Var "foo"
> not [ Var " foo " ]
it "handles and/or" $ do
> and [ Var " foo " , Var " bar " ]
> or [ Var " foo " , Var " bar " ]
describe "ternary operator" $ do
specify "with primitive expressions" $
> ternary [ Var " x " , num 2 , num 3 ]
specify "with parenthesized comparison" $
> ternary [ gt [ num 1 , num 0 ] , num 5 , num ( -5 ) ]
specify "with comparison in head position" $
> ternary [ gt [ num 1 , num 0 ] , num 5 , num ( -5 ) ]
specify "with comparison in head position, and addition in tail" $
"1 > 0 ? 5 : 1 + 2" -->
ternary [gt [num 1, num 0], num 5, plus [num 1, num 2]]
specify "nested in true and false expressions" $
"c0 ? c1 ? t1 : f1 : c2 ? t2 : f2" -->
ternary [Var "c0", ternary [Var "c1",Var "t1",Var "f1"], ternary [Var "c2",Var "t2",Var "f2"]]
literalSpec :: Spec
literalSpec = do
it "handles integers" $
> num 12356
it "handles positive leading zero integers" $
> num 12356
it "handles zero integer" $
"0" --> num 0
it "handles leading zero integer" $
"0000" --> num 0
it "handles floats" $
> num 23.42
it "handles floats with no whole component" $
> num 0.2342
describe "E notation" $ do
> num 10
> num 1
> num 10
> num 11
> num 0.128
> num 11
describe "booleans" $ do
it "accepts true" $ "true" --> bool True
it "accepts false" $ "false" --> bool False
describe "undefined" $
it "accepts undef" $ "undef" --> undefined
letBindingSpec :: Spec
letBindingSpec = do
it "handles let with integer binding and spaces" $
> lambda [ Name " a " ] ( Var " a " ) [ num 1 ]
it "handles multiple variable let" $
> lambda [ Name " a " ] ( lambda [ Name " b " ] ( plus [ Var " a " , " b " ] ) [ Var " y " ] ) [ Var " x " ]
it "handles empty let" $
> Var " a "
it "handles nested let" $
> lambda [ Name " a " ] ( lambda [ Name " b " ] ( plus [ Var " a " , " b " ] ) [ Var " y " ] ) [ Var " x " ]
it "handles let on right side of an arithmetic operator" $
> plus [ num 1 , lambda [ Name " b " ] ( Var " b " ) [ Var " y " ] ]
it "handles let on right side of a unary negation" $
> negate [ lambda [ Name " b " ] ( Var " b " ) [ Var " y " ] ]
exprSpec :: Spec
exprSpec = do
describe "literals" literalSpec
describe "identifiers" $
it "accepts valid variable names" $ do
"foo" --> Var "foo"
"foo_bar" --> Var "foo_bar"
describe "grouping" $ do
it "allows parens" $
"( false )" --> bool False
it "handles empty vectors" $
"[]" --> ListE []
it "handles single element vectors" $
> ListE [ Var " a " ]
it "handles vectors" $
> ListE [ num 1 , num 2 , num 3 ]
it "handles nested vectors" $
> ListE [ num 1 , ListE [ num 2 , num 7 ] , [ num 3 , num 4 , num 5 , num 6 ] ]
it "handles lists" $
> ListE [ num 1 , num 2 , num 3 ]
it "handles generators" $
"[ a : b ]" -->
fapp "list_gen" [Var "a", num 1, Var "b"]
it "handles generators with expression" $
"[ a : b + 10 ]" -->
fapp "list_gen" [Var "a", num 1, plus [Var "b", num 10]]
it "handles increment generators" $
"[ a : 3 : b + 10 ]" -->
fapp "list_gen" [Var "a", num 3, plus [Var "b", num 10]]
it "handles indexing" $
> index [ Var " foo " , num 23 ]
it "handles multiple indexes" $
> Var " index " : $ [ Var " index " : $ [ Var " foo " , num 23 ] , num 12 ]
it "handles single function/module call with single argument" $
> Var " foo " : $ [ num 1 ]
it "handles single function/module call with multiple arguments" $
> Var " foo " : $ [ num 1 , num 2 , num 3 ]
describe "arithmetic" $ do
it "handles unary -" $
"-42" --> num (-42)
it "handles unary +" $
> num 42
it "handles unary - with extra spaces" $
"- 42" --> num (-42)
it "handles unary + with extra spaces" $
> num 42
it "handles unary - with parentheses" $
> negate [ minus [ num 4 , num 3 ] ]
it "handles unary + with parentheses" $
> minus [ num 4 , num 1 ]
it "handles unary - with identifier" $
> negate [ Var " foo " ]
it "handles unary + with identifier" $
"+foo" --> Var "foo"
it "handles unary - with string literal" $
"-\"foo\"" --> negate [stringLiteral "foo"]
it "handles unary + with string literal" $
"+\"foo\"" --> stringLiteral "foo"
it "handles +" $ do
> plus [ num 1 , num 2 ]
> plus [ plus [ num 1 , num 2 ] , num 3 ]
it "handles -" $ do
> minus [ num 1 , num 2 ]
> minus [ minus [ num 1 , num 2 ] , num 3 ]
it "handles +/- in combination" $ do
> minus [ plus [ num 1 , num 2 ] , num 3 ]
> plus [ minus [ num 2 , num 3 ] , num 4 ]
> plus [ minus [ plus [ num 1 , num 2 ] , num 3 ] , num 4 ]
> minus [ minus [ plus [ minus [ plus [ num 1 , num 2 ] , num 3 ] , num 4 ] , num 5 ] , num 6 ]
it "handles exponentiation" $
> power [ Var " x " , " y " ]
it "handles multiple exponentiations" $
> power [ Var " x " , power [ Var " y " , " z " ] ]
it "handles *" $
> mult [ num 3 , num 4 ]
it "handles > 2 term *" $
> mult [ mult [ num 3 , num 4 ] , num 5 ]
it "handles /" $
> divide [ num 4.2 , num 2.3 ]
it "handles precedence" $
> plus [ num 1 , mult [ divide [ num 2 , num 3 ] , num 5 ] ]
it "handles append" $
> append [ append [ " foo " , Var " bar " ] , " baz " ]
describe "logical operators" logicalSpec
describe "let expressions" letBindingSpec
describe "function/module application" $ do
specify "base case" $ "foo(x)" --> Var "foo" :$ [Var "x"]
specify "multiple arguments" $
> Var " foo " : $ [ Var " x " , num 1 , num 2 ]
| null | https://raw.githubusercontent.com/Haskell-Things/ImplicitCAD/5ce28f01fda1c5285959040fe7d1eb63a1d09aef/tests/ParserSpec/Expr.hs | haskell | ORMOLU_DISABLE
Released under the GNU AGPLV3+, see LICENSE
Allow us to use string literals for Text
# LANGUAGE OverloadedStrings #
Be explicit about what we import.
Our utility library, for making these tests easier to read.
> Var "foo"
>
>
> num 0
> num 0
> bool True
> bool False
> undefined
> Var "foo"
> Var "foo_bar"
> bool False
> ListE []
>
>
>
> num (-42)
> num (-42)
> Var "foo"
> negate [stringLiteral "foo"]
> stringLiteral "foo"
> Var "foo" :$ [Var "x"] | Implicit CAD . Copyright ( C ) 2011 , ( )
Copyright ( C ) 2014 - 2017 , ( )
Allow us to use shorter forms of Var and Name .
# LANGUAGE PatternSynonyms #
module ParserSpec.Expr (exprSpec) where
import Prelude (Bool(True, False), ($))
Hspec , for writing specs .
import Test.Hspec (describe, Spec, it, specify)
import Data.Text.Lazy (Text)
expression components .
import Graphics.Implicit.ExtOpenScad.Definitions (Expr(ListE, (:$)), Symbol(Symbol))
import qualified Graphics.Implicit.ExtOpenScad.Definitions as GIED (Expr(Var), Pattern(Name))
The type used for variables , in ImplicitCAD .
import Graphics.Implicit.Definitions (ℝ)
> ) , fapp , num , bool , stringLiteral , undefined , plus , minus , mult , power , divide , negate , and , or , not , gt , ternary , append , index , lambda )
Default all numbers in this file to being of the type ImplicitCAD uses for values .
default (ℝ)
Let us use the old syntax when defining and Names .
pattern Var :: Text -> Expr
pattern Var s = GIED.Var (Symbol s)
pattern Name :: Text -> GIED.Pattern
pattern Name n = GIED.Name (Symbol n)
logicalSpec :: Spec
logicalSpec = do
describe "not" $ do
> not [ Var " foo " ]
> not [ Var " foo " ]
it "handles and/or" $ do
> and [ Var " foo " , Var " bar " ]
> or [ Var " foo " , Var " bar " ]
describe "ternary operator" $ do
specify "with primitive expressions" $
> ternary [ Var " x " , num 2 , num 3 ]
specify "with parenthesized comparison" $
> ternary [ gt [ num 1 , num 0 ] , num 5 , num ( -5 ) ]
specify "with comparison in head position" $
> ternary [ gt [ num 1 , num 0 ] , num 5 , num ( -5 ) ]
specify "with comparison in head position, and addition in tail" $
ternary [gt [num 1, num 0], num 5, plus [num 1, num 2]]
specify "nested in true and false expressions" $
ternary [Var "c0", ternary [Var "c1",Var "t1",Var "f1"], ternary [Var "c2",Var "t2",Var "f2"]]
literalSpec :: Spec
literalSpec = do
it "handles integers" $
> num 12356
it "handles positive leading zero integers" $
> num 12356
it "handles zero integer" $
it "handles leading zero integer" $
it "handles floats" $
> num 23.42
it "handles floats with no whole component" $
> num 0.2342
describe "E notation" $ do
> num 10
> num 1
> num 10
> num 11
> num 0.128
> num 11
describe "booleans" $ do
describe "undefined" $
letBindingSpec :: Spec
letBindingSpec = do
it "handles let with integer binding and spaces" $
> lambda [ Name " a " ] ( Var " a " ) [ num 1 ]
it "handles multiple variable let" $
> lambda [ Name " a " ] ( lambda [ Name " b " ] ( plus [ Var " a " , " b " ] ) [ Var " y " ] ) [ Var " x " ]
it "handles empty let" $
> Var " a "
it "handles nested let" $
> lambda [ Name " a " ] ( lambda [ Name " b " ] ( plus [ Var " a " , " b " ] ) [ Var " y " ] ) [ Var " x " ]
it "handles let on right side of an arithmetic operator" $
> plus [ num 1 , lambda [ Name " b " ] ( Var " b " ) [ Var " y " ] ]
it "handles let on right side of a unary negation" $
> negate [ lambda [ Name " b " ] ( Var " b " ) [ Var " y " ] ]
exprSpec :: Spec
exprSpec = do
describe "literals" literalSpec
describe "identifiers" $
it "accepts valid variable names" $ do
describe "grouping" $ do
it "allows parens" $
it "handles empty vectors" $
it "handles single element vectors" $
> ListE [ Var " a " ]
it "handles vectors" $
> ListE [ num 1 , num 2 , num 3 ]
it "handles nested vectors" $
> ListE [ num 1 , ListE [ num 2 , num 7 ] , [ num 3 , num 4 , num 5 , num 6 ] ]
it "handles lists" $
> ListE [ num 1 , num 2 , num 3 ]
it "handles generators" $
fapp "list_gen" [Var "a", num 1, Var "b"]
it "handles generators with expression" $
fapp "list_gen" [Var "a", num 1, plus [Var "b", num 10]]
it "handles increment generators" $
fapp "list_gen" [Var "a", num 3, plus [Var "b", num 10]]
it "handles indexing" $
> index [ Var " foo " , num 23 ]
it "handles multiple indexes" $
> Var " index " : $ [ Var " index " : $ [ Var " foo " , num 23 ] , num 12 ]
it "handles single function/module call with single argument" $
> Var " foo " : $ [ num 1 ]
it "handles single function/module call with multiple arguments" $
> Var " foo " : $ [ num 1 , num 2 , num 3 ]
describe "arithmetic" $ do
it "handles unary -" $
it "handles unary +" $
> num 42
it "handles unary - with extra spaces" $
it "handles unary + with extra spaces" $
> num 42
it "handles unary - with parentheses" $
> negate [ minus [ num 4 , num 3 ] ]
it "handles unary + with parentheses" $
> minus [ num 4 , num 1 ]
it "handles unary - with identifier" $
> negate [ Var " foo " ]
it "handles unary + with identifier" $
it "handles unary - with string literal" $
it "handles unary + with string literal" $
it "handles +" $ do
> plus [ num 1 , num 2 ]
> plus [ plus [ num 1 , num 2 ] , num 3 ]
it "handles -" $ do
> minus [ num 1 , num 2 ]
> minus [ minus [ num 1 , num 2 ] , num 3 ]
it "handles +/- in combination" $ do
> minus [ plus [ num 1 , num 2 ] , num 3 ]
> plus [ minus [ num 2 , num 3 ] , num 4 ]
> plus [ minus [ plus [ num 1 , num 2 ] , num 3 ] , num 4 ]
> minus [ minus [ plus [ minus [ plus [ num 1 , num 2 ] , num 3 ] , num 4 ] , num 5 ] , num 6 ]
it "handles exponentiation" $
> power [ Var " x " , " y " ]
it "handles multiple exponentiations" $
> power [ Var " x " , power [ Var " y " , " z " ] ]
it "handles *" $
> mult [ num 3 , num 4 ]
it "handles > 2 term *" $
> mult [ mult [ num 3 , num 4 ] , num 5 ]
it "handles /" $
> divide [ num 4.2 , num 2.3 ]
it "handles precedence" $
> plus [ num 1 , mult [ divide [ num 2 , num 3 ] , num 5 ] ]
it "handles append" $
> append [ append [ " foo " , Var " bar " ] , " baz " ]
describe "logical operators" logicalSpec
describe "let expressions" letBindingSpec
describe "function/module application" $ do
specify "multiple arguments" $
> Var " foo " : $ [ Var " x " , num 1 , num 2 ]
|
28cec08a65e13467c687b8c93116c35a22ec2fb0d4a21fd875c81e3ebeb31900 | aaronallen8455/hi-fi | FoldFields.hs | # LANGUAGE RecordWildCards #
module HiFi.TcPlugin.FoldFields
( buildFoldFieldsExpr
) where
import Data.Either
import Data.Functor ((<&>))
import qualified HiFi.GhcFacade as Ghc
import HiFi.TcPlugin.PluginInputs
import HiFi.TcPlugin.RecordParts
import HiFi.TcPlugin.Utils (makeWantedCt)
buildFoldFieldsExpr
:: PluginInputs
-> Ghc.EvBindsVar
-> [Ghc.Ct]
-> Ghc.CtLoc
-> Ghc.Type
-> Ghc.Type
-> Ghc.Class
-> [Ghc.Type]
-> [(Ghc.FastString, FieldParts)]
-> Ghc.TcPluginM (Either [Ghc.Ct] Ghc.CoreExpr)
buildFoldFieldsExpr inp@MkPluginInputs{..} evBindsVar givens ctLoc recordTy effectConTy predClass predArgs fields = do
xTyVarName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "x")
let xTyVar = Ghc.mkTyVar xTyVarName Ghc.liftedTypeKind
hkdTy = Ghc.mkTyConApp hkdTyCon [recordTy, effectConTy]
fieldGenBndr <- mkFieldGenBndr inp effectConTy predClass hkdTy xTyVar predArgs
eFieldGenExprs <-
traverse
(mkFieldGenExpr
inp
evBindsVar
givens
fieldGenBndr
hkdTy
ctLoc
predClass
predArgs
effectConTy
recordTy
)
fields
case partitionEithers eFieldGenExprs of
([], fieldGenExprs) -> Right <$> mkFoldFieldsExpr xTyVar fieldGenBndr fieldGenExprs
(wanteds, _) -> pure . Left $ concat wanteds
-- | Make the binder for the function that produces the x terms
mkFieldGenBndr
:: PluginInputs
-> Ghc.Type
-> Ghc.Class
-> Ghc.Type
-> Ghc.TyVar
-> [Ghc.Type]
-> Ghc.TcPluginM Ghc.Id
mkFieldGenBndr inp effectConTy predClass hkdTy xTyVar predArgs = do
fieldGenName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "fieldGen")
fieldTyVarName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "a")
let tyVar = Ghc.mkTyVar fieldTyVarName Ghc.liftedTypeKind
fieldTy = Ghc.mkTyConApp
(fieldTyTyCon inp)
[effectConTy, Ghc.mkTyVarTy tyVar]
-- forall a. (C (FieldTy f a)
-- => String
-- -> (HKD rec f -> FieldTy f a)
-- -> x
fieldGenTy = Ghc.mkSigmaTy forallBndrs preds tyBody
where
forallBndrs = [ Ghc.mkTyCoVarBinder Ghc.Required tyVar ]
preds = [ Ghc.mkClassPred predClass
$ predArgs
++ [ fieldTy ]
]
tyBody = Ghc.stringTy
`Ghc.mkVisFunTyMany`
( hkdTy
`Ghc.mkVisFunTyMany`
fieldTy
)
`Ghc.mkVisFunTyMany`
Ghc.mkTyVarTy xTyVar
pure $ Ghc.mkLocalIdOrCoVar fieldGenName Ghc.Many fieldGenTy
-- | Make the expr that results from applying all arguments (including the dict)
-- to the user supplied function that generates a value for each field.
mkFieldGenExpr
:: PluginInputs
-> Ghc.EvBindsVar
-> [Ghc.Ct]
-> Ghc.Id
-> Ghc.Type
-> Ghc.CtLoc
-> Ghc.Class
-> [Ghc.Type]
-> Ghc.Type
-> Ghc.Type
-> (Ghc.FastString, FieldParts)
-> Ghc.TcPluginM (Either [Ghc.Ct] Ghc.CoreExpr)
mkFieldGenExpr inp evBindsVar givens fieldGenBndr hkdTy ctLoc predClass predArgs effectConTy recordTy (fieldName, fieldParts) = do
hkdName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "hkd")
let hkdBndr = Ghc.mkLocalIdOrCoVar hkdName Ghc.Many hkdTy
getterExpr =
Ghc.mkCoreLams [hkdBndr] $
case fieldNesting fieldParts of
Unnested idx ->
Ghc.mkCoreApps (Ghc.Var $ indexArrayId inp)
[ Ghc.Type recordTy
, Ghc.Type effectConTy
, Ghc.Var hkdBndr
, Ghc.mkUncheckedIntExpr idx
]
Nested offset len innerRecTy _ ->
Ghc.mkCoreApps (Ghc.Var $ getInnerRecId inp)
[ Ghc.Type recordTy
, Ghc.Type effectConTy
, Ghc.Type innerRecTy
, Ghc.Var hkdBndr
, Ghc.mkUncheckedIntExpr offset
, Ghc.mkUncheckedIntExpr len
]
predClassArgs =
predArgs ++
[Ghc.mkTyConApp (fieldTyTyCon inp) [effectConTy, fieldType fieldParts]]
(predCt, predDest) <- makeWantedCt ctLoc predClass predClassArgs
ePredDict <- Ghc.unsafeTcPluginTcM $ solvePred evBindsVar givens predCt predDest
fieldNameExpr <- Ghc.mkStringExprFS' fieldName
pure $ ePredDict <&> \predDict ->
Ghc.mkCoreApps (Ghc.Var fieldGenBndr) $
[ Ghc.Type $ fieldType fieldParts
] ++ [predDict] ++
[ fieldNameExpr
, getterExpr
]
-- | Attempt to solve a constraint returning new wanted constraints if unsuccessful.
solvePred
:: Ghc.EvBindsVar
-> [Ghc.Ct]
-> Ghc.Ct
-> Ghc.TcEvDest
-> Ghc.TcM (Either [Ghc.Ct] Ghc.EvExpr)
solvePred evBindsVar givens predCt predDest = do
wanteds <- Ghc.runTcSWithEvBinds evBindsVar $ do
-- Add givens back in
Ghc.solveSimpleGivens givens
-- Try to solve the constraint with both top level instances and givens
Ghc.solveSimpleWanteds (Ghc.singleCt predCt)
Check if GHC produced evidence
mEvTerm <- lookupEvTerm evBindsVar predDest
pure $ case mEvTerm of
Just (Ghc.EvExpr evExpr) -> do
if Ghc.isSolvedWC wanteds
then Right evExpr
else Left . Ghc.ctsElts $ Ghc.wc_simple wanteds
_ -> Left [predCt]
-- | Puts the pieces together to form the resulting expr
mkFoldFieldsExpr :: Ghc.TyVar -> Ghc.Id -> [Ghc.CoreExpr] -> Ghc.TcPluginM Ghc.CoreExpr
mkFoldFieldsExpr xTyVar fieldGenBndr fieldGenExprs = do
initAccName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "initAcc")
accTyVarName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "acc")
accumulatorName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "accumulator")
let accumulatorTy = Ghc.mkTyVarTy xTyVar
`Ghc.mkVisFunTyMany`
Ghc.mkTyVarTy accTyVar
`Ghc.mkVisFunTyMany`
Ghc.mkTyVarTy accTyVar
accumulatorBndr = Ghc.mkLocalIdOrCoVar accumulatorName Ghc.Many accumulatorTy
accTyVar = Ghc.mkTyVar accTyVarName Ghc.liftedTypeKind
initAccBndr = Ghc.mkLocalIdOrCoVar initAccName Ghc.Many (Ghc.mkTyVarTy accTyVar)
lamArgs = [ accTyVar
, xTyVar
, fieldGenBndr
, initAccBndr
, accumulatorBndr
]
bodyExpr <- Ghc.unsafeTcPluginTcM $
Ghc.mkFoldrExpr (Ghc.mkTyVarTy xTyVar)
(Ghc.mkTyVarTy accTyVar)
(Ghc.Var accumulatorBndr)
(Ghc.Var initAccBndr)
(Ghc.mkListExpr (Ghc.mkTyVarTy xTyVar) fieldGenExprs)
pure $ Ghc.mkCoreLams lamArgs bodyExpr
-- | Look up whether a 'TcEvDest' has been filled with evidence.
lookupEvTerm
:: Ghc.EvBindsVar
-> Ghc.TcEvDest
-> Ghc.TcM (Maybe Ghc.EvTerm)
lookupEvTerm _ (Ghc.HoleDest (Ghc.CoercionHole { Ghc.ch_ref = ref } ) ) = do
mb_co <- Ghc.readTcRef ref
case mb_co of
Nothing -> pure Nothing
Just co -> pure . Just $ Ghc.evCoercion co
lookupEvTerm evBindsVar (Ghc.EvVarDest ev_var) = do
evBindsMap <- Ghc.getTcEvBindsMap evBindsVar
let
mEvBind :: Maybe Ghc.EvBind
mEvBind = Ghc.lookupEvBind evBindsMap ev_var
case mEvBind of
Nothing -> pure Nothing
Just evBind -> pure . Just $ Ghc.eb_rhs evBind
| null | https://raw.githubusercontent.com/aaronallen8455/hi-fi/2ddc5c4f5e2922806b35c70645c625b39e0820ea/src/HiFi/TcPlugin/FoldFields.hs | haskell | | Make the binder for the function that produces the x terms
forall a. (C (FieldTy f a)
=> String
-> (HKD rec f -> FieldTy f a)
-> x
| Make the expr that results from applying all arguments (including the dict)
to the user supplied function that generates a value for each field.
| Attempt to solve a constraint returning new wanted constraints if unsuccessful.
Add givens back in
Try to solve the constraint with both top level instances and givens
| Puts the pieces together to form the resulting expr
| Look up whether a 'TcEvDest' has been filled with evidence. | # LANGUAGE RecordWildCards #
module HiFi.TcPlugin.FoldFields
( buildFoldFieldsExpr
) where
import Data.Either
import Data.Functor ((<&>))
import qualified HiFi.GhcFacade as Ghc
import HiFi.TcPlugin.PluginInputs
import HiFi.TcPlugin.RecordParts
import HiFi.TcPlugin.Utils (makeWantedCt)
buildFoldFieldsExpr
:: PluginInputs
-> Ghc.EvBindsVar
-> [Ghc.Ct]
-> Ghc.CtLoc
-> Ghc.Type
-> Ghc.Type
-> Ghc.Class
-> [Ghc.Type]
-> [(Ghc.FastString, FieldParts)]
-> Ghc.TcPluginM (Either [Ghc.Ct] Ghc.CoreExpr)
buildFoldFieldsExpr inp@MkPluginInputs{..} evBindsVar givens ctLoc recordTy effectConTy predClass predArgs fields = do
xTyVarName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "x")
let xTyVar = Ghc.mkTyVar xTyVarName Ghc.liftedTypeKind
hkdTy = Ghc.mkTyConApp hkdTyCon [recordTy, effectConTy]
fieldGenBndr <- mkFieldGenBndr inp effectConTy predClass hkdTy xTyVar predArgs
eFieldGenExprs <-
traverse
(mkFieldGenExpr
inp
evBindsVar
givens
fieldGenBndr
hkdTy
ctLoc
predClass
predArgs
effectConTy
recordTy
)
fields
case partitionEithers eFieldGenExprs of
([], fieldGenExprs) -> Right <$> mkFoldFieldsExpr xTyVar fieldGenBndr fieldGenExprs
(wanteds, _) -> pure . Left $ concat wanteds
mkFieldGenBndr
:: PluginInputs
-> Ghc.Type
-> Ghc.Class
-> Ghc.Type
-> Ghc.TyVar
-> [Ghc.Type]
-> Ghc.TcPluginM Ghc.Id
mkFieldGenBndr inp effectConTy predClass hkdTy xTyVar predArgs = do
fieldGenName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "fieldGen")
fieldTyVarName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "a")
let tyVar = Ghc.mkTyVar fieldTyVarName Ghc.liftedTypeKind
fieldTy = Ghc.mkTyConApp
(fieldTyTyCon inp)
[effectConTy, Ghc.mkTyVarTy tyVar]
fieldGenTy = Ghc.mkSigmaTy forallBndrs preds tyBody
where
forallBndrs = [ Ghc.mkTyCoVarBinder Ghc.Required tyVar ]
preds = [ Ghc.mkClassPred predClass
$ predArgs
++ [ fieldTy ]
]
tyBody = Ghc.stringTy
`Ghc.mkVisFunTyMany`
( hkdTy
`Ghc.mkVisFunTyMany`
fieldTy
)
`Ghc.mkVisFunTyMany`
Ghc.mkTyVarTy xTyVar
pure $ Ghc.mkLocalIdOrCoVar fieldGenName Ghc.Many fieldGenTy
mkFieldGenExpr
:: PluginInputs
-> Ghc.EvBindsVar
-> [Ghc.Ct]
-> Ghc.Id
-> Ghc.Type
-> Ghc.CtLoc
-> Ghc.Class
-> [Ghc.Type]
-> Ghc.Type
-> Ghc.Type
-> (Ghc.FastString, FieldParts)
-> Ghc.TcPluginM (Either [Ghc.Ct] Ghc.CoreExpr)
mkFieldGenExpr inp evBindsVar givens fieldGenBndr hkdTy ctLoc predClass predArgs effectConTy recordTy (fieldName, fieldParts) = do
hkdName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "hkd")
let hkdBndr = Ghc.mkLocalIdOrCoVar hkdName Ghc.Many hkdTy
getterExpr =
Ghc.mkCoreLams [hkdBndr] $
case fieldNesting fieldParts of
Unnested idx ->
Ghc.mkCoreApps (Ghc.Var $ indexArrayId inp)
[ Ghc.Type recordTy
, Ghc.Type effectConTy
, Ghc.Var hkdBndr
, Ghc.mkUncheckedIntExpr idx
]
Nested offset len innerRecTy _ ->
Ghc.mkCoreApps (Ghc.Var $ getInnerRecId inp)
[ Ghc.Type recordTy
, Ghc.Type effectConTy
, Ghc.Type innerRecTy
, Ghc.Var hkdBndr
, Ghc.mkUncheckedIntExpr offset
, Ghc.mkUncheckedIntExpr len
]
predClassArgs =
predArgs ++
[Ghc.mkTyConApp (fieldTyTyCon inp) [effectConTy, fieldType fieldParts]]
(predCt, predDest) <- makeWantedCt ctLoc predClass predClassArgs
ePredDict <- Ghc.unsafeTcPluginTcM $ solvePred evBindsVar givens predCt predDest
fieldNameExpr <- Ghc.mkStringExprFS' fieldName
pure $ ePredDict <&> \predDict ->
Ghc.mkCoreApps (Ghc.Var fieldGenBndr) $
[ Ghc.Type $ fieldType fieldParts
] ++ [predDict] ++
[ fieldNameExpr
, getterExpr
]
solvePred
:: Ghc.EvBindsVar
-> [Ghc.Ct]
-> Ghc.Ct
-> Ghc.TcEvDest
-> Ghc.TcM (Either [Ghc.Ct] Ghc.EvExpr)
solvePred evBindsVar givens predCt predDest = do
wanteds <- Ghc.runTcSWithEvBinds evBindsVar $ do
Ghc.solveSimpleGivens givens
Ghc.solveSimpleWanteds (Ghc.singleCt predCt)
Check if GHC produced evidence
mEvTerm <- lookupEvTerm evBindsVar predDest
pure $ case mEvTerm of
Just (Ghc.EvExpr evExpr) -> do
if Ghc.isSolvedWC wanteds
then Right evExpr
else Left . Ghc.ctsElts $ Ghc.wc_simple wanteds
_ -> Left [predCt]
mkFoldFieldsExpr :: Ghc.TyVar -> Ghc.Id -> [Ghc.CoreExpr] -> Ghc.TcPluginM Ghc.CoreExpr
mkFoldFieldsExpr xTyVar fieldGenBndr fieldGenExprs = do
initAccName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "initAcc")
accTyVarName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "acc")
accumulatorName <- Ghc.unsafeTcPluginTcM
$ Ghc.newName (Ghc.mkOccName Ghc.varName "accumulator")
let accumulatorTy = Ghc.mkTyVarTy xTyVar
`Ghc.mkVisFunTyMany`
Ghc.mkTyVarTy accTyVar
`Ghc.mkVisFunTyMany`
Ghc.mkTyVarTy accTyVar
accumulatorBndr = Ghc.mkLocalIdOrCoVar accumulatorName Ghc.Many accumulatorTy
accTyVar = Ghc.mkTyVar accTyVarName Ghc.liftedTypeKind
initAccBndr = Ghc.mkLocalIdOrCoVar initAccName Ghc.Many (Ghc.mkTyVarTy accTyVar)
lamArgs = [ accTyVar
, xTyVar
, fieldGenBndr
, initAccBndr
, accumulatorBndr
]
bodyExpr <- Ghc.unsafeTcPluginTcM $
Ghc.mkFoldrExpr (Ghc.mkTyVarTy xTyVar)
(Ghc.mkTyVarTy accTyVar)
(Ghc.Var accumulatorBndr)
(Ghc.Var initAccBndr)
(Ghc.mkListExpr (Ghc.mkTyVarTy xTyVar) fieldGenExprs)
pure $ Ghc.mkCoreLams lamArgs bodyExpr
lookupEvTerm
:: Ghc.EvBindsVar
-> Ghc.TcEvDest
-> Ghc.TcM (Maybe Ghc.EvTerm)
lookupEvTerm _ (Ghc.HoleDest (Ghc.CoercionHole { Ghc.ch_ref = ref } ) ) = do
mb_co <- Ghc.readTcRef ref
case mb_co of
Nothing -> pure Nothing
Just co -> pure . Just $ Ghc.evCoercion co
lookupEvTerm evBindsVar (Ghc.EvVarDest ev_var) = do
evBindsMap <- Ghc.getTcEvBindsMap evBindsVar
let
mEvBind :: Maybe Ghc.EvBind
mEvBind = Ghc.lookupEvBind evBindsMap ev_var
case mEvBind of
Nothing -> pure Nothing
Just evBind -> pure . Just $ Ghc.eb_rhs evBind
|
ebe41707d30027e5dd4274058ae31316145b7bfa32320eefb0b6bde64d86c059 | ryo-imai-bit/Writing-An-Interpreter-In-Go-In-OCaml | env.ml | module Env = struct
include Object
include Ast
type env = {
store: (string, Object.obj) Hashtbl.t;
outer: env option;
}
let newEnv () = {
store = Hashtbl.create 100;
outer = None;
}
let getEnv = {
store = Hashtbl.create 100;
outer = None;
}
let newEnclosedEnv env = {
store = Hashtbl.create 100;
outer = Some env;
}
let rec get env key = if Hashtbl.mem env.store key
then Some (Hashtbl.find env.store key)
else (match env.outer with
| Some env -> get env key
| None -> None)
let set env key value = Hashtbl.add env.store key value; value
let extendFunctionEnv prms args env = let nenv = newEnclosedEnv env
in let rec refe pms ags = match pms, ags with
| [], [] -> ()
| (Ast.Identifier idt)::it, obj::ot -> let _ = set nenv idt obj in refe it ot
| _, _ -> raise (Failure "extend env failed")
in refe prms args; nenv
let extendMacroEnv prms args env = let nenv = newEnclosedEnv env
in let rec refe pms ags = match pms, ags with
| [], [] -> ()
| (Ast.Identifier idt)::it, obj::ot -> let _ = set nenv idt (Object.Quote obj) in refe it ot
| _, _ -> raise (Failure "extend env failed")
in refe prms args; nenv
end
| null | https://raw.githubusercontent.com/ryo-imai-bit/Writing-An-Interpreter-In-Go-In-OCaml/8a2d13b6582d637560e8d327a05bcdfb7831a178/lib/env.ml | ocaml | module Env = struct
include Object
include Ast
type env = {
store: (string, Object.obj) Hashtbl.t;
outer: env option;
}
let newEnv () = {
store = Hashtbl.create 100;
outer = None;
}
let getEnv = {
store = Hashtbl.create 100;
outer = None;
}
let newEnclosedEnv env = {
store = Hashtbl.create 100;
outer = Some env;
}
let rec get env key = if Hashtbl.mem env.store key
then Some (Hashtbl.find env.store key)
else (match env.outer with
| Some env -> get env key
| None -> None)
let set env key value = Hashtbl.add env.store key value; value
let extendFunctionEnv prms args env = let nenv = newEnclosedEnv env
in let rec refe pms ags = match pms, ags with
| [], [] -> ()
| (Ast.Identifier idt)::it, obj::ot -> let _ = set nenv idt obj in refe it ot
| _, _ -> raise (Failure "extend env failed")
in refe prms args; nenv
let extendMacroEnv prms args env = let nenv = newEnclosedEnv env
in let rec refe pms ags = match pms, ags with
| [], [] -> ()
| (Ast.Identifier idt)::it, obj::ot -> let _ = set nenv idt (Object.Quote obj) in refe it ot
| _, _ -> raise (Failure "extend env failed")
in refe prms args; nenv
end
|
|
d0d981704dede39950092119515a179bd932c4be374d2443617bb94b263a1eaa | 0xd34df00d/lcss | site.hs | --------------------------------------------------------------------------------
# LANGUAGE OverloadedStrings , QuasiQuotes , LambdaCase #
# LANGUAGE RecordWildCards #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ParallelListComp #
# LANGUAGE NoMonomorphismRestriction #
import Hakyll
import Text.Pandoc.Options
import Text.Pandoc.Walk
import Text.Pandoc.Definition
import qualified Data.Map.Lazy as M
import Data.List.Extra
import Data.Char
import Data.Maybe
import Control.Monad
import Text.RawString.QQ
import ImageCodesProducer
import CustomFields
--------------------------------------------------------------------------------
main :: IO ()
main = do
imagesDb <- prepareImageDb "images/"
hakyll $ do
match "images/**" $ do
route idRoute
compile copyFileCompiler
match "css/*" $ do
route idRoute
compile compressCssCompiler
match "text/*.md" $ do
route $ customRoute $ dropPrefix "text/" . unmdize . toFilePath
compile $ pandocCompilerWithToc imagesDb
>>= loadAndApplyTemplate "templates/default.html" defaultContext
>>= relativizeUrls
listed imagesDb (bookListedConfig "plugins") { createRoot = CustomRoot pluginsRoot }
listed imagesDb (bookListedConfig "development") { createRoot = NoRoot }
listed imagesDb (bookListedConfig "userguide") { createRoot = NoRoot }
listed imagesDb (bookListedConfig "concepts")
listed imagesDb
(defListedConfig "news") { customContext = dates
, customTemplate = Just "news-item"
, subOrder = recentFirst
, verPreprocess = False
, withRss = Just ("rss.xml",
FeedConfiguration
{ feedTitle = "LeechCraft"
, feedDescription = ""
, feedAuthorName = "0xd34df00d"
, feedAuthorEmail = ""
, feedRoot = ""
}
)
}
match "templates/*" $ compile templateBodyCompiler
--------------------------------------------------------------------------------
type CustomRootBuilder = ListedConfig -> Pattern -> Context String -> (String -> Identifier) -> Rules ()
data RootItem = NoRoot
| DefaultRoot
| CustomRoot CustomRootBuilder
newtype CustomItemsContext = CustomItemsContext { itemsContext :: ListedConfig -> Compiler (Context String) }
data ListedConfig = ListedConfig
{ section :: String
, customTemplate :: Maybe String
, customContext :: Context String
, customItemsContext :: Maybe CustomItemsContext
, listTitle :: String
, listFieldName :: String
, listTemplate :: String
, createRoot :: RootItem
, verPreprocess :: Bool
, subOrder :: forall m a. MonadMetadata m => [Item a] -> m [Item a]
, withRss :: Maybe (Identifier, FeedConfiguration)
}
defListedConfig :: String -> ListedConfig
defListedConfig section = ListedConfig
{ section = section
, customTemplate = Nothing
, customContext = mempty
, customItemsContext = Nothing
, listTitle = toUpper (head section) : tail section
, listFieldName = section
, listTemplate = section
, createRoot = DefaultRoot
, verPreprocess = True
, subOrder = pure
, withRss = Nothing
}
bookListedConfig :: String -> ListedConfig
bookListedConfig section = (defListedConfig section) { customTemplate = Just "book-item"
, customItemsContext = Just CustomItemsContext { itemsContext = sectionsContext sortBookOrder }
}
pluginsRoot :: CustomRootBuilder
pluginsRoot ListedConfig { .. } filesPat ctx tplPath = create [fromFilePath section] $ do
route idRoute
compile $ do
allItems <- loadAll (filesPat .&&. hasNoVersion) >>= subOrder
keyItems <- filterM isKeyPlugin allItems
otherItems <- filterM otherPred allItems
let itemChildren item = filterM (isDirectChild $ bareName item) allItems
children <- do
chs <- mapM itemChildren keyItems
pure $ M.fromList [(defaultTextRoute $ ident item, chs') | item <- keyItems
| chs' <- chs]
let subPluginsCtx = mconcat
[ listFieldWith "subplugins" ctx (\item -> pure $ children M.! bareName item)
, boolField "hasSubplugins" (\item -> not $ null $ children M.! bareName item)
, field "bareName" (pure . bareName)
]
let pluginsListCtx = mconcat
[ constField "title" listTitle
, listField "keyplugins" (subPluginsCtx <> ctx) $ pure keyItems
, listField "otherplugins" ctx $ pure otherItems
, ctx
]
makeItem ""
>>= loadAndApplyTemplate (tplPath listTemplate) pluginsListCtx
>>= loadAndApplyTemplate "templates/default.html" pluginsListCtx
>>= relativizeUrls
where otherPred item = do
isKey <- isKeyPlugin item
parent <- getParentPage item
pure $ not isKey && isNothing parent
bareName = defaultTextRoute . ident
listed :: ImagesDb -> ListedConfig -> Rules ()
listed imagesDb cfg@ListedConfig { .. } = do
when verPreprocess $
match filesPat $ version "preprocess" $ do
route $ customRoute defaultTextRoute
compile getResourceBody
match filesPat $ do
route $ customRoute defaultTextRoute
compile $ do
ctx' <- maybe (pure mempty) (`itemsContext` cfg) customItemsContext
pandocCompilerWithToc imagesDb
>>= (if isJust withRss then saveSnapshot "rss" else pure)
>>= loadAndApplyCustom (ctx' <> ctx)
>>= loadAndApplyTemplate "templates/default.html" (ctx' <> ctx)
>>= relativizeUrls
case createRoot of
NoRoot -> pure ()
DefaultRoot -> create [fromFilePath section] $ do
route idRoute
compile $ do
items <- loadAll (filesPat .&&. hasNoVersion) >>= subOrder
let listCtx = constField "title" listTitle <> listField listFieldName ctx (pure items) <> ctx
makeItem ""
>>= loadAndApplyTemplate (tplPath listTemplate) listCtx
>>= loadAndApplyTemplate "templates/default.html" listCtx
>>= relativizeUrls
CustomRoot rules -> rules cfg filesPat ctx tplPath
case withRss of
Nothing -> pure ()
Just (name, feedConfig) -> create [name] $ do
route idRoute
compile $ do
items <- loadAllSnapshots (filesPat .&&. hasNoVersion) "rss" >>=
fmap (take 10) . recentFirst
ctx' <- maybe (pure mempty) (`itemsContext` cfg) customItemsContext
let feedCtx = ctx' <> ctx <> field "description" (pure . rssizeBody . itemBody)
renderRss feedConfig feedCtx items
where filesPat = fromGlob $ "text/" <> section <> "/*.md"
ctx = customContext <> defaultContext
tplPath path = fromFilePath $ "templates/" <> path <> ".html"
loadAndApplyCustom | Just tpl <- customTemplate = loadAndApplyTemplate (tplPath tpl)
| otherwise = const pure
rssizeBody :: String -> String
rssizeBody = unlines . takeWhile (not . isBadLine) . take 3 . lines
where isBadLine l = "<h2" `isInfixOf` l || "img_assist" `isInfixOf` l
pandocCompilerWithToc :: ImagesDb -> Compiler (Item String)
pandocCompilerWithToc imagesDb = do
item <- getResourceBody
toc <- item /> "toc"
let writeOpts | fromMaybe "nope" toc `elem` ["true", "1", "True"] = writeOptsToc
| otherwise = defaultHakyllWriterOptions
pandocCompilerWithTransform defaultHakyllReaderOptions writeOpts
$ walk $ \case (Code (_, ["img"], _) str) -> compileImageInfo imagesDb str
x -> x
where writeOptsToc = defaultHakyllWriterOptions { writerTableOfContents = True
, writerTOCDepth = 4
, writerTemplate = Just tocTemplate
}
tocTemplate = [r|
$if(toc)$
<aside class="toc bordered">
<details open="open">
<summary>Contents</summary>
$toc$
</details>
</aside>
$endif$
$body$
|]
defaultTextRoute :: Identifier -> FilePath
defaultTextRoute = snd . breakEnd (== '/') . unmdize . toFilePath
loadCurrentPath :: Compiler FilePath
loadCurrentPath = defaultTextRoute . fromFilePath . drop 2 <$> getResourceFilePath
sectionsContext :: Sorter -> ListedConfig -> Compiler (Context a)
sectionsContext sorter cfg@ListedConfig { .. } = do
fp <- loadCurrentPath
thisItem <- getResourceBody
thisParentId <- getParentPage thisItem
allItems <- loadAll (fromGlob ("text/" <> section <> "/*.md") .&&. hasVersion "preprocess") >>= sorter
siblings <- filterM (isSibling thisParentId) allItems
children <- filterM (isDirectChild fp) allItems
shortDescrs <- buildFieldMap "shortdescr" children
let hasShortDescr = boolField "hasShortDescr" $ isJust . join . (`M.lookup` shortDescrs) . ident
parentCtx <- parentPageContext cfg allItems thisParentId
pure $ mconcat
[ listField "siblingSections" (isCurrentPageField fp <> defaultContext) (pure siblings)
, hasPagesField 1 "hasSiblingSections" siblings
, listField "childSections" (hasShortDescr <> defaultContext) (pure children)
, hasPagesField 0 "hasChildSections" children
, parentCtx
]
where hasPagesField len name = boolField name . const . (> len) . length
parentPageContext :: (HasMetadata it, MonadMetadata m) => ListedConfig -> [it] -> Maybe String -> m (Context b)
parentPageContext ListedConfig { .. } _ Nothing = pure $ mconcat
[ constField "parentPageTitle" listTitle
, constField "parentPageUrl" section
]
parentPageContext _ allItems (Just itemId) = do
title <- getMetadataField id' "title"
pure $ mconcat
[ constField "parentPageTitle" $ fromJust title
, constField "parentPageUrl" itemId
]
where id' = ident $ head $ filter ((== itemId) . defaultTextRoute . ident) allItems
unmdize :: String -> String
unmdize s = take (length s - 3) s
sortItemsBy :: (HasMetadata a, MonadMetadata m, Ord b) => (a -> m b) -> [a] -> m [a]
sortItemsBy cmp items = do
items' <- zip items <$> mapM cmp items
pure $ fst <$> sortOn snd items'
type Sorter = forall m a. (HasMetadata a, MonadMetadata m) => [a] -> m [a]
sortBookOrder :: Sorter
sortBookOrder = sortItemsBy $ getBookOrder' 0
| null | https://raw.githubusercontent.com/0xd34df00d/lcss/ebcec028ea1fc68fcf52735eeddc1044a9f84584/site.hs | haskell | ------------------------------------------------------------------------------
# LANGUAGE RankNTypes #
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | # LANGUAGE OverloadedStrings , QuasiQuotes , LambdaCase #
# LANGUAGE RecordWildCards #
# LANGUAGE ParallelListComp #
# LANGUAGE NoMonomorphismRestriction #
import Hakyll
import Text.Pandoc.Options
import Text.Pandoc.Walk
import Text.Pandoc.Definition
import qualified Data.Map.Lazy as M
import Data.List.Extra
import Data.Char
import Data.Maybe
import Control.Monad
import Text.RawString.QQ
import ImageCodesProducer
import CustomFields
main :: IO ()
main = do
imagesDb <- prepareImageDb "images/"
hakyll $ do
match "images/**" $ do
route idRoute
compile copyFileCompiler
match "css/*" $ do
route idRoute
compile compressCssCompiler
match "text/*.md" $ do
route $ customRoute $ dropPrefix "text/" . unmdize . toFilePath
compile $ pandocCompilerWithToc imagesDb
>>= loadAndApplyTemplate "templates/default.html" defaultContext
>>= relativizeUrls
listed imagesDb (bookListedConfig "plugins") { createRoot = CustomRoot pluginsRoot }
listed imagesDb (bookListedConfig "development") { createRoot = NoRoot }
listed imagesDb (bookListedConfig "userguide") { createRoot = NoRoot }
listed imagesDb (bookListedConfig "concepts")
listed imagesDb
(defListedConfig "news") { customContext = dates
, customTemplate = Just "news-item"
, subOrder = recentFirst
, verPreprocess = False
, withRss = Just ("rss.xml",
FeedConfiguration
{ feedTitle = "LeechCraft"
, feedDescription = ""
, feedAuthorName = "0xd34df00d"
, feedAuthorEmail = ""
, feedRoot = ""
}
)
}
match "templates/*" $ compile templateBodyCompiler
type CustomRootBuilder = ListedConfig -> Pattern -> Context String -> (String -> Identifier) -> Rules ()
data RootItem = NoRoot
| DefaultRoot
| CustomRoot CustomRootBuilder
newtype CustomItemsContext = CustomItemsContext { itemsContext :: ListedConfig -> Compiler (Context String) }
data ListedConfig = ListedConfig
{ section :: String
, customTemplate :: Maybe String
, customContext :: Context String
, customItemsContext :: Maybe CustomItemsContext
, listTitle :: String
, listFieldName :: String
, listTemplate :: String
, createRoot :: RootItem
, verPreprocess :: Bool
, subOrder :: forall m a. MonadMetadata m => [Item a] -> m [Item a]
, withRss :: Maybe (Identifier, FeedConfiguration)
}
defListedConfig :: String -> ListedConfig
defListedConfig section = ListedConfig
{ section = section
, customTemplate = Nothing
, customContext = mempty
, customItemsContext = Nothing
, listTitle = toUpper (head section) : tail section
, listFieldName = section
, listTemplate = section
, createRoot = DefaultRoot
, verPreprocess = True
, subOrder = pure
, withRss = Nothing
}
bookListedConfig :: String -> ListedConfig
bookListedConfig section = (defListedConfig section) { customTemplate = Just "book-item"
, customItemsContext = Just CustomItemsContext { itemsContext = sectionsContext sortBookOrder }
}
pluginsRoot :: CustomRootBuilder
pluginsRoot ListedConfig { .. } filesPat ctx tplPath = create [fromFilePath section] $ do
route idRoute
compile $ do
allItems <- loadAll (filesPat .&&. hasNoVersion) >>= subOrder
keyItems <- filterM isKeyPlugin allItems
otherItems <- filterM otherPred allItems
let itemChildren item = filterM (isDirectChild $ bareName item) allItems
children <- do
chs <- mapM itemChildren keyItems
pure $ M.fromList [(defaultTextRoute $ ident item, chs') | item <- keyItems
| chs' <- chs]
let subPluginsCtx = mconcat
[ listFieldWith "subplugins" ctx (\item -> pure $ children M.! bareName item)
, boolField "hasSubplugins" (\item -> not $ null $ children M.! bareName item)
, field "bareName" (pure . bareName)
]
let pluginsListCtx = mconcat
[ constField "title" listTitle
, listField "keyplugins" (subPluginsCtx <> ctx) $ pure keyItems
, listField "otherplugins" ctx $ pure otherItems
, ctx
]
makeItem ""
>>= loadAndApplyTemplate (tplPath listTemplate) pluginsListCtx
>>= loadAndApplyTemplate "templates/default.html" pluginsListCtx
>>= relativizeUrls
where otherPred item = do
isKey <- isKeyPlugin item
parent <- getParentPage item
pure $ not isKey && isNothing parent
bareName = defaultTextRoute . ident
listed :: ImagesDb -> ListedConfig -> Rules ()
listed imagesDb cfg@ListedConfig { .. } = do
when verPreprocess $
match filesPat $ version "preprocess" $ do
route $ customRoute defaultTextRoute
compile getResourceBody
match filesPat $ do
route $ customRoute defaultTextRoute
compile $ do
ctx' <- maybe (pure mempty) (`itemsContext` cfg) customItemsContext
pandocCompilerWithToc imagesDb
>>= (if isJust withRss then saveSnapshot "rss" else pure)
>>= loadAndApplyCustom (ctx' <> ctx)
>>= loadAndApplyTemplate "templates/default.html" (ctx' <> ctx)
>>= relativizeUrls
case createRoot of
NoRoot -> pure ()
DefaultRoot -> create [fromFilePath section] $ do
route idRoute
compile $ do
items <- loadAll (filesPat .&&. hasNoVersion) >>= subOrder
let listCtx = constField "title" listTitle <> listField listFieldName ctx (pure items) <> ctx
makeItem ""
>>= loadAndApplyTemplate (tplPath listTemplate) listCtx
>>= loadAndApplyTemplate "templates/default.html" listCtx
>>= relativizeUrls
CustomRoot rules -> rules cfg filesPat ctx tplPath
case withRss of
Nothing -> pure ()
Just (name, feedConfig) -> create [name] $ do
route idRoute
compile $ do
items <- loadAllSnapshots (filesPat .&&. hasNoVersion) "rss" >>=
fmap (take 10) . recentFirst
ctx' <- maybe (pure mempty) (`itemsContext` cfg) customItemsContext
let feedCtx = ctx' <> ctx <> field "description" (pure . rssizeBody . itemBody)
renderRss feedConfig feedCtx items
where filesPat = fromGlob $ "text/" <> section <> "/*.md"
ctx = customContext <> defaultContext
tplPath path = fromFilePath $ "templates/" <> path <> ".html"
loadAndApplyCustom | Just tpl <- customTemplate = loadAndApplyTemplate (tplPath tpl)
| otherwise = const pure
rssizeBody :: String -> String
rssizeBody = unlines . takeWhile (not . isBadLine) . take 3 . lines
where isBadLine l = "<h2" `isInfixOf` l || "img_assist" `isInfixOf` l
pandocCompilerWithToc :: ImagesDb -> Compiler (Item String)
pandocCompilerWithToc imagesDb = do
item <- getResourceBody
toc <- item /> "toc"
let writeOpts | fromMaybe "nope" toc `elem` ["true", "1", "True"] = writeOptsToc
| otherwise = defaultHakyllWriterOptions
pandocCompilerWithTransform defaultHakyllReaderOptions writeOpts
$ walk $ \case (Code (_, ["img"], _) str) -> compileImageInfo imagesDb str
x -> x
where writeOptsToc = defaultHakyllWriterOptions { writerTableOfContents = True
, writerTOCDepth = 4
, writerTemplate = Just tocTemplate
}
tocTemplate = [r|
$if(toc)$
<aside class="toc bordered">
<details open="open">
<summary>Contents</summary>
$toc$
</details>
</aside>
$endif$
$body$
|]
defaultTextRoute :: Identifier -> FilePath
defaultTextRoute = snd . breakEnd (== '/') . unmdize . toFilePath
loadCurrentPath :: Compiler FilePath
loadCurrentPath = defaultTextRoute . fromFilePath . drop 2 <$> getResourceFilePath
sectionsContext :: Sorter -> ListedConfig -> Compiler (Context a)
sectionsContext sorter cfg@ListedConfig { .. } = do
fp <- loadCurrentPath
thisItem <- getResourceBody
thisParentId <- getParentPage thisItem
allItems <- loadAll (fromGlob ("text/" <> section <> "/*.md") .&&. hasVersion "preprocess") >>= sorter
siblings <- filterM (isSibling thisParentId) allItems
children <- filterM (isDirectChild fp) allItems
shortDescrs <- buildFieldMap "shortdescr" children
let hasShortDescr = boolField "hasShortDescr" $ isJust . join . (`M.lookup` shortDescrs) . ident
parentCtx <- parentPageContext cfg allItems thisParentId
pure $ mconcat
[ listField "siblingSections" (isCurrentPageField fp <> defaultContext) (pure siblings)
, hasPagesField 1 "hasSiblingSections" siblings
, listField "childSections" (hasShortDescr <> defaultContext) (pure children)
, hasPagesField 0 "hasChildSections" children
, parentCtx
]
where hasPagesField len name = boolField name . const . (> len) . length
parentPageContext :: (HasMetadata it, MonadMetadata m) => ListedConfig -> [it] -> Maybe String -> m (Context b)
parentPageContext ListedConfig { .. } _ Nothing = pure $ mconcat
[ constField "parentPageTitle" listTitle
, constField "parentPageUrl" section
]
parentPageContext _ allItems (Just itemId) = do
title <- getMetadataField id' "title"
pure $ mconcat
[ constField "parentPageTitle" $ fromJust title
, constField "parentPageUrl" itemId
]
where id' = ident $ head $ filter ((== itemId) . defaultTextRoute . ident) allItems
unmdize :: String -> String
unmdize s = take (length s - 3) s
sortItemsBy :: (HasMetadata a, MonadMetadata m, Ord b) => (a -> m b) -> [a] -> m [a]
sortItemsBy cmp items = do
items' <- zip items <$> mapM cmp items
pure $ fst <$> sortOn snd items'
-- | A monadic sorting strategy over metadata-carrying items.
type Sorter = forall m a. (HasMetadata a, MonadMetadata m) => [a] -> m [a]

-- | Sort by the "bookorder" metadata key, defaulting missing entries to 0.
sortBookOrder :: Sorter
sortBookOrder = sortItemsBy $ getBookOrder' 0
|
90cb4f8463afae09073c27c7d71cd4a7bc2d182d8b2ba3e74ea59229870aeea0 | larcenists/larceny | num-iters.scm | (define boyer-iters 10)
(define browse-iters 600)
(define conform-iters 20)
(define cpstak-iters 300)
(define ctak-iters 30)
(define dderiv-iters 800000)
(define deriv-iters 800000)
(define destruc-iters 300)
(define diviter-iters 400000)
(define divrec-iters 400000)
(define earley-iters 150)
(define fft-iters 2000)
(define fib-iters 50)
(define fibfp-iters 50)
(define maze-iters 2500)
(define mazefun-iters 100)
(define mbrot-iters 30)
(define nucleic-iters 10)
(define peval-iters 100)
(define pnpoly-iters 10000)
(define puzzle-iters 100)
(define ray-iters 10)
(define scheme-iters 3000)
(define simplex-iters 60000)
(define slatex-iters 20)
(define sum-iters 10000)
(define sumfp-iters 10000)
(define tak-iters 1000)
(define takl-iters 200)
(define trav1-iters 50)
(define trav2-iters 10)
(define triangl-iters 10)
(define smlboyer-iters 10)
(define nboyer-iters 3) ; problem size, not iterations
(define dynamic-iters 10)
(define graphs-iters 10)
(define lattice-iters 1)
(define nbody-iters 1)
(define quicksort-iters 10)
(define perm9-iters 5)
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/test/Stress/src/num-iters.scm | scheme | problem size, not iterations | (define boyer-iters 10)
(define browse-iters 600)
(define conform-iters 20)
(define cpstak-iters 300)
(define ctak-iters 30)
(define dderiv-iters 800000)
(define deriv-iters 800000)
(define destruc-iters 300)
(define diviter-iters 400000)
(define divrec-iters 400000)
(define earley-iters 150)
(define fft-iters 2000)
(define fib-iters 50)
(define fibfp-iters 50)
(define maze-iters 2500)
(define mazefun-iters 100)
(define mbrot-iters 30)
(define nucleic-iters 10)
(define peval-iters 100)
(define pnpoly-iters 10000)
(define puzzle-iters 100)
(define ray-iters 10)
(define scheme-iters 3000)
(define simplex-iters 60000)
(define slatex-iters 20)
(define sum-iters 10000)
(define sumfp-iters 10000)
(define tak-iters 1000)
(define takl-iters 200)
(define trav1-iters 50)
(define trav2-iters 10)
(define triangl-iters 10)
(define smlboyer-iters 10)
(define dynamic-iters 10)
(define graphs-iters 10)
(define lattice-iters 1)
(define nbody-iters 1)
(define quicksort-iters 10)
(define perm9-iters 5)
|
fa069f5eadfcec9589eef05a68753f8b08d9026ab82059c1b4e7351b15ad3406 | lispbuilder/lispbuilder | sdl-util.lisp | SDL ( Simple Media Layer ) library using CFFI for foreign function interfacing ...
( C)2006 Justin Heyes - Jones < > and < >
Thanks to and
;; see COPYING for license
This file contains some useful functions for using SDL from Common lisp
;; using sdl.lisp (the CFFI wrapper)
(in-package #:lispbuilder-sdl-base)
;;; w
(defmacro with-init (init-flags &body body)
  "Attempts to initialize the SDL subsystems using SDL-Init.
Automatically shuts down the SDL subsystems using SDL-Quit upon normal application termination or
if any fatal error occurs within &body.
init-flags can be any combination of SDL-INIT-TIMER, SDL-INIT-AUDIO, SDL-INIT-VIDEO, SDL-INIT-CDROM,
SDL-INIT-JOYSTICK, SDL-INIT-NOPARACHUTE, SDL-INIT-EVENTTHREAD or SDL-INIT-EVERYTHING."
  ;; BODY runs only when INIT-SDL succeeds; UNWIND-PROTECT guarantees
  ;; SDL-Quit runs on both normal exit and non-local exit from BODY.
  `(block nil
    (unwind-protect
        (when (init-sdl ,@(when init-flags
                            `(:flags (list ,@init-flags))))
          ,@body)
      (sdl-cffi::SDL-Quit))))
(defun init-sdl (&key (flags nil))
  "Initialize the SDL subsystems named by FLAGS; T on success, NIL otherwise.
SDL-Init returns 0 on success."
  (let ((result (sdl-cffi::SDL-Init (set-flags flags))))
    (and (equal 0 result) t)))
(defun init-p (&key (flags))
  "Return T when every subsystem in FLAGS is already initialized, else NIL."
  (when (equal (set-flags flags)
               (sdl-cffi::sdl-was-init (set-flags flags)))
    t))
(defun set-flags (&rest keyword-args)
  "OR together flag values. Accepts either a single list as first argument
\(whose elements are EVALed) or the flags spread as rest arguments."
  (let ((flag-values
          (if (listp (first keyword-args))
              (mapcar #'eval (first keyword-args))
              keyword-args)))
    (apply #'logior flag-values)))
(defun load-image (filename)
  "load in the supplied filename, must be a bmp file"
  ;; (format t "loading ~a~%" filename)
  (let ((file (namestring filename)))
    ;; Guard: FILE must be a string naming an existing file.
    (unless (and (stringp file) (probe-file file))
      (error "File ~A does not exist." file))
    (sdl-cffi::SDL-Load-BMP-RW (sdl-cffi::sdl-RW-From-File file "rb") 1)))
| null | https://raw.githubusercontent.com/lispbuilder/lispbuilder/589b3c6d552bbec4b520f61388117d6c7b3de5ab/lispbuilder-sdl/base/sdl-util.lisp | lisp | see COPYING for license
using sdl.lisp (the CFFI wrapper)
w
LJC: Make sure filename is a string and the filename exists. | SDL ( Simple Media Layer ) library using CFFI for foreign function interfacing ...
( C)2006 Justin Heyes - Jones < > and < >
Thanks to and
This file contains some useful functions for using SDL from Common lisp
(in-package #:lispbuilder-sdl-base)
(defmacro with-init (init-flags &body body)
"Attempts to initialize the SDL subsystems using SDL-Init.
Automatically shuts down the SDL subsystems using SDL-Quit upon normal application termination or
if any fatal error occurs within &body.
init-flags can be any combination of SDL-INIT-TIMER, SDL-INIT-AUDIO, SDL-INIT-VIDEO, SDL-INIT-CDROM,
SDL-INIT-JOYSTICK, SDL-INIT-NOPARACHUTE, SDL-INIT-EVENTTHREAD or SDL-INIT-EVERYTHING."
`(block nil
(unwind-protect
(when (init-sdl ,@(when init-flags
`(:flags (list ,@init-flags))))
,@body)
(sdl-cffi::SDL-Quit))))
(defun init-sdl (&key (flags nil))
(if (equal 0 (sdl-cffi::SDL-Init (set-flags flags)))
t
nil))
(defun init-p (&key (flags))
(if (equal (set-flags flags)
(sdl-cffi::sdl-was-init (set-flags flags)))
t
nil))
(defun set-flags (&rest keyword-args)
(if (listp (first keyword-args))
(let ((keywords
(mapcar #'(lambda (x)
(eval x))
(first keyword-args))))
(apply #'logior keywords))
(apply #'logior keyword-args)))
(defun load-image (filename)
"load in the supplied filename, must be a bmp file"
( format t " loading ~a~% " filename )
(let ((file (namestring filename)))
(sdl-cffi::SDL-Load-BMP-RW (sdl-cffi::sdl-RW-From-File file "rb") 1)
(error "File ~A does not exist." file))))
|
88a571e818ed767bf6f9cab305c8bdcfeb863b52ad189c8d563dcaa6ee3f8501 | facebook/pyre-check | taintTransforms.mli |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(* An ordered list of taint transforms. *)
type t = TaintTransform.t list [@@deriving compare, eq, hash, sexp]

module Order : sig
  type t =
    (* A:B:C represents the transforms for x in `x = A(B(C(taint)))` *)
    | Forward
    (* A:B:C represents the transforms for x in `taint = C(B(A(x)))` *)
    | Backward
  [@@deriving show]
end

(* The empty transform list. *)
val empty : t
val is_empty : t -> bool
(* Combine call-site-local transforms with globally accumulated ones. *)
val merge : local:t -> global:t -> t
val of_named_transforms : TaintTransform.t list -> t
val get_named_transforms : t -> TaintTransform.t list
(* Split a list of transforms into sanitizers present at the beginning and the rest. *)
val split_sanitizers : t -> SanitizeTransformSet.t * t
(* Return sanitizers that are still valid (i.e., before a named transform). *)
val get_sanitize_transforms : t -> SanitizeTransformSet.t
(* Discard all sanitizers, regardless of whether they are still valid or not. *)
val discard_sanitize_transforms : t -> t
val discard_sanitize_source_transforms : t -> t
val discard_sanitize_sink_transforms : t -> t
(* Pretty-print a taint kind together with its local/global transforms. *)
val pp_kind
  :  formatter:Format.formatter ->
  pp_base:(Format.formatter -> 'a -> unit) ->
  local:t ->
  global:t ->
  base:'a ->
  unit
val show_transforms : t -> string
(* See transform operations in `taintTransformOperation.mli`. *)
module Set : Stdlib.Set.S with type elt = t
| null | https://raw.githubusercontent.com/facebook/pyre-check/fec4eee4c4c49027b624d02a2191f76fe798213c/source/interprocedural_analyses/taint/taintTransforms.mli | ocaml | A:B:C represents the transforms for x in `x = A(B(C(taint)))`
A:B:C represents the transforms for x in `taint = C(B(A(x)))`
Split a list of transforms into sanitizers present at the beginning and the rest.
Return sanitizers that are still valid (i.e, before a named transform.
Discard all sanitizers, regardless of whether they are still valid or not.
See transform operations in `taintTransformOperation.mli`. |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type t = TaintTransform.t list [@@deriving compare, eq, hash, sexp]
module Order : sig
type t =
| Forward
| Backward
[@@deriving show]
end
val empty : t
val is_empty : t -> bool
val merge : local:t -> global:t -> t
val of_named_transforms : TaintTransform.t list -> t
val get_named_transforms : t -> TaintTransform.t list
val split_sanitizers : t -> SanitizeTransformSet.t * t
val get_sanitize_transforms : t -> SanitizeTransformSet.t
val discard_sanitize_transforms : t -> t
val discard_sanitize_source_transforms : t -> t
val discard_sanitize_sink_transforms : t -> t
val pp_kind
: formatter:Format.formatter ->
pp_base:(Format.formatter -> 'a -> unit) ->
local:t ->
global:t ->
base:'a ->
unit
val show_transforms : t -> string
module Set : Stdlib.Set.S with type elt = t
|
eabd4ade79c1884bb346f9c5f4df4795d5094db45c1f8fbbdfefbce2a930c980 | Innf107/polaris | modules.ml | open Syntax
let _export_category, trace_exports = Trace.make ~flag:"exports" ~prefix:"Exports"
(** Collect the file paths of all [Import] nodes inside a module expression. *)
let extract_import_paths_mod =
  Parsed.MExpr.collect_list begin function
    | Import (_, path) -> [path]
    | _ -> []
  end
(** Collect import paths from every module bound with [LetModuleSeq] in an expression. *)
let extract_import_paths =
  Parsed.Expr.collect_list begin function
    | LetModuleSeq (_, _, mexpr) -> extract_import_paths_mod mexpr
    | _ -> []
  end
(** Build the export map of a module from its header's export list and the
    globally inferred environment.

    Exported variables must already have an inferred global type (panics
    otherwise). Exported type names are looked up first among data
    definitions, then among type aliases; a name found in neither is
    silently dropped.

    NOTE(review): [exprs] and [rename_scope] are accepted but unused here —
    confirm whether callers still need to pass them. *)
let build_export_map header exprs rename_scope global_env =
  let variable_to_map_entry = function
    | Typed.ExportVal (_, name) ->
      let name_entry = (Name.original_name name, name) in
      let ty_entry = match NameMap.find_opt name Types.(global_env.var_types) with
        | Some ty -> (name, ty)
        | None -> Util.panic __LOC__ ("Exported variable without inferred global type: " ^ Name.pretty name)
      in
      Some (name_entry, ty_entry)
    | _ -> None
  in
  let data_con_to_map_entry = function
    | Typed.ExportType (_, name) ->
      begin match NameMap.find_opt name Types.(global_env.data_definitions) with
      | None -> None
      | Some (params, underlying) ->
        let tycon_entry = (name.name, (name, List.length params, DataConSort)) in
        let data_con_entry = (name, (params, underlying)) in
        Some (tycon_entry, data_con_entry)
      end
    | _ -> None
  in
  let type_alias_to_map_entry = function
    | Typed.ExportType (_, name) ->
      begin match NameMap.find_opt name Types.(global_env.type_aliases) with
      | None -> None
      | Some (params, underlying) ->
        let tycon_entry = (name.name, (name, List.length params, TypeAliasSort)) in
        let data_con_entry = (name, (params, underlying)) in
        Some (tycon_entry, data_con_entry)
      end
    | _ -> None
  in
  let exported_variables, exported_variable_types = List.split (List.filter_map variable_to_map_entry Typed.(header.exports)) in
  let exported_ty_constrs_data, exported_data_definitions = List.split (List.filter_map data_con_to_map_entry Typed.(header.exports)) in
  let exported_ty_constrs_alias, exported_type_aliases = List.split (List.filter_map type_alias_to_map_entry header.exports) in
  (* Data constructors and aliases share one constructor namespace. *)
  let exported_ty_constructors = exported_ty_constrs_data @ exported_ty_constrs_alias in
  trace_exports (lazy ("Exported variables: [" ^ String.concat ", " (List.map (fun (x, ty) -> "(" ^ Name.pretty x ^ " : " ^ Typed.pretty_type ty ^ ")") exported_variable_types) ^ "]"));
  trace_exports (lazy ("Exported type constructors: [" ^ String.concat ", " (List.map (fun (_, (x, _, _)) -> Name.pretty x) exported_ty_constructors) ^ "]"));
  Typed.{
    exported_variables = StringMap.of_seq (List.to_seq exported_variables);
    exported_variable_types = NameMap.of_seq (List.to_seq exported_variable_types);
    exported_ty_constructors = StringMap.of_seq (List.to_seq exported_ty_constructors);
    exported_data_definitions = NameMap.of_seq (List.to_seq exported_data_definitions);
    exported_type_aliases = NameMap.of_seq (List.to_seq exported_type_aliases);
  }
| null | https://raw.githubusercontent.com/Innf107/polaris/d273b97fa607dd4c70a17a07abd230a921e3817c/src/modules.ml | ocaml | open Syntax
let _export_category, trace_exports = Trace.make ~flag:"exports" ~prefix:"Exports"
let extract_import_paths_mod =
Parsed.MExpr.collect_list begin function
| Import (_, path) -> [path]
| _ -> []
end
let extract_import_paths =
Parsed.Expr.collect_list begin function
| LetModuleSeq (_, _, mexpr) -> extract_import_paths_mod mexpr
| _ -> []
end
let build_export_map header exprs rename_scope global_env =
let variable_to_map_entry = function
| Typed.ExportVal (_, name) ->
let name_entry = (Name.original_name name, name) in
let ty_entry = match NameMap.find_opt name Types.(global_env.var_types) with
| Some ty -> (name, ty)
| None -> Util.panic __LOC__ ("Exported variable without inferred global type: " ^ Name.pretty name)
in
Some (name_entry, ty_entry)
| _ -> None
in
let data_con_to_map_entry = function
| Typed.ExportType (_, name) ->
begin match NameMap.find_opt name Types.(global_env.data_definitions) with
| None -> None
| Some (params, underlying) ->
let tycon_entry = (name.name, (name, List.length params, DataConSort)) in
let data_con_entry = (name, (params, underlying)) in
Some (tycon_entry, data_con_entry)
end
| _ -> None
in
let type_alias_to_map_entry = function
| Typed.ExportType (_, name) ->
begin match NameMap.find_opt name Types.(global_env.type_aliases) with
| None -> None
| Some (params, underlying) ->
let tycon_entry = (name.name, (name, List.length params, TypeAliasSort)) in
let data_con_entry = (name, (params, underlying)) in
Some (tycon_entry, data_con_entry)
end
| _ -> None
in
let exported_variables, exported_variable_types = List.split (List.filter_map variable_to_map_entry Typed.(header.exports)) in
let exported_ty_constrs_data, exported_data_definitions = List.split (List.filter_map data_con_to_map_entry Typed.(header.exports)) in
let exported_ty_constrs_alias, exported_type_aliases = List.split (List.filter_map type_alias_to_map_entry header.exports) in
let exported_ty_constructors = exported_ty_constrs_data @ exported_ty_constrs_alias in
trace_exports (lazy ("Exported variables: [" ^ String.concat ", " (List.map (fun (x, ty) -> "(" ^ Name.pretty x ^ " : " ^ Typed.pretty_type ty ^ ")") exported_variable_types) ^ "]"));
trace_exports (lazy ("Exported type constructors: [" ^ String.concat ", " (List.map (fun (_, (x, _, _)) -> Name.pretty x) exported_ty_constructors) ^ "]"));
Typed.{
exported_variables = StringMap.of_seq (List.to_seq exported_variables);
exported_variable_types = NameMap.of_seq (List.to_seq exported_variable_types);
exported_ty_constructors = StringMap.of_seq (List.to_seq exported_ty_constructors);
exported_data_definitions = NameMap.of_seq (List.to_seq exported_data_definitions);
exported_type_aliases = NameMap.of_seq (List.to_seq exported_type_aliases);
}
|
|
4e9e96204ca6016084d5f18c675464fda44cda331ab2fe03ae033fe705717190 | olleharstedt/pholyglot | Infer.ml | (*
* Module to infer types of local variables
* Both inferring types of expression, but also iterating the ast to replace Infer_me with proper types.
*)
open Printf
open Ast
module Log = Dolog.Log
(* Raised whenever inference or checking finds a type mismatch. *)
exception Type_error of string

(**
 * Global variable
 * Should only be used by Function_call expression to replace type variables in Function_type
 * TODO: Would this work when wrapping multiple generic functions in one call?
 *)
let t_vars_tbl : (string, typ) Hashtbl.t = Hashtbl.create 10
(** Resolve the type of an lvalue (assignment target) by looking up
    identifiers and class properties in the namespace.
    Raises [Type_error] on any unknown identifier, class or property. *)
let rec typ_of_lvalue ns lv : typ =
    Log.debug "%s %s" "typ_of_lvalue" (show_lvalue lv);
    match lv with
    | Variable id ->
        begin match Namespace.find_identifier ns id with
        | Some typ -> typ
        | None -> raise (Type_error (sprintf "typ_of_lvalue: Could not find function type %s in namespace" id))
        end
    (* TODO: Access chain like $a->b->c *)
    (* TODO: id is expression? *)
    | Object_access (id, Property_access prop_name) ->
        (* Resolve the variable to a class type, then the class declaration,
           then the property inside it. *)
        let class_type_name = match Namespace.find_identifier ns id with
            | Some (Class_type (c, a)) -> c
            | None -> raise (Type_error (sprintf "typ_of_lvalue: Could not find class type %s in namespace" id))
        in
        let (k, props, methods) = match Namespace.find_class ns class_type_name with
            | Some p -> p
            | None -> raise (Type_error (sprintf "typ_of_lvalue: Found no class declarion %s in namespace" class_type_name))
        in
        match List.find_opt (fun (name, t) -> prop_name = name) props with
        | Some (n, t) -> t
        | None -> raise (Type_error (sprintf "typ_of_lvalue: Could not find propert with name %s in class %s" prop_name id))
(** Compute the type of an expression in namespace [ns].
    Raises [Type_error] for ill-typed expressions and lookup failures;
    uses [failwith] for shapes not yet supported. *)
let rec typ_of_expression (ns : Namespace.t) (expr : expression) : typ =
    Log.debug "%s %s" "typ_of_expression" (show_expression expr);
    match expr with
    | Num _ -> Int
    | Num_float _ -> Float
    | String s -> String
    | Plus (e, f)
    | Minus (e, f)
    | Times (e, f)
    | Div (e, f) ->
        (* Both operands must have the same numeric type. *)
        let e_typ = typ_of_expression ns e in
        let f_typ = typ_of_expression ns f in
        if e_typ <> f_typ then
            raise (Type_error "typ_of_expression: Mixing float and int in arith expression")
        else
            e_typ
    | Concat (e, f) ->
        (* String concatenation requires both sides to be strings. *)
        let check e =
            match typ_of_expression ns e with
            | String -> ()
            | _ -> raise (Type_error "typ_of_expression: Found non-string in concat")
        in
        check e;
        check f;
        String
    | Parenth e -> typ_of_expression ns e
    | Array_init (Infer_me, length, exprs) ->
        (* All elements must share one type; length is taken from the literal. *)
        if List.length exprs = 0 then raise (Type_error "array_init cannot be empty list");
        let first_elem = List.nth exprs 0 in
        if List.for_all (fun x -> typ_of_expression ns x = typ_of_expression ns first_elem) exprs then
            (* TODO: Should be able to update this to Dynamic_array *)
            Fixed_array (typ_of_expression ns first_elem, Some (List.length exprs))
        else
            (* TODO: Tuple here *)
            raise (Type_error "not all element in array_init have the same type")
    | Array_init (t, _, _) -> t
    | New (alloc_strat, t, exprs) -> t
    (* $point[0]-> ? *)
    | Object_access (Array_access (id, _), Property_access prop_name)
    (* $point->x *)
    | Object_access (Variable id, Property_access prop_name) -> begin
        match Namespace.find_identifier ns id with
        | Some (Fixed_array (Class_type (class_type_name, _), _))
        | Some (Class_type (class_type_name, _)) -> begin
            let (k, props, methods) = match Namespace.find_class ns class_type_name with
                | Some p -> p
                | None -> raise (Type_error (sprintf "typ_of_expression: Found no class declarion %s in namespace" class_type_name))
            in
            match List.find_opt (fun (name, t) -> prop_name = name) props with
            | Some (n, t) -> t
            | None -> raise (Type_error (sprintf "typ_of_expression: Could not find propert with name %s in class %s" prop_name id))
        end
        | None -> raise (Type_error (sprintf "typ_of_expression: Could not find class type %s in namespace" id))
    end
    (* $point->getX() *)
    | Method_call {return_type = Infer_me; method_name; left_hand = Variable class_name}
    (*| Object_access (Array_access (class_name, _), Method_call {return_type = Infer_me; method_name})*)
    | Object_access (Variable class_name, Method_call {return_type = Infer_me; method_name}) -> begin
        let class_type_name = match Namespace.find_identifier ns class_name with
            | Some (Class_type (c, a)) -> c
            | None -> begin
                raise (Type_error (sprintf "typ_of_expression method call: Could not find identifier %s in namespace" class_name))
            end
        in
        let (k, props, methods) = match Namespace.find_class ns class_type_name with
            | Some class_decl -> class_decl
            | None -> raise (Type_error (sprintf "typ_of_expression: Found no class declarion %s in namespace" class_type_name))
        in
        match List.find_opt (fun {name} -> method_name = name) methods with
        | Some {
            function_type = Function_type {return_type; arguments}
          }
            -> return_type
        | None -> raise (Type_error (sprintf "typ_of_expression: Could not find method with name %s in class %s" method_name class_type_name))
    end
    (* TODO: Will this work with chained calls, like $obj->foo()->moo()? *)
    | Object_access (class_id, Method_call {return_type}) -> return_type
    | Variable id -> begin
        match Namespace.find_identifier ns id with
        | Some p -> p
        | None -> raise (Type_error (sprintf "typ_of_expression: Could not find variable with name %s" id))
    end
    | Function_call (_, id, _) -> begin
        match Namespace.find_function ns id with
        | Some (Function_type {return_type; arguments}) -> return_type
        | Some t -> failwith ("not a function: " ^ show_typ t)
        | _ -> failwith ("found no function declared with name " ^ id)
    end
    | Array_access (id, expr) -> begin
        (* Element type of a fixed or dynamic array bound to [id]. *)
        Log.debug "%s %s" "Array_access " id;
        match Namespace.find_identifier ns id with
        | Some (Fixed_array (t, length)) -> t
        | Some (Dynamic_array t) -> t
        | _ -> raise (Type_error (sprintf "typ_of_expression: Found no array with id %s in namespace, or could not infer type" id))
    end
    | e -> failwith ("typ_of_expression: " ^ (show_expression e))
(**
 * Replace the allocation strategy of a class type with [def],
 * recursing through array wrappers.
 *
 * Params always have Polymorph alloc strategy for now.
 *)
and infer_arg_typ t def =
    Log.debug "infer_arg_typ %s" (show_typ t);
    match t with
    | Class_type (s, alloc_strat) -> begin
        Log.debug "infer_arg_typ Found Class_type";
        Class_type (s, def)
    end
    | Fixed_array (t, n) -> Fixed_array (infer_arg_typ t def, n)
    | Dynamic_array t -> Dynamic_array (infer_arg_typ t def)
    | t -> t
(** Map a type to the [Constant] expression naming it at runtime;
    raises [Type_error] for unsupported types. *)
let typ_to_constant (t : Ast.typ) : Ast.expression =
    let constant_name =
        match t with
        | Int -> "int"
        | Float -> "float"
        | String -> "string"
        | Class_type (s, _) -> s
        | _ -> raise (Type_error ("typ_to_constant: Not supported type: " ^ show_typ t))
    in
    Constant constant_name
(** Whether [t] contains a [Type_variable] anywhere, recursing through
    function types and both array wrappers. *)
let rec typ_contains_type_variable (t : typ): bool =
    Log.debug "typ_contains_type_variable %s" (show_typ t);
    match t with
    | Type_variable _ -> true
    | Function_type {return_type; arguments} ->
        if typ_contains_type_variable return_type then true
        else List.exists typ_contains_type_variable arguments
    | Dynamic_array elem_t
    | Fixed_array (elem_t, _) -> typ_contains_type_variable elem_t
    | _ -> false
(** Name of the type variable inside [t], if any; arrays are unwrapped.
    Function types are not supported. *)
let rec get_type_variable (t : typ): string option =
    match t with
    | Type_variable name -> Some name
    | Dynamic_array inner
    | Fixed_array (inner, _) -> get_type_variable inner
    | Function_type _ -> failwith "get_type_variable: not supported: Function_type"
    | _ -> None
(** Takes a typ and replaces every type variable in it with the concrete
    type previously recorded in the global [t_vars_tbl].
    Raises [Type_error] when a type variable has no recorded resolution. *)
let rec replace_type_variables t : typ =
    Log.debug "replace_type_variables %s" (show_typ t);
    match t with
    | Function_type {return_type; arguments} ->
        Function_type {
            return_type = replace_type_variables return_type;
            arguments = List.map (fun a -> replace_type_variables a) arguments;
        }
    | Type_variable s -> begin
        match Hashtbl.find_opt t_vars_tbl s with
        | Some t -> t
        | None -> raise (Type_error ("Found no resolved type variable with name " ^ s))
    end
    (* Bug fix: keep the array constructor instead of unwrapping to the bare
       element type, and also recurse into fixed-size arrays — mirroring
       typ_contains_type_variable and infer_arg_typ, which both treat the two
       array wrappers uniformly. *)
    | Dynamic_array t -> Dynamic_array (replace_type_variables t)
    | Fixed_array (t, n) -> Fixed_array (replace_type_variables t, n)
    | t -> t
(**
 * Figure out the typ of type variables using namespace, typ and expression list
 * Used for Function_type
 *
 * Side effect: clears and repopulates the global [t_vars_tbl] before
 * substituting via [replace_type_variables].
 * Raises [Invalid_argument] (via List.iter2) when [exprs] and the
 * declared arguments differ in length.
 *)
let resolve_type_variable ns t exprs : typ =
    Log.debug "resolve_type_variable %s" (show_typ t);
    match t with
    | Function_type {return_type; arguments} ->
        Hashtbl.clear t_vars_tbl;
        (* Record, for every argument slot holding a type variable, the
           concrete type of the expression passed in that slot. *)
        let populate_type_variables = fun arg_t expr ->
            match get_type_variable arg_t with
            | Some t_var_name -> begin
                let t = typ_of_expression ns expr in
                Log.debug "resolve_type_variable t = %s" (show_typ t);
                Hashtbl.add t_vars_tbl t_var_name t
            end
            | None -> ()
        in
        List.iter2 populate_type_variables arguments exprs;
        replace_type_variables t
    | _ -> raise (Type_error "resolve_type_variable: No Function_type")
(**
 * Replace Infer_me and type variables inside expr using bindings in namespace ns.
 * Also lowers array literals/reads to calls to the array_make/array_get helpers.
 *)
let rec infer_expression ns expr : expression =
    Log.debug "%s %s" "infer_expression" (show_expression expr);
    match expr with
    (* This is allowed to enable infering aliasing, like $b = $a *)
    | Variable id -> Variable id
    | Function_call (Infer_me, name, params) -> begin
        let inf = fun e -> infer_expression ns e in
        let params = List.map inf params in
        match Namespace.find_function ns name with
        | Some (Function_type {return_type; arguments} as fun_t) ->
            if typ_contains_type_variable fun_t then begin
                (* Generic function: resolve its type variables from the
                   concrete argument types before tagging the call site. *)
                let resolved_fun_t = resolve_type_variable ns fun_t params in
                Log.debug "resolved_fun_t = %s" (show_typ resolved_fun_t);
                Function_call (resolved_fun_t, name, params)
            end else
                Function_call (fun_t, name, params)
        | Some t -> failwith ("not a function: " ^ show_typ t)
        | _ -> failwith ("infer_expression: found no function declared with name " ^ name)
    end
    | Method_call {return_type = Infer_me; method_name; left_hand = Variable object_name; args} as e -> begin
        let t = typ_of_expression ns e in
        Method_call {return_type = t; method_name; left_hand = Variable object_name; args}
    end
    | Object_access (leftside_expr, expr) -> Object_access (infer_expression ns leftside_expr, infer_expression ns expr)
    (* Array literal: lowered to an array_make call carrying the element
       type constant, the length and the element expressions. *)
    | Array_init (Infer_me, _, exprs) as e ->
        let length = List.length exprs in
        let inf = fun e -> typ_of_expression ns e in
        let exprs_types = List.map inf exprs in
        let t = typ_of_expression ns e in
        (* TODO: Why is this needed? *)
        let tt = match t with Fixed_array (t, _) -> t in
        Function_call (
            Function_type {
                return_type = t;
                arguments = Constant :: Int :: exprs_types;
            },
            "array_make",
            typ_to_constant tt :: Num length :: exprs
        )
    (* Array read: lowered to an array_get call. *)
    | Array_access (id, expr) as e ->
        let t = typ_of_expression ns e in
        Function_call (
            Function_type {
                return_type = t;
                arguments = Constant :: Dynamic_array t :: Int :: [];
            },
            "array_get",
            (* Bug fix: removed a stray ';' that terminated this argument
               list and made the arm a syntax error. *)
            typ_to_constant t :: Variable id :: expr :: []
        )
    (* TODO: Memory context *)
    (* TODO: /** @mem moo */ *)
    | New (alloc_strat, Class_type (class_name, Infer_allocation_strategy), args) -> New (alloc_strat, Class_type (class_name, Boehm), args)
    | e -> e
(** Infer every expression in the list under namespace [ns]. *)
let infer_expressions ns exprs = List.map (infer_expression ns) exprs
(**
 * Parse format string from printf and return a list of types
 *)
let infer_printf (s : string) : Ast.typ list =
    Log.debug "infer_printf";
    (* "%%" is a literal percent sign, not a conversion; drop it first. *)
    let cleaned = Str.global_replace (Str.regexp "%%") "" s in
    let spec_re = Str.regexp "%[sdf]" in
    let rec collect pos =
        match Str.search_forward spec_re cleaned pos with
        | exception Not_found -> []
        | found_at ->
            let spec =
                match Str.matched_string cleaned with
                | "%s" -> String_literal
                | "%d" -> Int
                | "%f" -> Float
            in
            spec :: collect (found_at + 1)
    in
    collect 0
(**
 * Returns string list of all type variables in t.
 *
 * Currently unused; kept for reference:
 *
 * let rec find_all_type_variables t : string list = match t with
 *     | Function_type {return_type; arguments} ->
 *         find_all_type_variables return_type @ List.map (fun x -> find_all_type_variables x) arguments
 *     | Type_variable tv -> [tv]
 *     | _ -> []
 *)
(** Find the [DocParam] docblock comment whose parameter name equals [id]. *)
let find_docblock (l : docblock_comment list) (id : string) : docblock_comment option =
    let matches_id = function
        | DocParam (param_id, _) -> String.equal id param_id
        | _ -> false
    in
    List.find_opt matches_id l
(**
 * docblock takes precedence, because it's more precise, unless there's a conflict
 *
 * A fixed-size by-ref array parameter documented as a dynamic array in the
 * docblock is upgraded to that dynamic array type; all other parameters get
 * the Polymorph allocation strategy via [infer_arg_typ].
 *)
let unify_params_with_docblock (params : param list) (comments : docblock_comment list) : param list =
    (* Are all params represented in the docblock? *)
    let map = (fun p -> match p with
        | RefParam (id, Fixed_array (t, size_option)) ->
            begin match find_docblock comments id with
            | Some (DocParam (_, Dynamic_array (t_))) -> RefParam (id, Dynamic_array (infer_arg_typ t_ Polymorph))
            (* Bug fix: a docblock @param with any non-Dynamic_array type used
               to hit a Match_failure here; fall back to the declared param,
               matching the behavior of the no-docblock case. *)
            | Some _
            | None -> p
            end
        | RefParam (id, t) -> RefParam (id, infer_arg_typ t Polymorph)
        | Param (id, t) -> Param (id, infer_arg_typ t Polymorph)
    ) in
    List.map map params
(** Infer typ inside Param/RefParam: apply the Polymorph allocation
    strategy to the parameter's type via [infer_arg_typ]. *)
let infer_arg_typ_param p : param =
    Log.debug "infer_arg_typ_param %s" (show_param p);
    match p with
    | Param (id, t) ->
        let new_t = infer_arg_typ t Polymorph in
        Param (id, new_t)
    | RefParam (id, t) ->
        let new_t = infer_arg_typ t Polymorph in
        RefParam (id, new_t)
(**
 * Infer types inside Ast.statement
 *
 * Mutates [ns] by registering newly inferred identifiers (assignment
 * targets, foreach value/key variables).
 *)
let rec infer_stmt (s : statement) (ns : Namespace.t) : statement =
    Log.debug "infer_stmt: %s" (show_statement s);
    match s with
    | Assignment (Infer_me, Variable id, expr) ->
        Log.debug "infer_stmt: assignment to id %s" id;
        let t = typ_of_expression ns expr in
        let expr = infer_expression ns expr in
        let t = replace_type_variables t in
        (* Locals default to the Boehm GC allocation strategy. *)
        let t = infer_arg_typ t Boehm in
        Log.debug "id %s typ = %s" id (show_typ t);
        Namespace.add_identifier ns id t;
        Assignment (t, Variable id, expr)
    (* TODO: this with lvalue *)
    (* TODO: variable_name is expression? *)
    | Assignment (Infer_me, Object_access (variable_name, Property_access prop_name), expr) ->
        let t = typ_of_expression ns expr in
        (* Check if class def has property with name and type *)
        let class_name = match Namespace.find_identifier ns variable_name with
            | Some (Class_type (s, alloc_strat)) -> s
            | None -> failwith ("infer_stmt: Could not find identifier " ^ variable_name)
        in
        let (k, props, methods) = match Namespace.find_class ns class_name with
            | Some v -> v
            | None -> failwith ("infer_stmt: Could not find class type " ^ class_name)
        in
        let prop_type = match List.find_opt (fun (prop_name2, p) -> prop_name = prop_name2) props with
            | Some (name, t) -> t
            | None -> failwith ("infer_stmt: Found no class property with name " ^ prop_name)
        in
        if not (prop_type = t) then raise (Type_error
            (
                sprintf
                    "Right-hand expression type %s is not the same as the defined property type %s : %s"
                    (show_typ t)
                    prop_name
                    (show_typ prop_type)
            )
        );
        (* TODO: variable_name is expression? *)
        Assignment (typ_of_expression ns expr, Object_access (variable_name, Property_access prop_name), infer_expression ns expr)
    (* printf is hard-coded *)
    (* Head of expressions is always a format string to printf *)
    | Function_call (Infer_me, "printf", String s :: xs) ->
        Log.debug "infer_stmt: printf";
        let expected_types = infer_printf s in
        (* Convert %d to %ld etc for long *)
        let adapted_s = Str.global_replace (Str.regexp "%d") "%ld" s in
        let exprs : expression list = Coerce (String_literal, String adapted_s) :: List.map2 (fun e t -> match e, t with
            (* Match on xs and expected_types to check that it matches *)
            | String s, String_literal -> Coerce (String_literal, e)
            | e, t -> begin
                match typ_of_expression ns e with
                | String -> Coerce (String_literal, infer_expression ns e)
                | expr_typ when expr_typ <> t -> raise (
                    Type_error (
                        sprintf
                            "infer_stmt: Wrong argument given to printf: Got %s but expected %s (expression = %s?)"
                            (show_typ expr_typ)
                            (show_typ t)
                            (show_expression e)
                    )
                )
                | _ -> infer_expression ns e
            end
        ) xs expected_types in
        Function_call (Function_type {return_type = Void; arguments = String_literal :: expected_types}, "printf", exprs)
    | Function_call (Infer_me, "printf", _ :: xs) ->
        failwith "infer_stmt: printf must have a string literal as first argument"
    | Function_call (Infer_me, id, e) ->
        let t = match Namespace.find_function ns id with
            | Some (Function_type {return_type; arguments} as t) -> t
            | Some t -> failwith ("not a function: " ^ show_typ t)
            | _ -> failwith ("found no function declared with name " ^ id)
        in
        Function_call (t, id, infer_expressions ns e)
    | Foreach {arr (* Array expression *); key; value = Variable value_name; body = stmts} as e -> begin
        let t = typ_of_expression ns arr in
        begin match t with
            | Fixed_array _
            | Dynamic_array _ -> ()
            | _ -> raise (Type_error ("Array given to foreach does not have an array type, but instead " ^ show_typ t))
        end;
        let array_internal_type = match t with
            | Fixed_array (t, _) -> t
            | Dynamic_array t -> t
        in
        (* NB: Since PHP lacks block scope, we don't have to clone the namespace or remove the variable after *)
        Namespace.add_identifier ns value_name array_internal_type;
        begin match key with Some (Variable s) -> Namespace.add_identifier ns s Int | _ -> () end;
        let f = fun s -> infer_stmt s ns in
        let value_typ = typ_of_expression ns (Variable value_name) in
        Foreach {arr; key; value = Variable value_name; value_typ; value_typ_constant = typ_to_constant value_typ; body = List.map f stmts}
    end
    | Dowhile {condition; body;} ->
        let inf = fun s -> infer_stmt s ns in
        let new_body = List.map inf body in
        Dowhile {condition = infer_expression ns condition; body = new_body;}
    (** TODO: This is not so good *)
    | s -> s
let rec kind_of_typ ns t : kind = match t with
| Int | Float | Void -> Val
| String -> Ref
| Class_type (s, alloc_strat) -> begin
match Namespace.find_class ns s with
| Some (Infer_kind, props, methods) -> infer_kind ns Infer_kind props
| Some (k, _, _) -> k
| None -> failwith ("kind_of_typ: Cannot find class " ^ s)
end
| t -> failwith ("kind_of_typ: " ^ show_typ t)
(**
* @param namespace
* @param kind
* @param prop list
* @return kind
*)
and infer_kind ns k (props : (string * typ) list) : kind =
let all_props_are_val props =
let l = List.filter (fun (_, t) -> kind_of_typ ns t = Val) props in
List.length l = List.length props
in
match k with
| Infer_kind -> begin
if all_props_are_val props then Val else Ref
end
| k -> k
(** Check if return type is correct, in relation to declared function type *)
let check_return_type ns stmt typ =
match stmt with
| Return exp ->
Log.debug "%s %s" "check_return_type" (show_statement stmt);
let return_type = typ_of_expression ns exp in
if (kind_of_typ ns return_type) = Ref then raise (Type_error "A function cannot return a Ref kind");
if compare_typ typ return_type = 0 then
()
else
failwith (sprintf "Return type %s is not expected type %s" (show_typ return_type) (show_typ typ))
| _ -> ()
(* TODO: If, foreach, etc *)
(**
* Infer and resolve conflicts between docblock, params and function type.
*)
let unify_params_with_function_type params (Function_type {return_type; arguments}) =
Log.debug "unify_params_with_function_type";
let map = (fun param arg ->
let param = infer_arg_typ_param param in
Log.debug "unify_params_with_function_type inferred param = %s" (show_param param);
let arg = infer_arg_typ arg Polymorph in
Log.debug "unify_params_with_function_type inferred arg = %s" (show_typ arg);
match param, arg with
Dynamic_array from docblock always wins over non - yet inferred Fixed_array
| RefParam (id, Dynamic_array t), Fixed_array (Infer_me, None) -> begin
Log.debug "unify_params_with_function_type Picking dynamic_array with typ %s" (show_typ t);
Dynamic_array (t)
end
| _, Fixed_array (Infer_me, _) -> arg
| _, _ -> arg
) in
Function_type {
return_type;
arguments = List.map2 map params arguments;
}
(**
* Replace Infer_allocation_strategy inside docblock.
*)
let infer_docblock d : docblock_comment =
match d with
| DocParam (id, t) -> DocParam (id, infer_arg_typ t Polymorph)
(**
* Replace Infer_me inside statements in method.
*)
let infer_method (c_orig : Ast.declaration) meth ns : function_def = match meth with
| {
name;
docblock;
params;
stmts;
function_type = Function_type {return_type; arguments};
} ->
let class_name = match c_orig with Class {name;} -> name in
let params : Ast.param list = unify_params_with_docblock params docblock in
let ftyp =
unify_params_with_function_type
params
(Function_type {return_type; arguments})
in
let ns = Namespace.reset_identifiers ns in
(* Add method args to namespace *)
List.iter (fun p -> match p with
| Param (id, typ)
| RefParam (id, typ) -> Namespace.add_identifier ns id typ
) params;
(* TODO: Does alloc strat matter here? *)
Namespace.add_identifier ns "this" (Class_type (class_name, Boehm));
let inf = fun s -> infer_stmt s ns in
let new_stmts = List.map inf stmts in
{name; docblock; params; stmts = new_stmts; function_type = ftyp}
let infer_declaration decl ns : declaration =
Log.debug "infer_declaration %s" (show_declaration decl);
match decl with
| Function of function_name * param list * statement list * typ
| Struct of struct_name * list
| Function of function_name * param list * statement list * typ
| Struct of struct_name * struct_field list
*)
| Function {
name;
docblock;
params;
stmts;
function_type = Function_type {return_type; arguments};
} ->
if (kind_of_typ ns return_type) = Ref then raise (Type_error "A function cannot have a Ref kind as return type");
let docblock = List.map infer_docblock docblock in
let params = unify_params_with_docblock params docblock in
let ftyp =
unify_params_with_function_type
params
(Function_type {return_type; arguments})
in
Log.debug "infer_declaration: ftyp = %s" (show_typ ftyp);
Namespace.add_function_type ns name ftyp;
let ns = Namespace.reset_identifiers ns in
Namespace.add_params ns params;
let inf = fun s -> infer_stmt s ns in
let new_stmts = List.map inf stmts in
let _ = List.map (fun s -> check_return_type ns s return_type) new_stmts in
Function {name; docblock; params; stmts = new_stmts; function_type = ftyp}
| Function {function_type = ftyp} -> failwith ("infer_declaration function typ " ^ show_typ ftyp)
| Class {name; kind; properties = props; methods} as c_orig when kind = Infer_kind ->
(* Temporary class type during inference *)
Namespace.add_class_type ns c_orig;
let k = infer_kind ns Infer_kind props in
let methods = List.map (fun m -> infer_method c_orig m ns) methods in
let c = Class {name; kind = k; properties = props; methods} in
Namespace.remove_class_type ns c;
Namespace.add_class_type ns c;
c
| Class {name; kind; properties; methods} -> failwith ("infer_declaration: Class with kind " ^ show_kind kind ^ " " ^ name)
let run (ns : Namespace.t) (p : program): program =
Log.debug "Infer.run";
match p with
| Declaration_list decls -> Declaration_list (List.map (fun d -> infer_declaration d ns) decls)
| null | https://raw.githubusercontent.com/olleharstedt/pholyglot/644dc134d98091bcb7e2946cd98d9f4775262afd/src/lib/Infer.ml | ocaml |
* Module to infer types of local variables
* Both inferring types of expression, but also iterating the ast to replace Infer_me with proper types.
*
* Global variable
* Should only be used by Function_call expression to replace type variables in Function_type
* TODO: Would this work when wrapping multiple generic functions in one call?
TODO: Access chain like $a->b->c
TODO: id is expression?
TODO: Should be able to update this to Dynamic_array
TODO: Tuple here
$point[0]-> ?
$point->getX()
| Object_access (Array_access (class_name, _), Method_call {return_type = Infer_me; method_name})
TODO: Will this work with chained calls, like $obj->foo()->moo()?
Takes a typ and a type variable hashtable and replaces type variables in typ
| t -> raise (Type_error ("replace_type_variables: Can only replace type variables in Function_type but got " ^ (show_typ t)))
*
* Figure out the typ of type variables using namespace, typ and expression list
* Used for Function_type
*
* Replace Infer_me and type variables inside expr using bindings in namespace ns
This is allowed to enable infering aliasing, like $b = $a
TODO: Type variable here
Function_call (Function_type {return_type; arguments}, name, params)
TODO: Why is this needed?
TODO: Memory context
TODO: /** @mem moo */
| e -> failwith ("infer_expression " ^ show_expression expr)
*
* Parse format string from printf and return a list of types
*
* Returns string list of all type variables in t
*
* docblock takes precedence, because it's more precise, unless there's a conflict
Are all params represented in the docblock?
*
* Infer types inside Ast.statement
TODO: variable_name is expression?
Check if class def has property with name and type
TODO: variable_name is expression?
printf is hard-coded
Head of expressions is always a format string to printf
Convert %d to %ld etc for long
Array expression
* TODO: This is not so good
*
* @param namespace
* @param kind
* @param prop list
* @return kind
* Check if return type is correct, in relation to declared function type
TODO: If, foreach, etc
*
* Infer and resolve conflicts between docblock, params and function type.
*
* Replace Infer_allocation_strategy inside docblock.
*
* Replace Infer_me inside statements in method.
Add method args to namespace
TODO: Does alloc strat matter here?
Temporary class type during inference |
open Printf
open Ast
module Log = Dolog.Log
exception Type_error of string
let t_vars_tbl : (string, typ) Hashtbl.t = Hashtbl.create 10
let rec typ_of_lvalue ns lv : typ =
Log.debug "%s %s" "typ_of_lvalue" (show_lvalue lv);
match lv with
| Variable id ->
begin match Namespace.find_identifier ns id with
| Some typ -> typ
| None -> raise (Type_error (sprintf "typ_of_lvalue: Could not find function type %s in namespace" id))
end
| Object_access (id, Property_access prop_name) ->
let class_type_name = match Namespace.find_identifier ns id with
| Some (Class_type (c, a)) -> c
| None -> raise (Type_error (sprintf "typ_of_lvalue: Could not find class type %s in namespace" id))
in
let (k, props, methods) = match Namespace.find_class ns class_type_name with
| Some p -> p
| None -> raise (Type_error (sprintf "typ_of_lvalue: Found no class declarion %s in namespace" class_type_name))
in
match List.find_opt (fun (name, t) -> prop_name = name) props with
| Some (n, t) -> t
| None -> raise (Type_error (sprintf "typ_of_lvalue: Could not find propert with name %s in class %s" prop_name id))
let rec typ_of_expression (ns : Namespace.t) (expr : expression) : typ =
Log.debug "%s %s" "typ_of_expression" (show_expression expr);
match expr with
| Num _ -> Int
| Num_float _ -> Float
| String s -> String
| Plus (e, f)
| Minus (e, f)
| Times (e, f)
| Div (e, f) ->
let e_typ = typ_of_expression ns e in
let f_typ = typ_of_expression ns f in
if e_typ <> f_typ then
raise (Type_error "typ_of_expression: Mixing float and int in arith expression")
else
e_typ
| Concat (e, f) ->
let check e =
match typ_of_expression ns e with
| String -> ()
| _ -> raise (Type_error "typ_of_expression: Found non-string in concat")
in
check e;
check f;
String
| Parenth e -> typ_of_expression ns e
| Array_init (Infer_me, length, exprs) ->
if List.length exprs = 0 then raise (Type_error "array_init cannot be empty list");
let first_elem = List.nth exprs 0 in
if List.for_all (fun x -> typ_of_expression ns x = typ_of_expression ns first_elem) exprs then
Fixed_array (typ_of_expression ns first_elem, Some (List.length exprs))
else
raise (Type_error "not all element in array_init have the same type")
| Array_init (t, _, _) -> t
| New (alloc_strat, t, exprs) -> t
| Object_access (Array_access (id, _), Property_access prop_name)
$ point->x
| Object_access (Variable id, Property_access prop_name) -> begin
match Namespace.find_identifier ns id with
| Some (Fixed_array (Class_type (class_type_name, _), _))
| Some (Class_type (class_type_name, _)) -> begin
let (k, props, methods) = match Namespace.find_class ns class_type_name with
| Some p -> p
| None -> raise (Type_error (sprintf "typ_of_expression: Found no class declarion %s in namespace" class_type_name))
in
match List.find_opt (fun (name, t) -> prop_name = name) props with
| Some (n, t) -> t
| None -> raise (Type_error (sprintf "typ_of_expression: Could not find propert with name %s in class %s" prop_name id))
end
| None -> raise (Type_error (sprintf "typ_of_expression: Could not find class type %s in namespace" id))
end
| Method_call {return_type = Infer_me; method_name; left_hand = Variable class_name}
| Object_access (Variable class_name, Method_call {return_type = Infer_me; method_name}) -> begin
let class_type_name = match Namespace.find_identifier ns class_name with
| Some (Class_type (c, a)) -> c
| None -> begin
raise (Type_error (sprintf "typ_of_expression method call: Could not find identifier %s in namespace" class_name))
end
in
let (k, props, methods) = match Namespace.find_class ns class_type_name with
| Some class_decl -> class_decl
| None -> raise (Type_error (sprintf "typ_of_expression: Found no class declarion %s in namespace" class_type_name))
in
match List.find_opt (fun {name} -> method_name = name) methods with
| Some {
function_type = Function_type {return_type; arguments}
}
-> return_type
| None -> raise (Type_error (sprintf "typ_of_expression: Could not find method with name %s in class %s" method_name class_type_name))
end
| Object_access (class_id, Method_call {return_type}) -> return_type
| Variable id -> begin
match Namespace.find_identifier ns id with
| Some p -> p
| None -> raise (Type_error (sprintf "typ_of_expression: Could not find variable with name %s" id))
end
| Function_call (_, id, _) -> begin
match Namespace.find_function ns id with
| Some (Function_type {return_type; arguments}) -> return_type
| Some t -> failwith ("not a function: " ^ show_typ t)
| _ -> failwith ("found no function declared with name " ^ id)
end
| Array_access (id, expr) -> begin
Log.debug "%s %s" "Array_access " id;
match Namespace.find_identifier ns id with
| Some (Fixed_array (t, length)) -> t
| Some (Dynamic_array t) -> t
| _ -> raise (Type_error (sprintf "typ_of_expression: Found no array with id %s in namespace, or could not infer type" id))
end
| e -> failwith ("typ_of_expression: " ^ (show_expression e))
*
* Params always have Polymorph alloc strategy for now .
* Params always have Polymorph alloc strategy for now.
*)
and infer_arg_typ t def =
Log.debug "infer_arg_typ %s" (show_typ t);
match t with
| Class_type (s, alloc_strat) -> begin
Log.debug "infer_arg_typ Found Class_type";
Class_type (s, def)
end
| Fixed_array (t, n) -> Fixed_array (infer_arg_typ t def, n)
| Dynamic_array t -> Dynamic_array (infer_arg_typ t def)
| t -> t
let typ_to_constant (t : Ast.typ) : Ast.expression = match t with
| Int -> Constant "int"
| Float -> Constant "float"
| String -> Constant "string"
| Class_type (s, _) -> Constant s
| _ -> raise (Type_error ("typ_to_constant: Not supported type: " ^ show_typ t))
let rec typ_contains_type_variable (t : typ): bool =
Log.debug "typ_contains_type_variable %s" (show_typ t);
match t with
| Function_type {return_type; arguments} ->
typ_contains_type_variable return_type || List.exists (fun t -> typ_contains_type_variable t) arguments
| Type_variable _ -> true
| Dynamic_array t -> typ_contains_type_variable t
| Fixed_array (t, _) -> typ_contains_type_variable t
| _ -> false
let rec get_type_variable (t : typ): string option = match t with
| Function_type {return_type; arguments} -> failwith "get_type_variable: not supported: Function_type"
| Type_variable s -> Some s
| Dynamic_array t -> get_type_variable t
| Fixed_array (t, _) -> get_type_variable t
| _ -> None
let rec replace_type_variables t : typ =
Log.debug "replace_type_variables %s" (show_typ t);
match t with
| Function_type {return_type; arguments} ->
Function_type {
return_type = replace_type_variables return_type;
arguments = List.map (fun a -> replace_type_variables a) arguments;
}
| Type_variable s -> begin
match Hashtbl.find_opt t_vars_tbl s with
| Some t -> t
| None -> raise (Type_error ("Found no resolved type variable with name " ^ s))
end
| Dynamic_array t -> replace_type_variables t
| t -> t
let resolve_type_variable ns t exprs : typ =
Log.debug "resolve_type_variable %s" (show_typ t);
match t with
| Function_type {return_type; arguments} ->
Hashtbl.clear t_vars_tbl;
let populate_type_variables = fun arg_t expr ->
match get_type_variable arg_t with
| Some t_var_name -> begin
let t = typ_of_expression ns expr in
Log.debug "resolve_type_variable t = %s" (show_typ t);
Hashtbl.add t_vars_tbl t_var_name t
end
| None -> ()
in
List.iter2 populate_type_variables arguments exprs;
replace_type_variables t
| _ -> raise (Type_error "resolve_type_variable: No Function_type")
let rec infer_expression ns expr : expression =
Log.debug "%s %s" "infer_expression" (show_expression expr);
match expr with
| Variable id -> Variable id
| Function_call (Infer_me, name, params) -> begin
let inf = fun e -> infer_expression ns e in
let params = List.map inf params in
match Namespace.find_function ns name with
| Some (Function_type {return_type; arguments} as fun_t) ->
if typ_contains_type_variable fun_t then begin
let resolved_fun_t = resolve_type_variable ns fun_t params in
Log.debug "resolved_fun_t = %s" (show_typ resolved_fun_t);
Function_call (resolved_fun_t, name, params)
end else
Function_call (fun_t, name, params)
| Some t -> failwith ("not a function: " ^ show_typ t)
| _ -> failwith ("infer_expression: found no function declared with name " ^ name)
end
| Method_call {return_type = Infer_me; method_name; left_hand = Variable object_name; args} as e -> begin
let t = typ_of_expression ns e in
Method_call {return_type = t; method_name; left_hand = Variable object_name; args}
end
| Object_access (leftside_expr, expr) -> Object_access (infer_expression ns leftside_expr, infer_expression ns expr)
| Array_init (Infer_me, _, exprs) as e ->
let length = List.length exprs in
let inf = fun e -> typ_of_expression ns e in
let exprs_types = List.map inf exprs in
let t = typ_of_expression ns e in
let tt = match t with Fixed_array (t, _) -> t in
Function_call (
Function_type {
return_type = t;
arguments = Constant :: Int :: exprs_types;
},
"array_make",
typ_to_constant tt :: Num length :: exprs
)
| Array_access (id, expr) as e ->
let t = typ_of_expression ns e in
Function_call (
Function_type {
return_type = t;
arguments = Constant :: Dynamic_array t :: Int :: [];
},
"array_get",
typ_to_constant t :: Variable id :: expr :: [];
)
| New (alloc_strat, Class_type (class_name, Infer_allocation_strategy), args) -> New (alloc_strat, Class_type (class_name, Boehm), args)
| e -> e
let infer_expressions ns exprs =
let inf = fun e -> infer_expression ns e in
List.map inf exprs
let infer_printf (s : string) : Ast.typ list =
Log.debug "infer_printf";
let s = Str.global_replace (Str.regexp "%%") "" s in
let regexp = Str.regexp "%[sdf]" in
let rec get_all_matches i = match Str.search_forward regexp s i with
| i ->
let m = Str.matched_string s in
(match m with
| "%s" -> String_literal
| "%d" -> Int
| "%f" -> Float
) :: get_all_matches (i + 1)
| exception Not_found -> []
in
get_all_matches 0
let rec find_all_type_variables t : string list = match t with
| Function_type { return_type ; arguments } - >
find_all_type_variables return_type @ List.map ( fun x - > find_all_type_variables x ) arguments
| Type_variable tv - > [ tv ]
| _ - > [ ]
let rec find_all_type_variables t : string list = match t with
| Function_type {return_type; arguments} ->
find_all_type_variables return_type @ List.map (fun x -> find_all_type_variables x) arguments
| Type_variable tv -> [tv]
| _ -> []
*)
let find_docblock (l : docblock_comment list) (id : string) : docblock_comment option =
List.find_opt (fun docblock_comment -> match docblock_comment with
| DocParam (id_, _) -> id = id_
| _ -> false
) l
let unify_params_with_docblock (params : param list) (comments : docblock_comment list) : param list =
let map = (fun p -> match p with
| RefParam (id, Fixed_array (t, size_option)) ->
begin match find_docblock comments id with
| Some (DocParam (_, Dynamic_array (t_))) -> RefParam (id, Dynamic_array (infer_arg_typ t_ Polymorph))
| None -> p
end
| RefParam (id, t) -> RefParam (id, infer_arg_typ t Polymorph)
| Param (id, t) -> Param (id, infer_arg_typ t Polymorph)
) in
List.map map params
* Infer typ inside Param / RefParam
let infer_arg_typ_param p : param =
Log.debug "infer_arg_typ_param %s" (show_param p);
match p with
| Param (id, t) ->
let new_t = infer_arg_typ t Polymorph in
Param (id, new_t)
| RefParam (id, t) ->
let new_t = infer_arg_typ t Polymorph in
RefParam (id, new_t)
let rec infer_stmt (s : statement) (ns : Namespace.t) : statement =
Log.debug "infer_stmt: %s" (show_statement s);
match s with
| Assignment (Infer_me, Variable id, expr) ->
Log.debug "infer_stmt: assignment to id %s" id;
let t = typ_of_expression ns expr in
let expr = infer_expression ns expr in
let t = replace_type_variables t in
let t = infer_arg_typ t Boehm in
Log.debug "id %s typ = %s" id (show_typ t);
Namespace.add_identifier ns id t;
Assignment (t, Variable id, expr)
TODO : this with lvalue
| Assignment (Infer_me, Object_access (variable_name, Property_access prop_name), expr) ->
let t = typ_of_expression ns expr in
let class_name = match Namespace.find_identifier ns variable_name with
| Some (Class_type (s, alloc_strat)) -> s
| None -> failwith ("infer_stmt: Could not find identifier " ^ variable_name)
in
let (k, props, methods) = match Namespace.find_class ns class_name with
| Some v -> v
| None -> failwith ("infer_stmt: Could not find class type " ^ class_name)
in
let prop_type = match List.find_opt (fun (prop_name2, p) -> prop_name = prop_name2) props with
| Some (name, t) -> t
| None -> failwith ("infer_stmt: Found no class property with name " ^ prop_name)
in
if not (prop_type = t) then raise (Type_error
(
sprintf
"Right-hand expression type %s is not the same as the defined property type %s : %s"
(show_typ t)
prop_name
(show_typ prop_type)
)
);
Assignment (typ_of_expression ns expr, Object_access (variable_name, Property_access prop_name), infer_expression ns expr)
| Function_call (Infer_me, "printf", String s :: xs) ->
Log.debug "infer_stmt: printf";
let expected_types = infer_printf s in
let adapted_s = Str.global_replace (Str.regexp "%d") "%ld" s in
let exprs : expression list = Coerce (String_literal, String adapted_s) :: List.map2 (fun e t -> match e, t with
Match on xs and expected_types to check that it matches
| String s, String_literal -> Coerce (String_literal, e)
| e, t -> begin
match typ_of_expression ns e with
| String -> Coerce (String_literal, infer_expression ns e)
| expr_typ when expr_typ <> t -> raise (
Type_error (
sprintf
"infer_stmt: Wrong argument given to printf: Got %s but expected %s (expression = %s?)"
(show_typ expr_typ)
(show_typ t)
(show_expression e)
)
)
| _ -> infer_expression ns e
end
) xs expected_types in
Function_call (Function_type {return_type = Void; arguments = String_literal :: expected_types}, "printf", exprs)
| Function_call (Infer_me, "printf", _ :: xs) ->
failwith "infer_stmt: printf must have a string literal as first argument"
| Function_call (Infer_me, id, e) ->
let t = match Namespace.find_function ns id with
| Some (Function_type {return_type; arguments} as t) -> t
| Some t -> failwith ("not a function: " ^ show_typ t)
| _ -> failwith ("found no function declared with name " ^ id)
in
Function_call (t, id, infer_expressions ns e)
let t = typ_of_expression ns arr in
begin match t with
| Fixed_array _
| Dynamic_array _ -> ()
| _ -> raise (Type_error ("Array given to foreach does not have an array type, but instead " ^ show_typ t))
end;
let array_internal_type = match t with
| Fixed_array (t, _) -> t
| Dynamic_array t -> t
in
NB : Since PHP lack block scope , we do n't have to clone the namespace or remove variable after
Namespace.add_identifier ns value_name array_internal_type;
begin match key with Some (Variable s) -> Namespace.add_identifier ns s Int | _ -> () end;
let f = fun s -> infer_stmt s ns in
let value_typ = typ_of_expression ns (Variable value_name) in
Foreach {arr; key; value = Variable value_name; value_typ; value_typ_constant = typ_to_constant value_typ; body = List.map f stmts}
end
| Dowhile {condition; body;} ->
let inf = fun s -> infer_stmt s ns in
let new_body = List.map inf body in
Dowhile {condition = infer_expression ns condition; body = new_body;}
| s -> s
let rec kind_of_typ ns t : kind = match t with
| Int | Float | Void -> Val
| String -> Ref
| Class_type (s, alloc_strat) -> begin
match Namespace.find_class ns s with
| Some (Infer_kind, props, methods) -> infer_kind ns Infer_kind props
| Some (k, _, _) -> k
| None -> failwith ("kind_of_typ: Cannot find class " ^ s)
end
| t -> failwith ("kind_of_typ: " ^ show_typ t)
and infer_kind ns k (props : (string * typ) list) : kind =
let all_props_are_val props =
let l = List.filter (fun (_, t) -> kind_of_typ ns t = Val) props in
List.length l = List.length props
in
match k with
| Infer_kind -> begin
if all_props_are_val props then Val else Ref
end
| k -> k
let check_return_type ns stmt typ =
match stmt with
| Return exp ->
Log.debug "%s %s" "check_return_type" (show_statement stmt);
let return_type = typ_of_expression ns exp in
if (kind_of_typ ns return_type) = Ref then raise (Type_error "A function cannot return a Ref kind");
if compare_typ typ return_type = 0 then
()
else
failwith (sprintf "Return type %s is not expected type %s" (show_typ return_type) (show_typ typ))
| _ -> ()
let unify_params_with_function_type params (Function_type {return_type; arguments}) =
Log.debug "unify_params_with_function_type";
let map = (fun param arg ->
let param = infer_arg_typ_param param in
Log.debug "unify_params_with_function_type inferred param = %s" (show_param param);
let arg = infer_arg_typ arg Polymorph in
Log.debug "unify_params_with_function_type inferred arg = %s" (show_typ arg);
match param, arg with
Dynamic_array from docblock always wins over non - yet inferred Fixed_array
| RefParam (id, Dynamic_array t), Fixed_array (Infer_me, None) -> begin
Log.debug "unify_params_with_function_type Picking dynamic_array with typ %s" (show_typ t);
Dynamic_array (t)
end
| _, Fixed_array (Infer_me, _) -> arg
| _, _ -> arg
) in
Function_type {
return_type;
arguments = List.map2 map params arguments;
}
let infer_docblock d : docblock_comment =
match d with
| DocParam (id, t) -> DocParam (id, infer_arg_typ t Polymorph)
let infer_method (c_orig : Ast.declaration) meth ns : function_def = match meth with
| {
name;
docblock;
params;
stmts;
function_type = Function_type {return_type; arguments};
} ->
let class_name = match c_orig with Class {name;} -> name in
let params : Ast.param list = unify_params_with_docblock params docblock in
let ftyp =
unify_params_with_function_type
params
(Function_type {return_type; arguments})
in
let ns = Namespace.reset_identifiers ns in
List.iter (fun p -> match p with
| Param (id, typ)
| RefParam (id, typ) -> Namespace.add_identifier ns id typ
) params;
Namespace.add_identifier ns "this" (Class_type (class_name, Boehm));
let inf = fun s -> infer_stmt s ns in
let new_stmts = List.map inf stmts in
{name; docblock; params; stmts = new_stmts; function_type = ftyp}
let infer_declaration decl ns : declaration =
Log.debug "infer_declaration %s" (show_declaration decl);
match decl with
| Function of function_name * param list * statement list * typ
| Struct of struct_name * list
| Function of function_name * param list * statement list * typ
| Struct of struct_name * struct_field list
*)
| Function {
name;
docblock;
params;
stmts;
function_type = Function_type {return_type; arguments};
} ->
if (kind_of_typ ns return_type) = Ref then raise (Type_error "A function cannot have a Ref kind as return type");
let docblock = List.map infer_docblock docblock in
let params = unify_params_with_docblock params docblock in
let ftyp =
unify_params_with_function_type
params
(Function_type {return_type; arguments})
in
Log.debug "infer_declaration: ftyp = %s" (show_typ ftyp);
Namespace.add_function_type ns name ftyp;
let ns = Namespace.reset_identifiers ns in
Namespace.add_params ns params;
let inf = fun s -> infer_stmt s ns in
let new_stmts = List.map inf stmts in
let _ = List.map (fun s -> check_return_type ns s return_type) new_stmts in
Function {name; docblock; params; stmts = new_stmts; function_type = ftyp}
| Function {function_type = ftyp} -> failwith ("infer_declaration function typ " ^ show_typ ftyp)
| Class {name; kind; properties = props; methods} as c_orig when kind = Infer_kind ->
Namespace.add_class_type ns c_orig;
let k = infer_kind ns Infer_kind props in
let methods = List.map (fun m -> infer_method c_orig m ns) methods in
let c = Class {name; kind = k; properties = props; methods} in
Namespace.remove_class_type ns c;
Namespace.add_class_type ns c;
c
| Class {name; kind; properties; methods} -> failwith ("infer_declaration: Class with kind " ^ show_kind kind ^ " " ^ name)
let run (ns : Namespace.t) (p : program): program =
Log.debug "Infer.run";
match p with
| Declaration_list decls -> Declaration_list (List.map (fun d -> infer_declaration d ns) decls)
|
490fc8a5a7b079061627fdadac2f7d5233a5c9f2585d0fc74b671de9d6b9c733 | janestreet/universe | managed_on_failure.ml | open Core
open Async
module T = struct
type 'worker functions = { fail : ('worker, unit, unit) Rpc_parallel.Function.t }
module Worker_state = struct
type init_arg = unit [@@deriving bin_io]
type t = unit
end
module Connection_state = struct
type init_arg = unit [@@deriving bin_io]
type t = unit
end
module Functions
(C : Rpc_parallel.Creator
with type worker_state := Worker_state.t
and type connection_state := Connection_state.t) =
struct
let fail =
C.create_one_way
~f:(fun ~worker_state:() ~conn_state:() () ->
Make sure this exception is raised asynchronously . I 'm not sure how to do
this in a non - racy way , but hopefully 0.01 seconds strikes the right balance
of not being racy but not introducing too much of a delay .
this in a non-racy way, but hopefully 0.01 seconds strikes the right balance
of not being racy but not introducing too much of a delay. *)
upon (after (sec 0.01)) (fun () -> failwith "asynchronous exception"))
~bin_input:Unit.bin_t
()
;;
let functions = { fail }
let init_worker_state () = Deferred.unit
let init_connection_state ~connection:_ ~worker_state:_ = return
end
end
include Rpc_parallel.Managed.Make [@alert "-legacy"] (T)
let uuid_re =
Re.Pcre.re "[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}" |> Re.compile
;;
let uuid_replacement = Uuid.Stable.V1.for_testing |> Uuid.to_string
let error_to_string_masking_uuid error =
Re.replace_string uuid_re ~by:uuid_replacement (Error.to_string_hum error)
;;
let main () =
let errors = Transaction.Var.create [] in
let add_error ~tag error =
Transaction.Var.replace_now errors (fun errors -> Error.tag ~tag error :: errors)
in
let%bind worker =
spawn
~on_failure:(add_error ~tag:"on_failure")
~on_connection_to_worker_closed:(add_error ~tag:"on_connection_to_worker_closed")
~redirect_stdout:`Dev_null
~redirect_stderr:`Dev_null
()
()
>>| ok_exn
in
let%bind () = run_exn worker ~f:functions.fail ~arg:() in
match%bind
(let open Transaction.Let_syntax in
match%bind Transaction.Var.get errors with
| _ :: _ :: _ as errors -> return errors
| _ -> Transaction.retry ())
|> Transaction.run_with_timeout (Time_ns.Span.of_sec 10.)
with
| Result errors ->
let errors =
errors
|> List.map ~f:error_to_string_masking_uuid
|> List.sort ~compare:String.compare
in
print_s [%message (errors : string list)];
return ()
| Timeout () ->
print_s [%message "Timeout"];
return ()
;;
let () = Rpc_parallel.For_testing.initialize [%here]
let%expect_test "" =
let%bind () = main () in
[%expect
{|
(errors
("(on_connection_to_worker_closed \"Lost connection with worker\")"
"(on_failure\
\n (5a863fc1-67b7-3a0a-dc90-aca2995afbf9\
\n (monitor.ml.Error (Failure \"asynchronous exception\")\
\n (\"<backtrace elided in test>\"))))")) |}]
;;
| null | https://raw.githubusercontent.com/janestreet/universe/b6cb56fdae83f5d55f9c809f1c2a2b50ea213126/rpc_parallel/expect_test/managed_on_failure.ml | ocaml | open Core
open Async
module T = struct
type 'worker functions = { fail : ('worker, unit, unit) Rpc_parallel.Function.t }
module Worker_state = struct
type init_arg = unit [@@deriving bin_io]
type t = unit
end
module Connection_state = struct
type init_arg = unit [@@deriving bin_io]
type t = unit
end
module Functions
(C : Rpc_parallel.Creator
with type worker_state := Worker_state.t
and type connection_state := Connection_state.t) =
struct
let fail =
C.create_one_way
~f:(fun ~worker_state:() ~conn_state:() () ->
Make sure this exception is raised asynchronously . I 'm not sure how to do
this in a non - racy way , but hopefully 0.01 seconds strikes the right balance
of not being racy but not introducing too much of a delay .
this in a non-racy way, but hopefully 0.01 seconds strikes the right balance
of not being racy but not introducing too much of a delay. *)
upon (after (sec 0.01)) (fun () -> failwith "asynchronous exception"))
~bin_input:Unit.bin_t
()
;;
let functions = { fail }
let init_worker_state () = Deferred.unit
let init_connection_state ~connection:_ ~worker_state:_ = return
end
end
include Rpc_parallel.Managed.Make [@alert "-legacy"] (T)
let uuid_re =
Re.Pcre.re "[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}" |> Re.compile
;;
let uuid_replacement = Uuid.Stable.V1.for_testing |> Uuid.to_string
let error_to_string_masking_uuid error =
Re.replace_string uuid_re ~by:uuid_replacement (Error.to_string_hum error)
;;
let main () =
let errors = Transaction.Var.create [] in
let add_error ~tag error =
Transaction.Var.replace_now errors (fun errors -> Error.tag ~tag error :: errors)
in
let%bind worker =
spawn
~on_failure:(add_error ~tag:"on_failure")
~on_connection_to_worker_closed:(add_error ~tag:"on_connection_to_worker_closed")
~redirect_stdout:`Dev_null
~redirect_stderr:`Dev_null
()
()
>>| ok_exn
in
let%bind () = run_exn worker ~f:functions.fail ~arg:() in
match%bind
(let open Transaction.Let_syntax in
match%bind Transaction.Var.get errors with
| _ :: _ :: _ as errors -> return errors
| _ -> Transaction.retry ())
|> Transaction.run_with_timeout (Time_ns.Span.of_sec 10.)
with
| Result errors ->
let errors =
errors
|> List.map ~f:error_to_string_masking_uuid
|> List.sort ~compare:String.compare
in
print_s [%message (errors : string list)];
return ()
| Timeout () ->
print_s [%message "Timeout"];
return ()
;;
let () = Rpc_parallel.For_testing.initialize [%here]
let%expect_test "" =
let%bind () = main () in
[%expect
{|
(errors
("(on_connection_to_worker_closed \"Lost connection with worker\")"
"(on_failure\
\n (5a863fc1-67b7-3a0a-dc90-aca2995afbf9\
\n (monitor.ml.Error (Failure \"asynchronous exception\")\
\n (\"<backtrace elided in test>\"))))")) |}]
;;
|
|
3e8ff0d7569b9e5325f3f31fca27b35fa786ae6c31950e262fd622f7c79e72be | reach-sh/reach-lang | ETH_solc.hs | module Reach.Connector.ETH_solc
( compile_sol_
, compile_sol_extract
, CompiledSolRec(..)
) where
import Control.Monad.Reader
import Data.Aeson as Aeson
import Data.Aeson.Encode.Pretty
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy.Char8 as LB
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.String
import qualified Data.Text as T
import Reach.Util
import Reach.Warning
import System.Exit
import System.FilePath
import System.Process.ByteString
maxContractLen :: Int
maxContractLen = 24576
newtype CompiledSolRecs = CompiledSolRecs CompiledSolRecsM
type CompiledSolRecsM = M.Map T.Text CompiledSolRec
instance FromJSON CompiledSolRecs where
parseJSON = withObject "CompiledSolRecs" $ \o -> do
let ctcs' = kmToM o
CompiledSolRecs <$> mapM parseJSON ctcs'
data CompiledSolRec = CompiledSolRec
{ csrAbi :: T.Text
, csrCode :: T.Text
}
instance FromJSON CompiledSolRec where
parseJSON = withObject "CompiledSolRec" $ \ctc -> do
(abio :: Value) <- ctc .: "abi"
Why are we re - encoding ? ethers takes the ABI as a string , not an object .
let cfg = defConfig {confIndent = Spaces 0, confCompare = compare}
let csrAbi = T.pack $ LB.unpack $ encodePretty' cfg abio
ma <- ctc .:? "evm"
case ma of
Just a -> do
b <- a .: "bytecode"
csrCode <- b .: "object"
return $ CompiledSolRec {..}
Nothing -> do
csrCode <- ctc .: "bin"
return $ CompiledSolRec {..}
newtype SolOutputErrMsg = SolOutputErrMsg T.Text
instance FromJSON SolOutputErrMsg where
parseJSON = withObject "SolOutputErrMsg" $ \o -> do
x <- o .: "formattedMessage"
return $ SolOutputErrMsg x
data SolOutputCmd
= SolOutputCmd CompiledSolRecs
| SolOutputErr [SolOutputErrMsg]
instance FromJSON SolOutputCmd where
parseJSON = withObject "SolOutputCmd" $ \o -> do
xm <- o .:? "contracts"
case xm of
Just x -> return $ SolOutputCmd x
Nothing -> do
y <- o .: "errors"
return $ SolOutputErr y
data SolOutputFull
= SolOutputFull
{ sofContracts :: M.Map T.Text CompiledSolRecs
}
| SolOutputFail [SolOutputErrMsg]
instance FromJSON SolOutputFull where
parseJSON = withObject "SolOutputFull" $ \o -> do
xm <- o .:? "contracts"
case xm of
Just sofContracts -> return $ SolOutputFull {..}
Nothing -> do
y <- o .: "errors"
return $ SolOutputFail y
theKey :: T.Text
theKey = "theReachKey"
type E x = Either String x
compile_sol_parse :: Bool -> BS.ByteString -> E CompiledSolRecsM
compile_sol_parse isCmdLine stdout =
case isCmdLine of
True ->
case eitherDecodeStrict stdout of
Left m -> bad m
Right (SolOutputErr es) -> baddies es
Right (SolOutputCmd (CompiledSolRecs xs)) -> Right xs
False ->
case eitherDecodeStrict stdout of
Left m -> bad m
Right (SolOutputFail es) -> baddies es
Right (SolOutputFull cs) ->
case M.lookup theKey cs of
Nothing -> Left $ "The compilation key was missing"
Just (CompiledSolRecs xs) -> Right xs
where
bad m = Left $ "It produced invalid JSON output, which failed to decode with the message:\n" <> m
baddies es = Left $ "It failed to compile with the message:\n" <> concatMap f es
where
f (SolOutputErrMsg t) = T.unpack t
compile_sol_extract :: Bool -> String -> String -> BS.ByteString -> E CompiledSolRec
compile_sol_extract isCmdLine solf cn stdout = do
xs <- compile_sol_parse isCmdLine stdout
let k = s2t $ solf <> ":" <> cn
let ks = M.keys xs
let xs' = M.filterWithKey (\k' _ -> T.isSuffixOf k' k) xs
case M.toAscList xs' of
[ (_, x) ] -> Right x
_ -> Left $ "Expected contracts object to have unique key " <> show k <> " but had " <> show (M.keys xs') <> " from " <> show ks
array :: ToJSON a => [a] -> Value
array = toJSONList
tj :: ToJSON a => a -> Value
tj = toJSON
data OptimizationPolicy = OP
{ opEnabled :: Bool
, opRuns :: Int
, opInliner :: Bool
, opIR :: Bool
, opSpecialSeq :: Bool
}
policies :: [OptimizationPolicy]
policies =
[ OP { opRuns = 1, .. }
, OP { opInliner = False , opRuns = 1 , opSpecialSeq = True , .. }
--, OP { .. }
--, OP { opInliner = False, .. }
, OP { opInliner = False , opRuns = 1 , .. }
, OP { opIR = False, .. }
, OP { opEnabled = False, .. }
]
where
opIR = True
opEnabled = True
opRuns = 1
opInliner = True
opSpecialSeq = False
try_compile_sol :: FilePath -> String -> OptimizationPolicy -> IO (E CompiledSolRec)
try_compile_sol solf cn (OP {..}) = do
let theKey' = fromString $ T.unpack theKey
let msteps =
case opSpecialSeq of
False -> []
True -> [("optimizerSteps", tj $ concat
Copied from #L44
-- The names come from #L248
[ "dhfoDgvulfnTUtnIf" -- None of these can make stack problems worse
, "["
, "xa[r]EscLM" -- Turn into SSA and simplify
, "cCTUtTOntnfDIul" -- Perform structural simplification
, "Lcul" -- Simplify again
, "Vcul [j]" -- Reverse SSA
-- should have good "compilability" property here.
, "Tpeul" -- Run functional expression inliner
, "xa[rul]" -- Prune a bit more in SSA
, "xa[r]cL" -- Turn into SSA again and simplify
--, "gvif" -- Run full inliner
SSA plus simplify
, "]"
, "jmul[jul] VcTOcul jmul" -- Make source short and pretty
])]
let spec = object $
[ ("language", "Solidity")
, ("sources", object $
[ (theKey', object $
[ ("urls", array [ solf ])
])
])
, ("settings", object $
[ ("optimizer", object $
[ ("enabled", tj opEnabled)
, ("runs", tj opRuns)
, ("details", object $
[ ("peephole", tj True)
, ("inliner", tj opInliner)
, ("jumpdestRemover", tj True)
, ("orderLiterals", tj True)
, ("deduplicate", tj True)
, ("cse", tj True)
, ("constantOptimizer", tj True)
, ("yul", tj True)
, ("yulDetails", object $
[ ("stackAllocation", tj True)
] <> msteps)
])
])
, ("viaIR", tj opIR)
, ("debug", object $
[ ("revertStrings", "strip")
, ("debugInfo", array ([]::[String]))
])
, ("metadata", object $
[ ("bytecodeHash", "none")
])
, ("outputSelection", object $
[ ("*", object $
[ ("*", array $
([ "abi"
, "evm.bytecode.object"
] :: [String]))
])
])
])
]
let bp = takeDirectory solf
(ec, stdout, stderr) <-
liftIO $ readProcessWithExitCode "solc" [ "--allow-paths", bp, "--standard-json"] $
LB.toStrict $ encode spec
BS.writeFile (solf <> ".solc.json") stdout
let show_output =
case stdout == "" of
True -> stderr
False -> "STDOUT:\n" <> stdout <> "\nSTDERR:\n" <> stderr
case ec of
ExitFailure _ -> return $ Left $ bunpack show_output
ExitSuccess -> return $ compile_sol_extract False solf cn stdout
checkLen :: E CompiledSolRec -> E CompiledSolRec
checkLen = \case
Left x -> Left x
Right x@(CompiledSolRec {..}) ->
case len <= maxContractLen of
True -> Right x
False -> Left $ "The bytecode exceeds the maximum limit; it is " <> show len <> ", but the limit is " <> show maxContractLen
where
len :: Int = floor $ (((fromIntegral $ T.length csrCode) / 2) :: Double)
compile_sol_ :: FilePath -> String -> IO (E CompiledSolRec)
compile_sol_ solf cn = try Nothing policies
where
try merr = \case
[] -> return $ Left $ "The Solidity compiler failed with the message:\n" <> (fromMaybe (impossible "compile_sol_") merr)
opt : more -> do
case merr of
Nothing -> return ()
Just e -> emitWarning Nothing $ W_SolidityOptimizeFailure e
let f = case more of
[] -> id
_ -> checkLen
(f <$> try_compile_sol solf cn opt) >>= \case
Right x -> return $ Right x
Left bado -> try (Just bado) more
| null | https://raw.githubusercontent.com/reach-sh/reach-lang/50e090b1c4134c33c8e3082844c31df1a397dd48/hs/src/Reach/Connector/ETH_solc.hs | haskell | , OP { .. }
, OP { opInliner = False, .. }
The names come from #L248
None of these can make stack problems worse
Turn into SSA and simplify
Perform structural simplification
Simplify again
Reverse SSA
should have good "compilability" property here.
Run functional expression inliner
Prune a bit more in SSA
Turn into SSA again and simplify
, "gvif" -- Run full inliner
Make source short and pretty | module Reach.Connector.ETH_solc
( compile_sol_
, compile_sol_extract
, CompiledSolRec(..)
) where
import Control.Monad.Reader
import Data.Aeson as Aeson
import Data.Aeson.Encode.Pretty
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy.Char8 as LB
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.String
import qualified Data.Text as T
import Reach.Util
import Reach.Warning
import System.Exit
import System.FilePath
import System.Process.ByteString
maxContractLen :: Int
maxContractLen = 24576
newtype CompiledSolRecs = CompiledSolRecs CompiledSolRecsM
type CompiledSolRecsM = M.Map T.Text CompiledSolRec
instance FromJSON CompiledSolRecs where
parseJSON = withObject "CompiledSolRecs" $ \o -> do
let ctcs' = kmToM o
CompiledSolRecs <$> mapM parseJSON ctcs'
data CompiledSolRec = CompiledSolRec
{ csrAbi :: T.Text
, csrCode :: T.Text
}
instance FromJSON CompiledSolRec where
parseJSON = withObject "CompiledSolRec" $ \ctc -> do
(abio :: Value) <- ctc .: "abi"
Why are we re - encoding ? ethers takes the ABI as a string , not an object .
let cfg = defConfig {confIndent = Spaces 0, confCompare = compare}
let csrAbi = T.pack $ LB.unpack $ encodePretty' cfg abio
ma <- ctc .:? "evm"
case ma of
Just a -> do
b <- a .: "bytecode"
csrCode <- b .: "object"
return $ CompiledSolRec {..}
Nothing -> do
csrCode <- ctc .: "bin"
return $ CompiledSolRec {..}
newtype SolOutputErrMsg = SolOutputErrMsg T.Text
instance FromJSON SolOutputErrMsg where
parseJSON = withObject "SolOutputErrMsg" $ \o -> do
x <- o .: "formattedMessage"
return $ SolOutputErrMsg x
data SolOutputCmd
= SolOutputCmd CompiledSolRecs
| SolOutputErr [SolOutputErrMsg]
instance FromJSON SolOutputCmd where
parseJSON = withObject "SolOutputCmd" $ \o -> do
xm <- o .:? "contracts"
case xm of
Just x -> return $ SolOutputCmd x
Nothing -> do
y <- o .: "errors"
return $ SolOutputErr y
data SolOutputFull
= SolOutputFull
{ sofContracts :: M.Map T.Text CompiledSolRecs
}
| SolOutputFail [SolOutputErrMsg]
instance FromJSON SolOutputFull where
parseJSON = withObject "SolOutputFull" $ \o -> do
xm <- o .:? "contracts"
case xm of
Just sofContracts -> return $ SolOutputFull {..}
Nothing -> do
y <- o .: "errors"
return $ SolOutputFail y
theKey :: T.Text
theKey = "theReachKey"
type E x = Either String x
compile_sol_parse :: Bool -> BS.ByteString -> E CompiledSolRecsM
compile_sol_parse isCmdLine stdout =
case isCmdLine of
True ->
case eitherDecodeStrict stdout of
Left m -> bad m
Right (SolOutputErr es) -> baddies es
Right (SolOutputCmd (CompiledSolRecs xs)) -> Right xs
False ->
case eitherDecodeStrict stdout of
Left m -> bad m
Right (SolOutputFail es) -> baddies es
Right (SolOutputFull cs) ->
case M.lookup theKey cs of
Nothing -> Left $ "The compilation key was missing"
Just (CompiledSolRecs xs) -> Right xs
where
bad m = Left $ "It produced invalid JSON output, which failed to decode with the message:\n" <> m
baddies es = Left $ "It failed to compile with the message:\n" <> concatMap f es
where
f (SolOutputErrMsg t) = T.unpack t
compile_sol_extract :: Bool -> String -> String -> BS.ByteString -> E CompiledSolRec
compile_sol_extract isCmdLine solf cn stdout = do
xs <- compile_sol_parse isCmdLine stdout
let k = s2t $ solf <> ":" <> cn
let ks = M.keys xs
let xs' = M.filterWithKey (\k' _ -> T.isSuffixOf k' k) xs
case M.toAscList xs' of
[ (_, x) ] -> Right x
_ -> Left $ "Expected contracts object to have unique key " <> show k <> " but had " <> show (M.keys xs') <> " from " <> show ks
array :: ToJSON a => [a] -> Value
array = toJSONList
tj :: ToJSON a => a -> Value
tj = toJSON
data OptimizationPolicy = OP
{ opEnabled :: Bool
, opRuns :: Int
, opInliner :: Bool
, opIR :: Bool
, opSpecialSeq :: Bool
}
policies :: [OptimizationPolicy]
policies =
[ OP { opRuns = 1, .. }
, OP { opInliner = False , opRuns = 1 , opSpecialSeq = True , .. }
, OP { opInliner = False , opRuns = 1 , .. }
, OP { opIR = False, .. }
, OP { opEnabled = False, .. }
]
where
opIR = True
opEnabled = True
opRuns = 1
opInliner = True
opSpecialSeq = False
try_compile_sol :: FilePath -> String -> OptimizationPolicy -> IO (E CompiledSolRec)
try_compile_sol solf cn (OP {..}) = do
let theKey' = fromString $ T.unpack theKey
let msteps =
case opSpecialSeq of
False -> []
True -> [("optimizerSteps", tj $ concat
Copied from #L44
, "["
SSA plus simplify
, "]"
])]
let spec = object $
[ ("language", "Solidity")
, ("sources", object $
[ (theKey', object $
[ ("urls", array [ solf ])
])
])
, ("settings", object $
[ ("optimizer", object $
[ ("enabled", tj opEnabled)
, ("runs", tj opRuns)
, ("details", object $
[ ("peephole", tj True)
, ("inliner", tj opInliner)
, ("jumpdestRemover", tj True)
, ("orderLiterals", tj True)
, ("deduplicate", tj True)
, ("cse", tj True)
, ("constantOptimizer", tj True)
, ("yul", tj True)
, ("yulDetails", object $
[ ("stackAllocation", tj True)
] <> msteps)
])
])
, ("viaIR", tj opIR)
, ("debug", object $
[ ("revertStrings", "strip")
, ("debugInfo", array ([]::[String]))
])
, ("metadata", object $
[ ("bytecodeHash", "none")
])
, ("outputSelection", object $
[ ("*", object $
[ ("*", array $
([ "abi"
, "evm.bytecode.object"
] :: [String]))
])
])
])
]
let bp = takeDirectory solf
(ec, stdout, stderr) <-
liftIO $ readProcessWithExitCode "solc" [ "--allow-paths", bp, "--standard-json"] $
LB.toStrict $ encode spec
BS.writeFile (solf <> ".solc.json") stdout
let show_output =
case stdout == "" of
True -> stderr
False -> "STDOUT:\n" <> stdout <> "\nSTDERR:\n" <> stderr
case ec of
ExitFailure _ -> return $ Left $ bunpack show_output
ExitSuccess -> return $ compile_sol_extract False solf cn stdout
checkLen :: E CompiledSolRec -> E CompiledSolRec
checkLen = \case
Left x -> Left x
Right x@(CompiledSolRec {..}) ->
case len <= maxContractLen of
True -> Right x
False -> Left $ "The bytecode exceeds the maximum limit; it is " <> show len <> ", but the limit is " <> show maxContractLen
where
len :: Int = floor $ (((fromIntegral $ T.length csrCode) / 2) :: Double)
compile_sol_ :: FilePath -> String -> IO (E CompiledSolRec)
compile_sol_ solf cn = try Nothing policies
where
try merr = \case
[] -> return $ Left $ "The Solidity compiler failed with the message:\n" <> (fromMaybe (impossible "compile_sol_") merr)
opt : more -> do
case merr of
Nothing -> return ()
Just e -> emitWarning Nothing $ W_SolidityOptimizeFailure e
let f = case more of
[] -> id
_ -> checkLen
(f <$> try_compile_sol solf cn opt) >>= \case
Right x -> return $ Right x
Left bado -> try (Just bado) more
|
e8529ba3a573a636843c1d2eee077839bbb2c8036c0288fdcc494c051b49d488 | phochste/clj-marc | marc4j.clj | ( c ) 2010 < >
(ns ^{:doc "A wrapper around Marc4j for parsing MARC21/MARCXML documents" :author "Patrick Hochstenbach"}
clj-marc.marc4j
(:import (java.io FileInputStream))
(:import (org.marc4j MarcStreamReader MarcXmlReader MarcReader))
(:import (org.marc4j.marc Record Leader DataField ControlField))
(:use [clojure.contrib.duck-streams :only (reader)])
(:use [clj-marc.defs]))
(defn- marc4j-seq
[^MarcReader reader]
(when (.hasNext reader)
(cons (.next reader) (lazy-seq (marc4j-seq reader)))))
(defmulti #^{:private true} parse-field class)
(defmethod #^{:private true} parse-field Leader [x]
(let [subfields (list (list :_ (.marshal x)))]
(struct marc-record-field "LDR" " " " " subfields)))
(defmethod #^{:private true} parse-field ControlField [x]
(let [tag (.getTag x)
subfields (list (list :_ (.getData x)))]
(struct marc-record-field tag " " " " subfields)))
(defmethod #^{:private true} parse-field DataField [x]
(let [tag (.getTag x)
ind1 (str (.getIndicator1 x))
ind2 (str (.getIndicator2 x))
subfields (map #(vector (keyword (str (.getCode %))) (.getData %)) (.getSubfields x))]
(struct marc-record-field tag ind1 ind2 subfields)))
(defn- contenthandler
[^Record record]
(let [leader (.getLeader record)
controlfields (.getControlFields record)
datafields (.getDataFields record)]
(for [field (concat [leader] controlfields datafields)]
(parse-field field))))
(defn- startparse
[s format]
(let [in (FileInputStream. s)
reader (cond (= :marc21 format) (MarcStreamReader. in)
(= :marcxml format) (MarcXmlReader. in)
true (MarcStreamReader. in))
records (marc4j-seq reader)]
(for [record records] (contenthandler record))))
(defn parse
"Parses and loads the source s which is a File. The second argument should be
the file format (:marc21 or :marcxml). Returns a Lazy Sequence
of records which are vectors of clj-marc/marc-record-field with keys :field,
:ind1, :ind2 and :subfields."
[s & args]
(let [format (first args)]
(startparse s format))) | null | https://raw.githubusercontent.com/phochste/clj-marc/70ba82dc378351ab7f520aad53515aa4342f60ec/src/clj_marc/marc4j.clj | clojure | ( c ) 2010 < >
(ns ^{:doc "A wrapper around Marc4j for parsing MARC21/MARCXML documents" :author "Patrick Hochstenbach"}
clj-marc.marc4j
(:import (java.io FileInputStream))
(:import (org.marc4j MarcStreamReader MarcXmlReader MarcReader))
(:import (org.marc4j.marc Record Leader DataField ControlField))
(:use [clojure.contrib.duck-streams :only (reader)])
(:use [clj-marc.defs]))
(defn- marc4j-seq
[^MarcReader reader]
(when (.hasNext reader)
(cons (.next reader) (lazy-seq (marc4j-seq reader)))))
(defmulti #^{:private true} parse-field class)
(defmethod #^{:private true} parse-field Leader [x]
(let [subfields (list (list :_ (.marshal x)))]
(struct marc-record-field "LDR" " " " " subfields)))
(defmethod #^{:private true} parse-field ControlField [x]
(let [tag (.getTag x)
subfields (list (list :_ (.getData x)))]
(struct marc-record-field tag " " " " subfields)))
(defmethod #^{:private true} parse-field DataField [x]
(let [tag (.getTag x)
ind1 (str (.getIndicator1 x))
ind2 (str (.getIndicator2 x))
subfields (map #(vector (keyword (str (.getCode %))) (.getData %)) (.getSubfields x))]
(struct marc-record-field tag ind1 ind2 subfields)))
(defn- contenthandler
[^Record record]
(let [leader (.getLeader record)
controlfields (.getControlFields record)
datafields (.getDataFields record)]
(for [field (concat [leader] controlfields datafields)]
(parse-field field))))
(defn- startparse
[s format]
(let [in (FileInputStream. s)
reader (cond (= :marc21 format) (MarcStreamReader. in)
(= :marcxml format) (MarcXmlReader. in)
true (MarcStreamReader. in))
records (marc4j-seq reader)]
(for [record records] (contenthandler record))))
(defn parse
"Parses and loads the source s which is a File. The second argument should be
the file format (:marc21 or :marcxml). Returns a Lazy Sequence
of records which are vectors of clj-marc/marc-record-field with keys :field,
:ind1, :ind2 and :subfields."
[s & args]
(let [format (first args)]
(startparse s format))) |
|
dfc11fca519967b73d4a483a937369889edba4500f98b766b1de6bf924d3302e | luno-lang/luno | scope.ml | open Batteries
open Frontend.Ast
exception NotInScope of string
(* Our scope which contains a mapping between symbol names and types *)
module Env = Map.Make (String)
let new_env = Env.empty
let lookup_symbol (env : 'a Env.t) sym =
try Env.find sym env with Not_found -> failwith "not in scope"
let has_symbol env sym =
match Env.find_opt sym env with
| Some _ -> true
| None -> false
let add_symbol env sym val' = Env.add sym val' env
| null | https://raw.githubusercontent.com/luno-lang/luno/e1b7db6ab70e5a74bf95110943c39511dd727585/lib/semant/scope.ml | ocaml | Our scope which contains a mapping between symbol names and types | open Batteries
open Frontend.Ast
exception NotInScope of string
module Env = Map.Make (String)
let new_env = Env.empty
let lookup_symbol (env : 'a Env.t) sym =
try Env.find sym env with Not_found -> failwith "not in scope"
let has_symbol env sym =
match Env.find_opt sym env with
| Some _ -> true
| None -> false
let add_symbol env sym val' = Env.add sym val' env
|
24ba993c7dcda9d1b9a8cf0c4d67c322c582ea9b7a9f0dfc74d6e76c2877b6f3 | graninas/Hydra | IOException2Spec.hs | # LANGUAGE FunctionalDependencies #
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE TemplateHaskell #-}
module Hydra.Tests.Integration.IOException2Spec where
import qualified Control.Exception as E
import qualified Prelude as P (writeFile, readFile)
import Hydra.Prelude
import qualified Hydra.Domain as D
import qualified Hydra.Language as L
import qualified "hydra-free" Hydra.Runtime as R
import qualified Hydra.Interpreters as R
import Hydra.Testing.Integrational
import Hydra.Testing.Wrappers
import Test.Hspec
import qualified GHC.IO.Exception as IOE
import Hydra.TestData
-- Samples for the book
type NativeResult a = Either IOE.IOException a
-- Language-level error types:
data FSError
= FileNotFound (Maybe FilePath)
| OtherError String
type FSResult a = Either FSError a
-- Language itself:
data FileSystemF next where
WriteFile :: FilePath -> String -> (NativeResult () -> next) -> FileSystemF next
ReadFile :: FilePath -> (NativeResult String -> next) -> FileSystemF next
instance Functor FileSystemF where
fmap f (WriteFile p c next) = WriteFile p c (f . next)
fmap f (ReadFile p next) = ReadFile p (f . next)
type FileSystem = Free FileSystemF
Smart constructor which returns a native error type :
writeFileIO :: FilePath -> String -> FileSystem (NativeResult ())
writeFileIO filePath content = liftF $ WriteFile filePath content id
Smart constructor which returns a custom error type :
writeFile' :: FilePath -> String -> FileSystem (FSResult ())
writeFile' filePath content = do
eRes <- writeFileIO filePath content
pure $ fromNativeResult eRes
Native error - > custom error :
fromNativeResult :: NativeResult a -> FSResult a
fromNativeResult (Right a) = Right a
fromNativeResult (Left ioException) = let
fileName = IOE.ioe_filename ioException
errType = IOE.ioe_type ioException
in case errType of
IOE.NoSuchThing -> Left $ FileNotFound fileName
_ -> Left $ OtherError $ show errType
readFile' filePath = error "Not implemented"
interpretFileSystemF :: FileSystemF a -> IO a
interpretFileSystemF (WriteFile p c next) =
next <$> (try $ P.writeFile p c)
interpretFileSystemF (ReadFile p next) = error "Not implemented"
runFileSystem :: FileSystem a -> IO a
runFileSystem = foldFree interpretFileSystemF
spec :: Spec
spec = pure ()
| null | https://raw.githubusercontent.com/graninas/Hydra/60d591b1300528f5ffd93efa205012eebdd0286c/lib/hydra-free/test/Hydra/Tests/Integration/IOException2Spec.hs | haskell | # LANGUAGE PackageImports #
# LANGUAGE TemplateHaskell #
Samples for the book
Language-level error types:
Language itself: | # LANGUAGE FunctionalDependencies #
module Hydra.Tests.Integration.IOException2Spec where
import qualified Control.Exception as E
import qualified Prelude as P (writeFile, readFile)
import Hydra.Prelude
import qualified Hydra.Domain as D
import qualified Hydra.Language as L
import qualified "hydra-free" Hydra.Runtime as R
import qualified Hydra.Interpreters as R
import Hydra.Testing.Integrational
import Hydra.Testing.Wrappers
import Test.Hspec
import qualified GHC.IO.Exception as IOE
import Hydra.TestData
type NativeResult a = Either IOE.IOException a
data FSError
= FileNotFound (Maybe FilePath)
| OtherError String
type FSResult a = Either FSError a
data FileSystemF next where
WriteFile :: FilePath -> String -> (NativeResult () -> next) -> FileSystemF next
ReadFile :: FilePath -> (NativeResult String -> next) -> FileSystemF next
instance Functor FileSystemF where
fmap f (WriteFile p c next) = WriteFile p c (f . next)
fmap f (ReadFile p next) = ReadFile p (f . next)
type FileSystem = Free FileSystemF
Smart constructor which returns a native error type :
writeFileIO :: FilePath -> String -> FileSystem (NativeResult ())
writeFileIO filePath content = liftF $ WriteFile filePath content id
Smart constructor which returns a custom error type :
writeFile' :: FilePath -> String -> FileSystem (FSResult ())
writeFile' filePath content = do
eRes <- writeFileIO filePath content
pure $ fromNativeResult eRes
Native error - > custom error :
fromNativeResult :: NativeResult a -> FSResult a
fromNativeResult (Right a) = Right a
fromNativeResult (Left ioException) = let
fileName = IOE.ioe_filename ioException
errType = IOE.ioe_type ioException
in case errType of
IOE.NoSuchThing -> Left $ FileNotFound fileName
_ -> Left $ OtherError $ show errType
readFile' filePath = error "Not implemented"
interpretFileSystemF :: FileSystemF a -> IO a
interpretFileSystemF (WriteFile p c next) =
next <$> (try $ P.writeFile p c)
interpretFileSystemF (ReadFile p next) = error "Not implemented"
runFileSystem :: FileSystem a -> IO a
runFileSystem = foldFree interpretFileSystemF
spec :: Spec
spec = pure ()
|
7ab543621c553739e3c58a938df2344c5bc47951ecda5f5b843798b42d203060 | forward/incanter-BLAS | bayes.clj | bayes.clj -- Bayesian estimation library for Clojure
by
March 11 , 2009
Copyright ( c ) , 2009 . All rights reserved . The use
and distribution terms for this software are covered by the Eclipse
;; Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.htincanter.at the root of this
;; distribution. By using this software in any fashion, you are
;; agreeing to be bound by the terms of this license. You must not
;; remove this notice, or any other, from this software.
CHANGE LOG
March 11 , 2009 : First version
(ns ^{:doc "This is library provides functions for performing
basic Bayesian modeling and inference.
"
:author "David Edgar Liebke"}
incanter.bayes
(:use [incanter.core :only (matrix mmult mult div minus trans ncol nrow
plus to-list decomp-cholesky solve half-vectorize
vectorize symmetric-matrix identity-matrix kronecker
bind-columns)]
[incanter.stats :only (sample-normal sample-gamma sample-dirichlet
sample-inv-wishart sample-mvn mean)]))
(defn sample-model-params
  " Returns a sample of the given size of the parameters (coefficients and
    error variance) of the given linear-model. The sample is generated using
    Gibbs sampling.

    See also:
      incanter.stats/linear-model

    Examples:
      (use '(incanter core datasets stats charts bayes))

      (def ols-data (to-matrix (get-dataset :survey)))
      (def x (sel ols-data (range 0 2313) (range 1 10)))
      (def y (sel ols-data (range 0 2313) 10))
      (def lm (linear-model y x :intercept false))
      (def param-samp (sample-model-params 5000 lm))

      ;; view trace plots
      (view (trace-plot (:var param-samp)))
      (view (trace-plot (sel (:coefs param-samp) :cols 0)))

      ;; view histograms
      (view (histogram (:var param-samp)))
      (view (histogram (sel (:coefs param-samp) :cols 0)))

      ;; calculate statistics
      (map mean (trans (:coefs param-samp)))
      (map median (trans (:coefs param-samp)))
      (map sd (trans (:coefs param-samp)))

      ;; show the 95% bayesian confidence interval for the first coefficient
      (quantile (sel (:coefs param-samp) :cols 0) :probs [0.025 0.975])
  "
  ([^Integer size {:keys [x y coefs residuals]}]
    (let [;; (X'X)^-1 -- scales the coefficient draws below
          xtxi (solve (mmult (trans x) x))
          ;; inverse-gamma draw for the error variance:
          ;; shape = (n - p) / 2, rate = e'e / 2
          shape (/ (- (nrow x) (ncol x)) 2)
          rate (mult 1/2 (mmult (trans residuals) residuals))
          s-sq (div 1 (sample-gamma size :shape shape :rate rate))]
      {:coefs
       (matrix
        ;; (pmap ...) ;; run a parallel map over the values of s-sq
        (map
         (fn [s2]
           ;; draw beta | s2 around the OLS coefs with covariance s2*(X'X)^-1,
           ;; via a Cholesky factor applied to standard-normal draws
           (to-list (plus (trans coefs)
                          (mmult (trans (sample-normal (ncol x)))
                                 (decomp-cholesky (mult s2 xtxi))))))
         (to-list (trans s-sq))))
       :var s-sq})))
(defn sample-proportions
  "DEPRECATED: this function was renamed; use sample-multinomial-params.
   Calling it always throws, pointing at the replacement."
  [size counts]
  (throw (Exception. "sample-proportions has been renamed sample-multinomial-params")))
(defn sample-multinomial-params
  " Returns a sample of multinomial proportion parameters.
    The counts are assumed to have a multinomial distribution.
    A uniform prior distribution is assigned to the multinomial vector
    theta, then the posterior distribution of theta is
    proportional to a dirichlet distribution with parameters
    (plus counts 1).

    Examples:
      (use '(incanter core stats bayes charts))

      (def samp-props (sample-multinomial-params 1000 [727 583 137]))

      ;; view means, 95% CI, and histograms of the proportion parameters
      (mean (sel samp-props :cols 0))
      (quantile (sel samp-props :cols 0) :probs [0.0275 0.975])
      (view (histogram (sel samp-props :cols 0)))
      (mean (sel samp-props :cols 1))
      (quantile (sel samp-props :cols 1) :probs [0.0275 0.975])
      (view (histogram (sel samp-props :cols 1)))
      (mean (sel samp-props :cols 2))
      (quantile (sel samp-props :cols 2) :probs [0.0275 0.975])
      (view (histogram (sel samp-props :cols 2)))

      ;; view a histogram of the difference in proportions between the first
      ;; two candidates
      (view (histogram (minus (sel samp-props :cols 0) (sel samp-props :cols 1))))
  "
  ([^Integer size counts]
    ;; Dirichlet(counts + 1) is the posterior under a uniform prior.
    (sample-dirichlet size (plus counts 1))))
(defn sample-mvn-params
  " Returns samples of means (sampled from an mvn distribution) and vectorized covariance
    matrices (sampled from an inverse-wishart distribution) for the given mvn data.

    Arguments:
      size -- the number of samples to return
      y -- the data used to estimate the parameters

    Returns map with following fields:
      :means
      :sigmas

    Examples:
      (use '(incanter core stats bayes charts))
      (def y (sample-mvn 500 :mean [0 0] :sigma (identity-matrix 2)))
      (def samp (sample-mvn-params 1000 y))
      (map mean (trans (:means samp)))
      (symmetric-matrix (map mean (trans (:sigmas samp))) :lower false)

      (view (histogram (sel (:means samp) :cols 0) :x-label \"mean 1\"))
      (view (histogram (sel (:means samp) :cols 1) :x-label \"mean 2\"))
      (view (histogram (sel (:sigmas samp) :cols 1) :x-label \"covariance\"))
      (view (histogram (sel (:sigmas samp) :cols 0) :x-label \"variance 1\"))
      (view (histogram (sel (:sigmas samp) :cols 2) :x-label \"variance 2\"))

      (map #(quantile % :probs [0.025 0.0975]) (trans (:means samp)))
      (map #(quantile % :probs [0.025 0.0975]) (trans (:sigmas samp)))

      (use '(incanter core stats bayes charts))
      (def y (sample-mvn 500 :sigma (symmetric-matrix [10 5 10]) :mean [5 2]))
      (def samp (sample-mvn-params 1000 y))
      (symmetric-matrix (map mean (trans (:sigmas samp))) :lower false)
      (map mean (trans (:means samp)))
  "
  ([^Integer size y & options]
    (let [opts (when options (apply assoc {} options))
          ;; column means of the data
          means (map mean (trans y))
          n (count y)
          ;; scatter matrix: sum over rows of (row - means)(row - means)'
          S (reduce plus
                    (map #(mmult (minus (to-list %) means)
                                 (trans (minus (to-list %) means)))
                         y))
          ;; sigma draws: inverse-Wishart with df = n-1 and scale S^-1,
          ;; stored half-vectorized (one row per draw)
          sigma-samp (matrix (for [_ (range size)]
                               (half-vectorize (sample-inv-wishart :df (dec n) :scale (solve S)))))
          ;; mu | sigma draws: MVN centered at the sample means with
          ;; covariance sigma / n
          mu-samp (matrix (for [sigma sigma-samp]
                            (sample-mvn 1
                                        :mean means
                                        :sigma (div (symmetric-matrix sigma :lower false) n))))
          ]
      {:means mu-samp :sigmas sigma-samp})))
(defn- sample-mv-model-params
  " Gibbs sampler for a multivariate linear model: alternates draws of the
    vectorized coefficient matrix (given sigma) and the error covariance
    (inverse-Wishart, given the coefficients).  An intercept column is
    prepended to x.

    Examples:

      (use '(incanter core stats bayes datasets))
      (def survey (to-matrix (get-dataset :survey)))
      (def x (sel survey :cols (range 2 10)))
      (def y (sel survey :cols (range 10 14)))
      (time (def params (sample-mv-model-params 100 y x)))
      (trans (matrix (map mean (trans (:coefs params))) (inc (ncol x))))
      (matrix (map mean (trans (:sigmas params))) (ncol y))
  "
  ([^Integer size y x & options]
    (let [opts (when options (apply assoc {} options))
          ;; design matrix with a leading column of ones (intercept)
          _x (bind-columns (repeat (nrow x) 1) x)
          ;_x x
          d (ncol y)
          k (ncol _x)
          df (dec (nrow y))
          ;; y-vec (mapcat identity y)
          y-vec (vectorize y)
          I-d (identity-matrix d)
          xt (trans _x)
          xtx (mmult xt _x)
          kron-I-x (kronecker I-d _x)
          ]
      ;; start the chain from sigma = identity
      (loop [i 0 coefs nil sigmas (list (vectorize (identity-matrix d)))]
        (if (= i size)
          {:coefs (matrix coefs) :sigmas (matrix sigmas)}
          (let [s (trans (matrix (first sigmas) d))
                ;; conditional covariance of vec(B): (sigma^-1 (x) X'X)^-1
                vb (solve (kronecker (solve s) xtx))
                ;; mn (mmult vb (identity (mmult xt y (trans (solve s)))))
                mn (mmult vb (vectorize (mmult xt y (trans (solve s)))))
                b (plus mn (trans (mmult (trans (sample-normal (* d k))) (decomp-cholesky vb)))) ;; added trans to sample-normal output
                ;_ (println b)
                ;; draw s from inverse wishart
                ;; e (matrix (minus y-vec (mmult kron-I-x b)) d)
                e (trans (matrix (minus y-vec (mmult kron-I-x b)) (nrow y)))
                ;_ (println (incanter.core/dim e))
                v (mmult (trans e) e)
                s-new (sample-inv-wishart :df df :scale v)]
            (recur (inc i) (conj coefs b) (conj sigmas (vectorize s-new)))))))))
| null | https://raw.githubusercontent.com/forward/incanter-BLAS/da48558cc9d8296b775d8e88de532a4897ee966e/src/main/clojure/incanter/bayes.clj | clojure | Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.htincanter.at the root of this
distribution. By using this software in any fashion, you are
agreeing to be bound by the terms of this license. You must not
remove this notice, or any other, from this software.
view trace plots
view histograms
calculate statistics
; run a parallel map over the values of s - sq
_x x
added trans to sample-normal output
_ (println b)
_ (println (incanter.core/dim e)) | bayes.clj -- Bayesian estimation library for Clojure
by
March 11 , 2009
Copyright ( c ) , 2009 . All rights reserved . The use
and distribution terms for this software are covered by the Eclipse
CHANGE LOG
March 11 , 2009 : First version
(ns ^{:doc "This is library provides functions for performing
basic Bayesian modeling and inference.
"
:author "David Edgar Liebke"}
incanter.bayes
(:use [incanter.core :only (matrix mmult mult div minus trans ncol nrow
plus to-list decomp-cholesky solve half-vectorize
vectorize symmetric-matrix identity-matrix kronecker
bind-columns)]
[incanter.stats :only (sample-normal sample-gamma sample-dirichlet
sample-inv-wishart sample-mvn mean)]))
(defn sample-model-params
" Returns a sample of the given size of the the parameters (coefficients and
error variance) of the given linear-model. The sample is generated using
Gibbs sampling.
See also:
incanter.stats/linear-model
Examples:
(use '(incanter core datasets stats charts bayes))
(def ols-data (to-matrix (get-dataset :survey)))
(def x (sel ols-data (range 0 2313) (range 1 10)))
(def y (sel ols-data (range 0 2313) 10))
(def lm (linear-model y x :intercept false))
(def param-samp (sample-model-params 5000 lm))
(view (trace-plot (:var param-samp )))
(view (trace-plot (sel (:coefs param-samp) :cols 0)))
(view (histogram (:var param-samp)))
(view (histogram (sel (:coefs param-samp) :cols 0)))
(map mean (trans (:coefs param-samp)))
(map median (trans (:coefs param-samp)))
(map sd (trans (:coefs param-samp)))
show the 95 % bayesian confidence interval for the firt coefficient
(quantile (sel (:coefs param-samp) :cols 0) :probs [0.025 0.975])
"
([^Integer size {:keys [x y coefs residuals]}]
(let [xtxi (solve (mmult (trans x) x))
shape (/ (- (nrow x) (ncol x)) 2)
rate (mult 1/2 (mmult (trans residuals) residuals))
s-sq (div 1 (sample-gamma size :shape shape :rate rate))]
{:coefs
(matrix
(map
(fn [s2]
(to-list (plus (trans coefs)
(mmult (trans (sample-normal (ncol x)))
(decomp-cholesky (mult s2 xtxi))))))
(to-list (trans s-sq))))
:var s-sq})))
(defn sample-proportions
" sample-proportions has been renamed sample-multinomial-params"
([size counts]
(throw (Exception. "sample-proportions has been renamed sample-multinomial-params"))))
(defn sample-multinomial-params
" Returns a sample of multinomial proportion parameters.
The counts are assumed to have a multinomial distribution.
A uniform prior distribution is assigned to the multinomial vector
theta, then the posterior distribution of theta is
proportional to a dirichlet distribution with parameters
(plus counts 1).
Examples:
(use '(incanter core stats bayes charts))
(def samp-props (sample-multinomial-params 1000 [727 583 137]))
view means , 95 % CI , and histograms of the proportion parameters
(mean (sel samp-props :cols 0))
(quantile (sel samp-props :cols 0) :probs [0.0275 0.975])
(view (histogram (sel samp-props :cols 0)))
(mean (sel samp-props :cols 1))
(quantile (sel samp-props :cols 1) :probs [0.0275 0.975])
(view (histogram (sel samp-props :cols 1)))
(mean (sel samp-props :cols 2))
(quantile (sel samp-props :cols 2) :probs [0.0275 0.975])
(view (histogram (sel samp-props :cols 2)))
view a histogram of the difference in proportions between the first
two candidates
(view (histogram (minus (sel samp-props :cols 0) (sel samp-props :cols 1))))
"
([^Integer size counts]
(sample-dirichlet size (plus counts 1))))
(defn sample-mvn-params
" Returns samples of means (sampled from an mvn distribution) and vectorized covariance
matrices (sampled from an inverse-wishart distribution) for the given mvn data.
Arguments:
size -- the number of samples to return
y -- the data used to estimate the parameters
Returns map with following fields:
:means
:sigmas
Examples:
(use '(incanter core stats bayes charts))
(def y (sample-mvn 500 :mean [0 0] :sigma (identity-matrix 2)))
(def samp (sample-mvn-params 1000 y))
(map mean (trans (:means samp)))
(symmetric-matrix (map mean (trans (:sigmas samp))) :lower false)
(view (histogram (sel (:means samp) :cols 0) :x-label \"mean 1\"))
(view (histogram (sel (:means samp) :cols 1) :x-label \"mean 2\"))
(view (histogram (sel (:sigmas samp) :cols 1) :x-label \"covariance\"))
(view (histogram (sel (:sigmas samp) :cols 0) :x-label \"variance 1\"))
(view (histogram (sel (:sigmas samp) :cols 2) :x-label \"variance 2\"))
(map #(quantile % :probs [0.025 0.0975]) (trans (:means samp)))
(map #(quantile % :probs [0.025 0.0975]) (trans (:sigmas samp)))
(use '(incanter core stats bayes charts))
(def y (sample-mvn 500 :sigma (symmetric-matrix [10 5 10]) :mean [5 2]))
(def samp (sample-mvn-params 1000 y))
(symmetric-matrix (map mean (trans (:sigmas samp))) :lower false)
(map mean (trans (:means samp)))
"
([^Integer size y & options]
(let [opts (when options (apply assoc {} options))
means (map mean (trans y))
n (count y)
S (reduce plus
(map #(mmult (minus (to-list %) means)
(trans (minus (to-list %) means)))
y))
sigma-samp (matrix (for [_ (range size)]
(half-vectorize (sample-inv-wishart :df (dec n) :scale (solve S)))))
mu-samp (matrix (for [sigma sigma-samp]
(sample-mvn 1
:mean means
:sigma (div (symmetric-matrix sigma :lower false) n))))
]
{:means mu-samp :sigmas sigma-samp})))
(defn- sample-mv-model-params
"
Examples:
(use '(incanter core stats bayes datasets))
(def survey (to-matrix (get-dataset :survey)))
(def x (sel survey :cols (range 2 10)))
(def y (sel survey :cols (range 10 14)))
(time (def params (sample-mv-model-params 100 y x)))
(trans (matrix (map mean (trans (:coefs params))) (inc (ncol x))))
(matrix (map mean (trans (:sigmas params))) (ncol y))
"
([^Integer size y x & options]
(let [opts (when options (apply assoc {} options))
_x (bind-columns (repeat (nrow x) 1) x)
d (ncol y)
k (ncol _x)
df (dec (nrow y))
( mapcat identity y )
y-vec (vectorize y)
I-d (identity-matrix d)
xt (trans _x)
xtx (mmult xt _x)
kron-I-x (kronecker I-d _x)
]
(loop [i 0 coefs nil sigmas (list (vectorize (identity-matrix d)))]
(if (= i size)
{:coefs (matrix coefs) :sigmas (matrix sigmas)}
(let [s (trans (matrix (first sigmas) d))
vb (solve (kronecker (solve s) xtx))
mn ( mmult vb ( identity ( mmult xt y ( trans ( solve s ) ) ) ) )
mn (mmult vb (vectorize (mmult xt y (trans (solve s)))))
draw s from inverse wishart
e ( matrix ( minus y - vec ( b ) ) d )
e (trans (matrix (minus y-vec (mmult kron-I-x b)) (nrow y)))
v (mmult (trans e) e)
s-new (sample-inv-wishart :df df :scale v)]
(recur (inc i) (conj coefs b) (conj sigmas (vectorize s-new)))))))))
|
76d4824d5b508bac2ba71727578a481efdff6835ecbabc311be098a088272dfe | achirkin/vulkan | VK_EXT_memory_priority.hs | # OPTIONS_HADDOCK not - home #
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MagicHash #-}
# LANGUAGE PatternSynonyms #
{-# LANGUAGE Strict #-}
{-# LANGUAGE ViewPatterns #-}
module Graphics.Vulkan.Ext.VK_EXT_memory_priority
* Vulkan extension : @VK_EXT_memory_priority@
-- |
--
-- supported: @vulkan@
--
contact : @Jeff Bolz @jeffbolznv@
--
-- author: @EXT@
--
-- type: @device@
--
Extension number : @239@
--
-- Required extensions: 'VK_KHR_get_physical_device_properties2'.
--
-- ** Required extensions: 'VK_KHR_get_physical_device_properties2'.
module Graphics.Vulkan.Marshal, AHardwareBuffer(),
ANativeWindow(), CAMetalLayer(), VkBool32(..), VkDeviceAddress(..),
VkDeviceSize(..), VkFlags(..), VkSampleMask(..),
VkAndroidSurfaceCreateFlagsKHR(..), VkBufferViewCreateFlags(..),
VkBuildAccelerationStructureFlagsNV(..),
VkCommandPoolTrimFlags(..), VkCommandPoolTrimFlagsKHR(..),
VkDebugUtilsMessengerCallbackDataFlagsEXT(..),
VkDebugUtilsMessengerCreateFlagsEXT(..),
VkDescriptorBindingFlagsEXT(..), VkDescriptorPoolResetFlags(..),
VkDescriptorUpdateTemplateCreateFlags(..),
VkDescriptorUpdateTemplateCreateFlagsKHR(..),
VkDeviceCreateFlags(..), VkDirectFBSurfaceCreateFlagsEXT(..),
VkDisplayModeCreateFlagsKHR(..),
VkDisplaySurfaceCreateFlagsKHR(..), VkEventCreateFlags(..),
VkExternalFenceFeatureFlagsKHR(..),
VkExternalFenceHandleTypeFlagsKHR(..),
VkExternalMemoryFeatureFlagsKHR(..),
VkExternalMemoryHandleTypeFlagsKHR(..),
VkExternalSemaphoreFeatureFlagsKHR(..),
VkExternalSemaphoreHandleTypeFlagsKHR(..),
VkFenceImportFlagsKHR(..), VkGeometryFlagsNV(..),
VkGeometryInstanceFlagsNV(..), VkHeadlessSurfaceCreateFlagsEXT(..),
VkIOSSurfaceCreateFlagsMVK(..),
VkImagePipeSurfaceCreateFlagsFUCHSIA(..),
VkInstanceCreateFlags(..), VkMacOSSurfaceCreateFlagsMVK(..),
VkMemoryAllocateFlagsKHR(..), VkMemoryMapFlags(..),
VkMetalSurfaceCreateFlagsEXT(..), VkPeerMemoryFeatureFlagsKHR(..),
VkPipelineColorBlendStateCreateFlags(..),
VkPipelineCoverageModulationStateCreateFlagsNV(..),
VkPipelineCoverageReductionStateCreateFlagsNV(..),
VkPipelineCoverageToColorStateCreateFlagsNV(..),
VkPipelineDepthStencilStateCreateFlags(..),
VkPipelineDiscardRectangleStateCreateFlagsEXT(..),
VkPipelineDynamicStateCreateFlags(..),
VkPipelineInputAssemblyStateCreateFlags(..),
VkPipelineLayoutCreateFlags(..),
VkPipelineMultisampleStateCreateFlags(..),
VkPipelineRasterizationConservativeStateCreateFlagsEXT(..),
VkPipelineRasterizationDepthClipStateCreateFlagsEXT(..),
VkPipelineRasterizationStateCreateFlags(..),
VkPipelineRasterizationStateStreamCreateFlagsEXT(..),
VkPipelineTessellationStateCreateFlags(..),
VkPipelineVertexInputStateCreateFlags(..),
VkPipelineViewportStateCreateFlags(..),
VkPipelineViewportSwizzleStateCreateFlagsNV(..),
VkQueryPoolCreateFlags(..), VkResolveModeFlagsKHR(..),
VkSemaphoreCreateFlags(..), VkSemaphoreImportFlagsKHR(..),
VkSemaphoreWaitFlagsKHR(..),
VkStreamDescriptorSurfaceCreateFlagsGGP(..),
VkValidationCacheCreateFlagsEXT(..), VkViSurfaceCreateFlagsNN(..),
VkWaylandSurfaceCreateFlagsKHR(..),
VkWin32SurfaceCreateFlagsKHR(..), VkXcbSurfaceCreateFlagsKHR(..),
VkXlibSurfaceCreateFlagsKHR(..), VkDeviceCreateInfo,
VkDeviceDiagnosticsConfigBitmaskNV(..), VkDeviceEventTypeEXT(..),
VkDeviceGroupPresentModeBitmaskKHR(..), VkDeviceCreateFlagBits(..),
VkDeviceDiagnosticsConfigFlagBitsNV(),
VkDeviceDiagnosticsConfigFlagsNV(),
VkDeviceGroupPresentModeFlagBitsKHR(),
VkDeviceGroupPresentModeFlagsKHR(), VkDeviceQueueCreateBitmask(..),
VkDeviceQueueCreateFlagBits(), VkDeviceQueueCreateFlags(),
VkDeviceQueueCreateInfo, VkMemoryAllocateInfo,
VkMemoryPriorityAllocateInfoEXT, VkPhysicalDeviceFeatures,
VkPhysicalDeviceFeatures2,
VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkStructureType(..),
-- > #include "vk_platform.h"
VK_EXT_MEMORY_PRIORITY_SPEC_VERSION,
pattern VK_EXT_MEMORY_PRIORITY_SPEC_VERSION,
VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME,
pattern VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
pattern VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Marshal
import Graphics.Vulkan.Types.BaseTypes
import Graphics.Vulkan.Types.Bitmasks
import Graphics.Vulkan.Types.Enum.Device
import Graphics.Vulkan.Types.Enum.StructureType
import Graphics.Vulkan.Types.Struct.Device (VkDeviceCreateInfo, VkDeviceQueueCreateInfo)
import Graphics.Vulkan.Types.Struct.Memory (VkMemoryAllocateInfo,
VkMemoryPriorityAllocateInfoEXT)
import Graphics.Vulkan.Types.Struct.PhysicalDevice (VkPhysicalDeviceFeatures2,
VkPhysicalDeviceMemoryPriorityFeaturesEXT)
import Graphics.Vulkan.Types.Struct.PhysicalDeviceFeatures (VkPhysicalDeviceFeatures)
-- | Extension version, available at both the value and type level.
pattern VK_EXT_MEMORY_PRIORITY_SPEC_VERSION :: (Num a, Eq a) => a

pattern VK_EXT_MEMORY_PRIORITY_SPEC_VERSION = 1

type VK_EXT_MEMORY_PRIORITY_SPEC_VERSION = 1

-- | Extension name as a NUL-terminated 'CString'.  The explicitly
-- bidirectional pattern matches by string contents (via 'cmpCStrings'),
-- not by pointer identity, so any equal C string matches.
pattern VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME :: CString
pattern VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME <-
        (is_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME -> True)
  where
    VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME
      = _VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME

{-# INLINE _VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME #-}
_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME :: CString
_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME
  = Ptr "VK_EXT_memory_priority\NUL"#

{-# INLINE is_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME #-}
is_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME :: CString -> Bool
is_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME
  = (EQ ==) . cmpCStrings _VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME

type VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME =
     "VK_EXT_memory_priority"

-- | VkStructureType values introduced by this extension
-- (extension number 239 => base value 1000238000).
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT
          :: VkStructureType

pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT
          = VkStructureType 1000238000

pattern VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT ::
        VkStructureType

pattern VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT =
        VkStructureType 1000238001
| null | https://raw.githubusercontent.com/achirkin/vulkan/b2e0568c71b5135010f4bba939cd8dcf7a05c361/vulkan-api/src-gen/Graphics/Vulkan/Ext/VK_EXT_memory_priority.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE MagicHash #
# LANGUAGE Strict #
# LANGUAGE ViewPatterns #
|
supported: @vulkan@
author: @EXT@
type: @device@
Required extensions: 'VK_KHR_get_physical_device_properties2'.
** Required extensions: 'VK_KHR_get_physical_device_properties2'.
> #include "vk_platform.h" | # OPTIONS_HADDOCK not - home #
# LANGUAGE PatternSynonyms #
module Graphics.Vulkan.Ext.VK_EXT_memory_priority
* Vulkan extension : @VK_EXT_memory_priority@
contact : @Jeff Bolz @jeffbolznv@
Extension number : @239@
module Graphics.Vulkan.Marshal, AHardwareBuffer(),
ANativeWindow(), CAMetalLayer(), VkBool32(..), VkDeviceAddress(..),
VkDeviceSize(..), VkFlags(..), VkSampleMask(..),
VkAndroidSurfaceCreateFlagsKHR(..), VkBufferViewCreateFlags(..),
VkBuildAccelerationStructureFlagsNV(..),
VkCommandPoolTrimFlags(..), VkCommandPoolTrimFlagsKHR(..),
VkDebugUtilsMessengerCallbackDataFlagsEXT(..),
VkDebugUtilsMessengerCreateFlagsEXT(..),
VkDescriptorBindingFlagsEXT(..), VkDescriptorPoolResetFlags(..),
VkDescriptorUpdateTemplateCreateFlags(..),
VkDescriptorUpdateTemplateCreateFlagsKHR(..),
VkDeviceCreateFlags(..), VkDirectFBSurfaceCreateFlagsEXT(..),
VkDisplayModeCreateFlagsKHR(..),
VkDisplaySurfaceCreateFlagsKHR(..), VkEventCreateFlags(..),
VkExternalFenceFeatureFlagsKHR(..),
VkExternalFenceHandleTypeFlagsKHR(..),
VkExternalMemoryFeatureFlagsKHR(..),
VkExternalMemoryHandleTypeFlagsKHR(..),
VkExternalSemaphoreFeatureFlagsKHR(..),
VkExternalSemaphoreHandleTypeFlagsKHR(..),
VkFenceImportFlagsKHR(..), VkGeometryFlagsNV(..),
VkGeometryInstanceFlagsNV(..), VkHeadlessSurfaceCreateFlagsEXT(..),
VkIOSSurfaceCreateFlagsMVK(..),
VkImagePipeSurfaceCreateFlagsFUCHSIA(..),
VkInstanceCreateFlags(..), VkMacOSSurfaceCreateFlagsMVK(..),
VkMemoryAllocateFlagsKHR(..), VkMemoryMapFlags(..),
VkMetalSurfaceCreateFlagsEXT(..), VkPeerMemoryFeatureFlagsKHR(..),
VkPipelineColorBlendStateCreateFlags(..),
VkPipelineCoverageModulationStateCreateFlagsNV(..),
VkPipelineCoverageReductionStateCreateFlagsNV(..),
VkPipelineCoverageToColorStateCreateFlagsNV(..),
VkPipelineDepthStencilStateCreateFlags(..),
VkPipelineDiscardRectangleStateCreateFlagsEXT(..),
VkPipelineDynamicStateCreateFlags(..),
VkPipelineInputAssemblyStateCreateFlags(..),
VkPipelineLayoutCreateFlags(..),
VkPipelineMultisampleStateCreateFlags(..),
VkPipelineRasterizationConservativeStateCreateFlagsEXT(..),
VkPipelineRasterizationDepthClipStateCreateFlagsEXT(..),
VkPipelineRasterizationStateCreateFlags(..),
VkPipelineRasterizationStateStreamCreateFlagsEXT(..),
VkPipelineTessellationStateCreateFlags(..),
VkPipelineVertexInputStateCreateFlags(..),
VkPipelineViewportStateCreateFlags(..),
VkPipelineViewportSwizzleStateCreateFlagsNV(..),
VkQueryPoolCreateFlags(..), VkResolveModeFlagsKHR(..),
VkSemaphoreCreateFlags(..), VkSemaphoreImportFlagsKHR(..),
VkSemaphoreWaitFlagsKHR(..),
VkStreamDescriptorSurfaceCreateFlagsGGP(..),
VkValidationCacheCreateFlagsEXT(..), VkViSurfaceCreateFlagsNN(..),
VkWaylandSurfaceCreateFlagsKHR(..),
VkWin32SurfaceCreateFlagsKHR(..), VkXcbSurfaceCreateFlagsKHR(..),
VkXlibSurfaceCreateFlagsKHR(..), VkDeviceCreateInfo,
VkDeviceDiagnosticsConfigBitmaskNV(..), VkDeviceEventTypeEXT(..),
VkDeviceGroupPresentModeBitmaskKHR(..), VkDeviceCreateFlagBits(..),
VkDeviceDiagnosticsConfigFlagBitsNV(),
VkDeviceDiagnosticsConfigFlagsNV(),
VkDeviceGroupPresentModeFlagBitsKHR(),
VkDeviceGroupPresentModeFlagsKHR(), VkDeviceQueueCreateBitmask(..),
VkDeviceQueueCreateFlagBits(), VkDeviceQueueCreateFlags(),
VkDeviceQueueCreateInfo, VkMemoryAllocateInfo,
VkMemoryPriorityAllocateInfoEXT, VkPhysicalDeviceFeatures,
VkPhysicalDeviceFeatures2,
VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkStructureType(..),
VK_EXT_MEMORY_PRIORITY_SPEC_VERSION,
pattern VK_EXT_MEMORY_PRIORITY_SPEC_VERSION,
VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME,
pattern VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
pattern VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Marshal
import Graphics.Vulkan.Types.BaseTypes
import Graphics.Vulkan.Types.Bitmasks
import Graphics.Vulkan.Types.Enum.Device
import Graphics.Vulkan.Types.Enum.StructureType
import Graphics.Vulkan.Types.Struct.Device (VkDeviceCreateInfo, VkDeviceQueueCreateInfo)
import Graphics.Vulkan.Types.Struct.Memory (VkMemoryAllocateInfo,
VkMemoryPriorityAllocateInfoEXT)
import Graphics.Vulkan.Types.Struct.PhysicalDevice (VkPhysicalDeviceFeatures2,
VkPhysicalDeviceMemoryPriorityFeaturesEXT)
import Graphics.Vulkan.Types.Struct.PhysicalDeviceFeatures (VkPhysicalDeviceFeatures)
pattern VK_EXT_MEMORY_PRIORITY_SPEC_VERSION :: (Num a, Eq a) => a
pattern VK_EXT_MEMORY_PRIORITY_SPEC_VERSION = 1
type VK_EXT_MEMORY_PRIORITY_SPEC_VERSION = 1
pattern VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME :: CString
pattern VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME <-
(is_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME -> True)
where
VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME
= _VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME
# INLINE _ VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME #
_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME :: CString
_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME
= Ptr "VK_EXT_memory_priority\NUL"#
# INLINE is_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME #
is_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME :: CString -> Bool
is_VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME
= (EQ ==) . cmpCStrings _VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME
type VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME =
"VK_EXT_memory_priority"
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT
:: VkStructureType
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT
= VkStructureType 1000238000
pattern VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT ::
VkStructureType
pattern VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT =
VkStructureType 1000238001
|
d9e5c6b87b26da2d55c0e9e698e573c6b0945b9e22c752e4c2c3fa222d478564 | satori-com/mzbench | loop_rate.erl | [{pool, [{size, 1},
{worker_type, dummy_worker}],
[{loop, [{time, {200, ms}}, {rate, {1.1, rps}}],
[{print, "loop#1"}]},
{loop, [{time, {0.2, sec}}, {rate, {1, rpm}}],
[{print, "loop#2"}]},
{loop, [{time, {0.0033, min}}, {rate, {1.1, rph}}],
[{print, "loop#3"}]},
{loop, [{time, {0.00005, h}},
{rate, {ramp, linear, {1.1, rpm},
{2.1, rps}}}],
[{print, "loop#4"}]},
{loop, [{time, {0.00005, h}},
{rate, {ramp, linear, {5, rps},
{5, rpm}}}],
[{print, "loop#5"}]}]
}].
| null | https://raw.githubusercontent.com/satori-com/mzbench/02be2684655cde94d537c322bb0611e258ae9718/acceptance_tests/scripts/loop_rate.erl | erlang | [{pool, [{size, 1},
{worker_type, dummy_worker}],
[{loop, [{time, {200, ms}}, {rate, {1.1, rps}}],
[{print, "loop#1"}]},
{loop, [{time, {0.2, sec}}, {rate, {1, rpm}}],
[{print, "loop#2"}]},
{loop, [{time, {0.0033, min}}, {rate, {1.1, rph}}],
[{print, "loop#3"}]},
{loop, [{time, {0.00005, h}},
{rate, {ramp, linear, {1.1, rpm},
{2.1, rps}}}],
[{print, "loop#4"}]},
{loop, [{time, {0.00005, h}},
{rate, {ramp, linear, {5, rps},
{5, rpm}}}],
[{print, "loop#5"}]}]
}].
|
|
d8e815351689416c052bcbddcf55bee0c8d0c808b307b7dac23a57bd4dfeff86 | eponai/sulolive | main.cljs | (ns env.web.main
(:require [eponai.web.app :as app]
[eponai.client.devtools :as devtools]
))
(defn ^:export runsulo
  "JS-exported entry point: installs the dev-tools hooks, then starts the
  app with an empty options map."
  []
  (devtools/install-app)
  (app/run-simple {}))
| null | https://raw.githubusercontent.com/eponai/sulolive/7a70701bbd3df6bbb92682679dcedb53f8822c18/env/client/simple/env/web/main.cljs | clojure | (ns env.web.main
(:require [eponai.web.app :as app]
[eponai.client.devtools :as devtools]
))
(defn ^:export runsulo []
(devtools/install-app)
(app/run-simple {}))
|
|
16b44549159b2df48b3a39dd6859045349d1e0f8c360efbf985494f0182aa219 | Viasat/halite | test_propagate.clj | Copyright ( c ) 2022 Viasat , Inc.
Licensed under the MIT license
(ns com.viasat.halite.test-propagate
(:require [com.viasat.halite.choco-clj-opt :as choco-clj]
[com.viasat.halite.propagate :as propagate]
[com.viasat.halite.transpile.lowering :as lowering]
[com.viasat.halite.transpile.rewriting :as rewriting :refer [with-summarized-trace-for]]
[com.viasat.halite.transpile.simplify :as simplify]
[com.viasat.halite.transpile.ssa :as ssa]
[com.viasat.halite.transpile.util :refer [fixpoint]]
[schema.core :as s]
[schema.test])
(:use clojure.test))
;; Spec map shared by the tests below: an abstract Painted spec limited to
;; three colors, and a concrete Car spec whose refinement derives the color
;; from horsePower (> 250 => "red", otherwise "blue").
(def strings-and-abstract-specs-example
  '{:ws/Painted
    {:abstract? true
     :fields {:color :String}
     :constraints [["validColors" (or (= color "red") (= color "green") (= color "blue"))]]}

    :ws/Car
    {:fields {:horsePower :Integer}
     :constraints [["validHorsePowers" (and (<= 120 horsePower) (<= horsePower 300))]]
     :refines-to {:ws/Painted
                  {:expr {:$type :ws/Painted
                          :color (if (> horsePower 250) "red" "blue")}}}}})
;; Expected propagation result for an unconstrained {:$type :ws/Car}:
;; horsePower narrowed to the spec's bounds, refined color narrowed to the
;; three legal values.
(def simple-answer {:$type :ws/Car,
                    :horsePower {:$in [120 300]},
                    :$refines-to #:ws{:Painted {:color {:$in #{"blue" "green" "red"}}}}})
(deftest test-strings-and-abstract-specs-example
  (are [in out]
       (= out (propagate/propagate strings-and-abstract-specs-example in))

    ;; no constraints beyond the type: full legal bounds
    {:$type :ws/Car}
    simple-answer

    ;; the requested colors intersect the spec's colors leaving only "red",
    ;; which forces horsePower above 250
    {:$type :ws/Car :$refines-to {:ws/Painted {:color {:$in #{"red" "yellow"}}}}}
    {:$type :ws/Car,
     :horsePower {:$in [251 300]},
     :$refines-to #:ws{:Painted {:color "red"}}}

    ;; a concrete horsePower pins the refined color
    {:$type :ws/Car :horsePower 140}
    {:$type :ws/Car, :horsePower 140
     :$refines-to {:ws/Painted {:color "blue"}}}))
(deftest test-propagate-cond
  ;; Same model as strings-and-abstract-specs-example, but with every
  ;; constraint and the refinement written via `cond`; propagation should
  ;; yield identical bounds (simple-answer).
  (is (= simple-answer
         (propagate/propagate '{:ws/Painted {:abstract? true
                                             :fields {:color :String}
                                             :constraints [["validColors" (cond (= color "red") true
                                                                                (= color "green") true
                                                                                (= color "blue") true
                                                                                false)]]}
                                :ws/Car {:fields {:horsePower :Integer}
                                         :constraints [["validHorsePowers" (cond (and (<= 120 horsePower)
                                                                                      (<= horsePower 300)) true
                                                                                 false)]]
                                         :refines-to {:ws/Painted
                                                      {:expr {:$type :ws/Painted
                                                              :color (cond (> horsePower 250) "red"
                                                                           "blue")}}}}}
                               {:$type :ws/Car}))))
(deftest test-propagate-fixed-decimal
  ;; Fixed-decimal variant: horsePower is a scale-1 decimal, so the
  ;; propagated bounds come back as #d decimal literals.
  (is (= {:$type :ws/Car,
          :horsePower {:$in [#d "12.0" #d "30.0"]},
          :$refines-to #:ws{:Painted {:color {:$in #{"blue" "green" "red"}}}}}
         (propagate/propagate '{:ws/Painted {:abstract? true
                                             :fields {:color :String}
                                             :constraints [["validColors" (cond (= color "red") true
                                                                                (= color "green") true
                                                                                (= color "blue") true
                                                                                false)]]}
                                :ws/Car {:fields {:horsePower [:Decimal 1]}
                                         :constraints [["validHorsePowers" (cond (and (<= #d "12.0" horsePower)
                                                                                      (<= horsePower #d "30.0")) true
                                                                                 false)]]
                                         :refines-to {:ws/Painted
                                                      {:expr {:$type :ws/Painted
                                                              :color (cond (> horsePower #d "25.0") "red"
                                                                           "blue")}}}}}
                               {:$type :ws/Car}))))
(deftest test-propagate-fixed-decimal-rescale
  ;; Like test-propagate-fixed-decimal, but the decimal bounds are built
  ;; with `rescale` expressions (including a nested rescale); the final
  ;; propagated bounds should be the same [12.0, 30.0] range.
  (is (= {:$type :ws/Car,
          :horsePower {:$in [#d "12.0" #d "30.0"]},
          :$refines-to #:ws{:Painted {:color {:$in #{"blue" "green" "red"}}}}}
         (propagate/propagate '{:ws/Painted {:abstract? true
                                             :fields {:color :String}
                                             :constraints [["validColors" (cond (= color "red") true
                                                                                (= color "green") true
                                                                                (= color "blue") true
                                                                                false)]]}
                                :ws/Car {:fields {:horsePower [:Decimal 1]}
                                         :constraints [["validHorsePowers" (cond (and (<= (rescale (if true
                                                                                                     #d "12.0123"
                                                                                                     #d "13.9999") 1) horsePower)
                                                                                      (<= horsePower (rescale (rescale (* #d "1.0" 30) 2) 1))) true
                                                                                 false)]]
                                         :refines-to {:ws/Painted
                                                      {:expr {:$type :ws/Painted
                                                              :color (cond (> horsePower (rescale #d "25.09" 1)) "red"
                                                                           "blue")}}}}}
                               {:$type :ws/Car}))))
;; (run-tests)
| null | https://raw.githubusercontent.com/Viasat/halite/a5f81473dadc1b8e63ed6744d3b0154098f1b3ab/test/com/viasat/halite/test_propagate.clj | clojure | (run-tests) | Copyright ( c ) 2022 Viasat , Inc.
Licensed under the MIT license
(ns com.viasat.halite.test-propagate
(:require [com.viasat.halite.choco-clj-opt :as choco-clj]
[com.viasat.halite.propagate :as propagate]
[com.viasat.halite.transpile.lowering :as lowering]
[com.viasat.halite.transpile.rewriting :as rewriting :refer [with-summarized-trace-for]]
[com.viasat.halite.transpile.simplify :as simplify]
[com.viasat.halite.transpile.ssa :as ssa]
[com.viasat.halite.transpile.util :refer [fixpoint]]
[schema.core :as s]
[schema.test])
(:use clojure.test))
(def strings-and-abstract-specs-example
'{:ws/Painted
{:abstract? true
:fields {:color :String}
:constraints [["validColors" (or (= color "red") (= color "green") (= color "blue"))]]}
:ws/Car
{:fields {:horsePower :Integer}
:constraints [["validHorsePowers" (and (<= 120 horsePower) (<= horsePower 300))]]
:refines-to {:ws/Painted
{:expr {:$type :ws/Painted
:color (if (> horsePower 250) "red" "blue")}}}}})
(def simple-answer {:$type :ws/Car,
:horsePower {:$in [120 300]},
:$refines-to #:ws{:Painted {:color {:$in #{"blue" "green" "red"}}}}})
(deftest test-strings-and-abstract-specs-example
(are [in out]
(= out (propagate/propagate strings-and-abstract-specs-example in))
{:$type :ws/Car}
simple-answer
{:$type :ws/Car :$refines-to {:ws/Painted {:color {:$in #{"red" "yellow"}}}}}
{:$type :ws/Car,
:horsePower {:$in [251 300]},
:$refines-to #:ws{:Painted {:color "red"}}}
{:$type :ws/Car :horsePower 140}
{:$type :ws/Car, :horsePower 140
:$refines-to {:ws/Painted {:color "blue"}}}))
(deftest test-propagate-cond
(is (= simple-answer
(propagate/propagate '{:ws/Painted {:abstract? true
:fields {:color :String}
:constraints [["validColors" (cond (= color "red") true
(= color "green") true
(= color "blue") true
false)]]}
:ws/Car {:fields {:horsePower :Integer}
:constraints [["validHorsePowers" (cond (and (<= 120 horsePower)
(<= horsePower 300)) true
false)]]
:refines-to {:ws/Painted
{:expr {:$type :ws/Painted
:color (cond (> horsePower 250) "red"
"blue")}}}}}
{:$type :ws/Car}))))
(deftest test-propagate-fixed-decimal
(is (= {:$type :ws/Car,
:horsePower {:$in [#d "12.0" #d "30.0"]},
:$refines-to #:ws{:Painted {:color {:$in #{"blue" "green" "red"}}}}}
(propagate/propagate '{:ws/Painted {:abstract? true
:fields {:color :String}
:constraints [["validColors" (cond (= color "red") true
(= color "green") true
(= color "blue") true
false)]]}
:ws/Car {:fields {:horsePower [:Decimal 1]}
:constraints [["validHorsePowers" (cond (and (<= #d "12.0" horsePower)
(<= horsePower #d "30.0")) true
false)]]
:refines-to {:ws/Painted
{:expr {:$type :ws/Painted
:color (cond (> horsePower #d "25.0") "red"
"blue")}}}}}
{:$type :ws/Car}))))
(deftest test-propagate-fixed-decimal-rescale
(is (= {:$type :ws/Car,
:horsePower {:$in [#d "12.0" #d "30.0"]},
:$refines-to #:ws{:Painted {:color {:$in #{"blue" "green" "red"}}}}}
(propagate/propagate '{:ws/Painted {:abstract? true
:fields {:color :String}
:constraints [["validColors" (cond (= color "red") true
(= color "green") true
(= color "blue") true
false)]]}
:ws/Car {:fields {:horsePower [:Decimal 1]}
:constraints [["validHorsePowers" (cond (and (<= (rescale (if true
#d "12.0123"
#d "13.9999") 1) horsePower)
(<= horsePower (rescale (rescale (* #d "1.0" 30) 2) 1))) true
false)]]
:refines-to {:ws/Painted
{:expr {:$type :ws/Painted
:color (cond (> horsePower (rescale #d "25.09" 1)) "red"
"blue")}}}}}
{:$type :ws/Car}))))
|
e8b899272b67defdc89bc64b45c8a5825805759ac924eb560917c413af7d1aab | reanimate/reanimate | BoundingBox.hs | |
Bounding - boxes can be immensely useful for aligning objects
but they are not part of the SVG specification and can not be
computed for all SVG nodes . In particular , you 'll get bad results
when asking for the bounding boxes of Text nodes ( because fonts
are difficult ) , clipped nodes , and filtered nodes .
Bounding-boxes can be immensely useful for aligning objects
but they are not part of the SVG specification and cannot be
computed for all SVG nodes. In particular, you'll get bad results
when asking for the bounding boxes of Text nodes (because fonts
are difficult), clipped nodes, and filtered nodes.
-}
module Reanimate.Svg.BoundingBox
( boundingBox
, svgHeight
, svgWidth
) where
import Control.Arrow ((***))
import Control.Lens ((^.))
import Data.List (foldl')
import Data.Maybe (mapMaybe)
import qualified Data.Vector.Unboxed as V
import qualified Geom2D.CubicBezier.Linear as Bezier
import Graphics.SvgTree
import Linear.V2 (V2 (V2))
import Linear.Vector (Additive (zero))
import Reanimate.Constants (defaultDPI)
import Reanimate.Svg.LineCommand (LineCommand (..), toLineCommands)
import qualified Reanimate.Transform as Transform
-- | Return bounding box of SVG tree.
The four numbers returned are ( minimal X - coordinate , minimal Y - coordinate , width , height )
--
-- Note: Bounding boxes are computed on a best-effort basis and will not work
-- in all cases. The only supported SVG nodes are: path, circle, polyline,
ellipse , line , rectangle , image and svg . All other nodes return ( 0,0,0,0 ) .
-- The box for the svg node is based on the document's width and height
-- (if both are present).
boundingBox :: Tree -> (Double, Double, Double, Double)
boundingBox t =
case svgBoundingPoints t of
[] -> (0,0,0,0)
(V2 x y:rest) ->
let (minx, miny, maxx, maxy) = foldl' worker (x, y, x, y) rest
in (minx, miny, maxx-minx, maxy-miny)
where
worker (minx, miny, maxx, maxy) (V2 x y) =
(min minx x, min miny y, max maxx x, max maxy y)
-- | Height of SVG node in local units (not pixels). Computed on best-effort basis
-- and will not give accurate results for all SVG nodes.
svgHeight :: Tree -> Double
svgHeight t = h
where
(_x, _y, _w, h) = boundingBox t
-- | Width of SVG node in local units (not pixels). Computed on best-effort basis
-- and will not give accurate results for all SVG nodes.
svgWidth :: Tree -> Double
svgWidth t = w
where
(_x, _y, w, _h) = boundingBox t
-- | Sampling of points in a line path.
linePoints :: [LineCommand] -> [RPoint]
linePoints = worker zero
where
worker _from [] = []
worker from (x:xs) =
case x of
LineMove to -> worker to xs
-- LineDraw to -> from:to:worker to xs
LineBezier [p] ->
p : worker p xs
LineBezier ctrl -> -- approximation
let bezier = Bezier.AnyBezier (V.fromList (from:ctrl))
in [ Bezier.evalBezier bezier (recip chunks*i) | i <- [0..chunks]] ++
worker (last ctrl) xs
LineEnd p -> p : worker p xs
chunks = 10
svgBoundingPoints :: Tree -> [RPoint]
svgBoundingPoints t = map (Transform.transformPoint m) $
case t of
None -> []
UseTree{} -> []
GroupTree g -> concatMap svgBoundingPoints (g ^. groupChildren)
SymbolTree g -> concatMap svgBoundingPoints (g ^. groupChildren)
FilterTree{} -> []
DefinitionTree{} -> []
PathTree p -> linePoints $ toLineCommands (p ^. pathDefinition)
CircleTree c -> circleBoundingPoints c
PolyLineTree pl -> pl ^. polyLinePoints
EllipseTree e -> ellipseBoundingPoints e
LineTree l -> map pointToRPoint [l ^. linePoint1, l ^. linePoint2]
RectangleTree r ->
let p = pointToRPoint (r ^. rectUpperLeftCorner)
mDims = (r ^. rectWidth, r ^. rectHeight)
in rectPoints p mDims
TextTree{} -> []
ImageTree img ->
let p = pointToRPoint (img ^. imageCornerUpperLeft)
dims = (img ^. imageWidth, img ^. imageHeight)
in rectPoints' p dims
MeshGradientTree{} -> []
SvgTree d -> let mDims = (d ^. documentWidth, d ^. documentHeight)
in rectPoints (V2 0 0) mDims
_ -> []
where
m = Transform.mkMatrix (t ^. transform)
mapTuple f = f *** f
toUserUnit' = toUserUnit defaultDPI
pointToRPoint p =
case mapTuple toUserUnit' p of
(Num x, Num y) -> V2 x y
_ -> error "Reanimate.Svg.svgBoundingPoints: Unrecognized number format."
circleBoundingPoints circ =
let (xnum, ynum) = circ ^. circleCenter
rnum = circ ^. circleRadius
in case mapMaybe unpackNumber [xnum, ynum, rnum] of
[x, y, r] -> ellipsePoints x y r r
_ -> []
ellipseBoundingPoints e =
let (xnum,ynum) = e ^. ellipseCenter
xrnum = e ^. ellipseXRadius
yrnum = e ^. ellipseYRadius
in case mapMaybe unpackNumber [xnum, ynum, xrnum, yrnum] of
[x, y, xr, yr] -> ellipsePoints x y xr yr
_ -> []
ellipsePoints x y xr yr = [ V2 (x + xr * cos angle) (y + yr * sin angle)
| angle <- [0, pi/10 .. 2 * pi] ]
rectPoints p mDims = case mDims of
(Just w, Just h) -> rectPoints' p (w, h)
_ -> [p]
rectPoints' p@(V2 x y) dims =
p : case mapTuple toUserUnit' dims of
((Num w), (Num h)) -> let (x', y') = (x + w, y + h)
in [V2 x' y, V2 x' y', V2 x y']
_ -> []
unpackNumber n =
case toUserUnit' n of
Num d -> Just d
_ -> Nothing
| null | https://raw.githubusercontent.com/reanimate/reanimate/2d2a37b6acc2f683c9ca1339678ddf75b31b740e/src/Reanimate/Svg/BoundingBox.hs | haskell | | Return bounding box of SVG tree.
Note: Bounding boxes are computed on a best-effort basis and will not work
in all cases. The only supported SVG nodes are: path, circle, polyline,
The box for the svg node is based on the document's width and height
(if both are present).
| Height of SVG node in local units (not pixels). Computed on best-effort basis
and will not give accurate results for all SVG nodes.
| Width of SVG node in local units (not pixels). Computed on best-effort basis
and will not give accurate results for all SVG nodes.
| Sampling of points in a line path.
LineDraw to -> from:to:worker to xs
approximation | |
Bounding - boxes can be immensely useful for aligning objects
but they are not part of the SVG specification and can not be
computed for all SVG nodes . In particular , you 'll get bad results
when asking for the bounding boxes of Text nodes ( because fonts
are difficult ) , clipped nodes , and filtered nodes .
Bounding-boxes can be immensely useful for aligning objects
but they are not part of the SVG specification and cannot be
computed for all SVG nodes. In particular, you'll get bad results
when asking for the bounding boxes of Text nodes (because fonts
are difficult), clipped nodes, and filtered nodes.
-}
module Reanimate.Svg.BoundingBox
( boundingBox
, svgHeight
, svgWidth
) where
import Control.Arrow ((***))
import Control.Lens ((^.))
import Data.List (foldl')
import Data.Maybe (mapMaybe)
import qualified Data.Vector.Unboxed as V
import qualified Geom2D.CubicBezier.Linear as Bezier
import Graphics.SvgTree
import Linear.V2 (V2 (V2))
import Linear.Vector (Additive (zero))
import Reanimate.Constants (defaultDPI)
import Reanimate.Svg.LineCommand (LineCommand (..), toLineCommands)
import qualified Reanimate.Transform as Transform
The four numbers returned are ( minimal X - coordinate , minimal Y - coordinate , width , height )
ellipse , line , rectangle , image and svg . All other nodes return ( 0,0,0,0 ) .
boundingBox :: Tree -> (Double, Double, Double, Double)
boundingBox t =
case svgBoundingPoints t of
[] -> (0,0,0,0)
(V2 x y:rest) ->
let (minx, miny, maxx, maxy) = foldl' worker (x, y, x, y) rest
in (minx, miny, maxx-minx, maxy-miny)
where
worker (minx, miny, maxx, maxy) (V2 x y) =
(min minx x, min miny y, max maxx x, max maxy y)
svgHeight :: Tree -> Double
svgHeight t = h
where
(_x, _y, _w, h) = boundingBox t
svgWidth :: Tree -> Double
svgWidth t = w
where
(_x, _y, w, _h) = boundingBox t
linePoints :: [LineCommand] -> [RPoint]
linePoints = worker zero
where
worker _from [] = []
worker from (x:xs) =
case x of
LineMove to -> worker to xs
LineBezier [p] ->
p : worker p xs
let bezier = Bezier.AnyBezier (V.fromList (from:ctrl))
in [ Bezier.evalBezier bezier (recip chunks*i) | i <- [0..chunks]] ++
worker (last ctrl) xs
LineEnd p -> p : worker p xs
chunks = 10
svgBoundingPoints :: Tree -> [RPoint]
svgBoundingPoints t = map (Transform.transformPoint m) $
case t of
None -> []
UseTree{} -> []
GroupTree g -> concatMap svgBoundingPoints (g ^. groupChildren)
SymbolTree g -> concatMap svgBoundingPoints (g ^. groupChildren)
FilterTree{} -> []
DefinitionTree{} -> []
PathTree p -> linePoints $ toLineCommands (p ^. pathDefinition)
CircleTree c -> circleBoundingPoints c
PolyLineTree pl -> pl ^. polyLinePoints
EllipseTree e -> ellipseBoundingPoints e
LineTree l -> map pointToRPoint [l ^. linePoint1, l ^. linePoint2]
RectangleTree r ->
let p = pointToRPoint (r ^. rectUpperLeftCorner)
mDims = (r ^. rectWidth, r ^. rectHeight)
in rectPoints p mDims
TextTree{} -> []
ImageTree img ->
let p = pointToRPoint (img ^. imageCornerUpperLeft)
dims = (img ^. imageWidth, img ^. imageHeight)
in rectPoints' p dims
MeshGradientTree{} -> []
SvgTree d -> let mDims = (d ^. documentWidth, d ^. documentHeight)
in rectPoints (V2 0 0) mDims
_ -> []
where
m = Transform.mkMatrix (t ^. transform)
mapTuple f = f *** f
toUserUnit' = toUserUnit defaultDPI
pointToRPoint p =
case mapTuple toUserUnit' p of
(Num x, Num y) -> V2 x y
_ -> error "Reanimate.Svg.svgBoundingPoints: Unrecognized number format."
circleBoundingPoints circ =
let (xnum, ynum) = circ ^. circleCenter
rnum = circ ^. circleRadius
in case mapMaybe unpackNumber [xnum, ynum, rnum] of
[x, y, r] -> ellipsePoints x y r r
_ -> []
ellipseBoundingPoints e =
let (xnum,ynum) = e ^. ellipseCenter
xrnum = e ^. ellipseXRadius
yrnum = e ^. ellipseYRadius
in case mapMaybe unpackNumber [xnum, ynum, xrnum, yrnum] of
[x, y, xr, yr] -> ellipsePoints x y xr yr
_ -> []
ellipsePoints x y xr yr = [ V2 (x + xr * cos angle) (y + yr * sin angle)
| angle <- [0, pi/10 .. 2 * pi] ]
rectPoints p mDims = case mDims of
(Just w, Just h) -> rectPoints' p (w, h)
_ -> [p]
rectPoints' p@(V2 x y) dims =
p : case mapTuple toUserUnit' dims of
((Num w), (Num h)) -> let (x', y') = (x + w, y + h)
in [V2 x' y, V2 x' y', V2 x y']
_ -> []
unpackNumber n =
case toUserUnit' n of
Num d -> Just d
_ -> Nothing
|
202dddf5f2e6c2dc0e23ddb51bd383aa0a74d06cf50f31c5f4604df18e0f898a | kupl/FixML | sub28.ml | type formula =
| True
| False
| Not of formula
| AndAlso of formula * formula
| OrElse of formula * formula
| Imply of formula * formula
| Equal of exp * exp
and exp =
| Num of int
| Plus of exp * exp
| Minus of exp * exp
let rec arithmetic = fun f -> match f with
| Num num -> num
| Plus(num1,num2) -> (arithmetic num1) + (arithmetic num2)
| Minus(num1,num2) -> (arithmetic num1) - (arithmetic num2)
let rec eval : formula -> bool
= fun f -> match f with
| True -> true
| False -> false
| Not fm -> if eval fm then false else true
| AndAlso(fm1,fm2) -> (eval fm1) && (eval fm2)
| OrElse(fm1,fm2) -> (eval fm1) || (eval fm2)
| Imply(fm1,fm2) -> if (eval fm1) then if (eval fm2) then true else false else false
| Equal(exp1,exp2) -> (arithmetic exp1) = (arithmetic exp2)
| null | https://raw.githubusercontent.com/kupl/FixML/0a032a733d68cd8ccc8b1034d2908cd43b241fce/benchmarks/formula/formula1/submissions/sub28.ml | ocaml | type formula =
| True
| False
| Not of formula
| AndAlso of formula * formula
| OrElse of formula * formula
| Imply of formula * formula
| Equal of exp * exp
and exp =
| Num of int
| Plus of exp * exp
| Minus of exp * exp
let rec arithmetic = fun f -> match f with
| Num num -> num
| Plus(num1,num2) -> (arithmetic num1) + (arithmetic num2)
| Minus(num1,num2) -> (arithmetic num1) - (arithmetic num2)
let rec eval : formula -> bool
= fun f -> match f with
| True -> true
| False -> false
| Not fm -> if eval fm then false else true
| AndAlso(fm1,fm2) -> (eval fm1) && (eval fm2)
| OrElse(fm1,fm2) -> (eval fm1) || (eval fm2)
| Imply(fm1,fm2) -> if (eval fm1) then if (eval fm2) then true else false else false
| Equal(exp1,exp2) -> (arithmetic exp1) = (arithmetic exp2)
|
|
3391680132fc4401ae7203b87a35464decc927835a51fa7d15d55c7605979cba | oxidizing/sihl | web_static.ml | let middleware () =
let local_path =
Option.value
(Core_configuration.read_string "PUBLIC_DIR")
~default:"./public"
in
let internal_uri_prefix =
Option.value
(Core_configuration.read_string "PUBLIC_URI_PREFIX")
~default:"/assets"
in
let uri_prefix = Web.externalize_path internal_uri_prefix in
Opium.Middleware.static_unix ~local_path ~uri_prefix ()
;;
| null | https://raw.githubusercontent.com/oxidizing/sihl/c6786f25424c1b9f40ce656e908bd31515f1cd09/sihl/src/web_static.ml | ocaml | let middleware () =
let local_path =
Option.value
(Core_configuration.read_string "PUBLIC_DIR")
~default:"./public"
in
let internal_uri_prefix =
Option.value
(Core_configuration.read_string "PUBLIC_URI_PREFIX")
~default:"/assets"
in
let uri_prefix = Web.externalize_path internal_uri_prefix in
Opium.Middleware.static_unix ~local_path ~uri_prefix ()
;;
|
|
bd27eef44a18ab6e4ba8dece727b6b2cef1a90a62d90b1455eb33b28480af0cc | dradtke/Lisp-Text-Editor | selections.lisp | (in-package :gtk-cffi)
(defclass target-list (object)
())
| null | https://raw.githubusercontent.com/dradtke/Lisp-Text-Editor/b0947828eda82d7edd0df8ec2595e7491a633580/quicklisp/dists/quicklisp/software/gtk-cffi-20120208-cvs/gtk/selections.lisp | lisp | (in-package :gtk-cffi)
(defclass target-list (object)
())
|
|
033b73bfab21bf11cd56415acb10f84c5e53871702012a7c04d69d937f5b98a1 | racket/web-server | cookies-test.rkt | #lang racket/base
(require rackunit
racket/promise
racket/list
racket/match
racket/file
(for-syntax racket/base)
net/url
net/cookies/common
(except-in net/cookies/server
make-cookie)
web-server/http/request-structs
web-server/http/response-structs
web-server/http/cookie
web-server/http/id-cookie
web-server/http/cookie-parse)
(provide cookies-tests)
(define (header-equal? h1 h2)
(and (bytes=? (header-field h1)
(header-field h2))
(bytes=? (header-value h1)
(header-value h2))))
(define (set-header->read-header h)
(make-header #"Cookie" (header-value h)))
(define-check (check-equal?/list-no-order actual expected)
(or (and (list? actual)
(list? expected)
(= (length actual)
(length expected))
(let loop ([actual-to-go actual]
[expected-to-go expected])
(match expected-to-go
['() (null? actual-to-go)]
[(cons this-expected more-expected)
(and (member this-expected actual-to-go)
(loop (remove this-expected actual-to-go)
more-expected))])))
(with-check-info (['actual actual]
['expected expected])
(fail-check))))
(define-syntax (test-equal?/list-no-order stx)
(syntax-case stx ()
[(_ msg actual expected)
(with-syntax ([expr (syntax/loc stx (check-equal?/list-no-order actual expected))])
(syntax/loc stx
(test-case msg expr)))]))
(define cookies-tests
(test-suite
"Cookies"
(test-suite
"cookie.rkt"
(test-suite
"cookie->header and make-cookie"
(test-check "Simple" header-equal?
(cookie->header (make-cookie "name" "value"))
(make-header #"Set-Cookie" #"name=value"))
(test-equal? "Comment"
(header-value (cookie->header (make-cookie "name" "value" #:comment "comment")))
#"name=value") ;comment is now ignored
(test-equal? "Domain"
(header-value (cookie->header (make-cookie "name" "value" #:domain "host.domain")))
#"name=value; Domain=host.domain")
(test-equal? "max-age"
(header-value (cookie->header (make-cookie "name" "value" #:max-age 24)))
#"name=value; Max-Age=24")
(test-equal? "path"
(header-value (cookie->header (make-cookie "name" "value" #:path "path")))
#"name=value; Path=path")
(test-equal? "secure? #t"
(header-value (cookie->header (make-cookie "name" "value" #:secure? #t)))
#"name=value; Secure")
(test-equal? "secure? #f"
(header-value (cookie->header (make-cookie "name" "value" #:secure? #f)))
#"name=value")))
(let ()
(define (reqcs hs)
(request-cookies
(make-request
#"GET" (string->url "")
hs (delay empty) #f
"host" 80 "client")))
(define (reqc h)
(reqcs (list (make-header #"Cookie" h))))
(test-suite
"cookie-parse.rkt"
;RFC 6265 no longer gives special meaning to "$Version" "$Path" or "$Domain"
(test-equal? "None"
(reqcs empty)
empty)
(test-equal?/list-no-order "Simple"
(reqc #"$Version=\"1\"; name=\"value\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "name" "value" #f #f)))
(test-equal?/list-no-order "Path"
(reqc #"$Version=\"1\"; name=\"value\"; $Path=\"/acme\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "$Path" "/acme" #f #f)
(make-client-cookie "name" "value" #f #f))) ;new version of request-cookies never populates path or domain
(test-equal?/list-no-order "Domain"
(reqc #"$Version=\"1\"; name=\"value\"; $Domain=\"host.acme\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "$Domain" "host.acme" #f #f)
(make-client-cookie "name" "value" #f #f))) ;new version of request-cookies never populates path or domain
(test-equal?/list-no-order "Multiple"
(reqc #"$Version=\"1\"; key1=\"value1\"; key2=\"value2\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "key1" "value1" #f #f)
(make-client-cookie "key2" "value2" #f #f)))
(test-equal?/list-no-order "Multiple w/ paths & domains"
(reqc #"$Version=\"1\"; key1=\"value1\"; $Path=\"/acme\"; key2=\"value2\"; $Domain=\"host.acme\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "$Domain" "host.acme" #f #f)
(make-client-cookie "$Path" "/acme" #f #f)
(make-client-cookie "key1" "value1" #f #f) ;new version of request-cookies never populates path or domain
(make-client-cookie "key2" "value2" #f #f)))
(test-equal?/list-no-order "phpBB. PR10689"
(reqc #"style_cookie=null; phpbb3_e1p9b_u=54; phpbb3_e1p9b_k=; phpbb3_e1p9b_sid=3fa8d7a7b65fbabcbe9b345861dc079a")
(list (make-client-cookie "style_cookie" "null" #f #f)
(make-client-cookie "phpbb3_e1p9b_u" "54" #f #f)
(make-client-cookie "phpbb3_e1p9b_k" "" #f #f)
(make-client-cookie "phpbb3_e1p9b_sid" "3fa8d7a7b65fbabcbe9b345861dc079a" #f #f)))
(test-equal?/list-no-order "Google"
(reqc ;this is rejected if there is a \n between the cookies or if there is a trailing \r\n
(bytes-append #"teaching-order=course; "
#"__utmz=165257760.1272597702.1.1.utmcsr=(direct)"
#"|utmccn=(direct)|utmcmd=(none)"))
(list (make-client-cookie "teaching-order" "course" #f #f)
(make-client-cookie "__utmz" "165257760.1272597702.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)" #f #f)))
#;(let ()
(define in "hell\"w\"o") ;<--- this is not a cookie-value?
(define out #"id=\"hell\\\"w\\\"o\"")
(test-check "quotes (pr14194)" header-equal?
(cookie->header (make-cookie "id" in))
(make-header #"Set-Cookie" out))
(test-equal? "quotes (pr14194)"
(reqc out)
(list (make-client-cookie "id" in #f #f))))))
(test-suite
"RFC 6265 modifications"
(let ([dt (date* 26 42 0 9 3 2017 4 67 #f 0 0 "UTC")])
(test-equal? "#:expires as string"
(cookie-expires (make-cookie "my-cookie"
"my-value"
#:expires "Thu, 09 Mar 2017 00:42:26 GMT"))
dt)
(define c
(make-cookie "my-cookie"
"my-value"
#:comment "This is ignored"
#:domain "example.com"
#:max-age 42
#:path "/some-path"
#:expires dt
#:secure? 'yes
#:http-only? 'yes
#:extension "ext"))
(test-suite
"extra arguments to make-cookie"
(check-match c
(cookie "my-cookie"
"my-value"
(? (λ (x) (equal? x dt)))
42
"example.com"
"/some-path"
#t
#t
"ext"))
(check-match (cookie->header c)
(header
#"Set-Cookie"
(app (λ (val) (regexp-split #rx"; " val))
(list-no-order #"my-cookie=my-value"
#"Expires=Thu, 09 Mar 2017 00:42:26 GMT"
#"Max-Age=42"
#"Domain=example.com"
#"Path=/some-path"
#"Secure"
#"HttpOnly"
#"ext"))))
)))
(test-suite
"id-cookie.rkt"
(test-suite
"make-secret-salt/file"
(let ([tmp-secret-salt-path (make-temporary-file)])
(define (delete-salt-file)
(when (file-exists? tmp-secret-salt-path)
(delete-file tmp-secret-salt-path)))
(dynamic-wind delete-salt-file
(λ ()
(test-equal? "should only initialize once"
(make-secret-salt/file tmp-secret-salt-path)
(make-secret-salt/file tmp-secret-salt-path)))
delete-salt-file)))
(let ()
(define test-secret-salt
(bytes-append #"U;\256\0.\203Iu\3663\367\262d\220\276t\207\17^_0\240\2U\341"
#"\240E\20\322\36\213\210\224\35ey\365:\332\"\e\211\262\v@y\n"
#"\377\32561\364\277R\363\334Q\273\270\36\223\242\202\272\206"
#"\2\355\335\343\327\211\22\24\365\377\353\340\332\e\21\312\217"
#"\220\344\203\322\320\322\341\2731\e\236\230\307\246\23i\352>3,"
#"\260*\2,\375DK\302S\270Q\2433v\327\272\1\16\361y\213\4\16X\345H"))
(test-suite
"make-id-cookie and valid-id-cookie?"
(test-false "reject forged"
(valid-id-cookie? (client-cookie "my-id-cookie"
"my-id-cookie=YmFLLOIDULjpLQOu1+cvMBM+m4o&1489023629&forged-value"
#f #f)
#:name "my-id-cookie"
#:key test-secret-salt))
(test-false "reject truncated signature"
;; before web-server-lib v1.6, generated signatures were incorectly truncated
(valid-id-cookie? (client-cookie "my-id-cookie"
"my-id-cookie=YmFLLOIDULjpLQOu1+cvMBM+m&1489023629&my-signed-value"
#f #f)
#:name "my-id-cookie"
#:key test-secret-salt))
(let ([dt (date* 26 42 0 9 3 2017 4 67 #f 0 0 "UTC")])
;; Rather than repeating each test for every possible combination of:
;; - Name argument to make-id-cookie as string or bytes
;; - Value argument to make-id-cookie as string or bytes
;; - Name argument to valid-id-cookie as string or bytes
;; we mix use of strings vs. bytes to get reasonable coverage overall.
(define kw-c
(make-id-cookie "my-id-cookie"
#"my-signed-value"
#:key test-secret-salt
#:domain "example.com"
#:max-age 42
#:path "/some-path"
#:expires dt
#:secure? 'yes ;; non-boolean values should be accepted
#:http-only? #t
#:extension "ext"))
(define by-pos-c
(make-id-cookie #"my-id-cookie"
test-secret-salt
"my-signed-value"
#:domain "example.com"
#:max-age 42
#:path "/some-path"
#:expires dt
#:secure? #t
#:http-only? 'yes
#:extension "ext"))
(for ([c (list kw-c by-pos-c)]
[convention (map string-info '("keyword" "by-position"))])
(with-check-info (['cookie c]
['|make-id-cookie calling convention| convention])
(test-not-false "infinite timeout"
(valid-id-cookie? c
#:name "my-id-cookie"
#:key test-secret-salt))
(test-not-false "finite timeout"
(valid-id-cookie? c
#:name #"my-id-cookie"
#:key test-secret-salt
#:timeout (current-seconds)))
(test-false "reject expired"
(valid-id-cookie? c
#:name "my-id-cookie"
#:key test-secret-salt
#:timeout (- (current-seconds)
86400)))))))
(test-suite
"request-id-cookie"
(let ()
(define req
(make-request
#"GET" (string->url "")
(list (header #"Cookie"
#"my-id-cookie=YmFLLOIDULjpLQOu1+cvMBM+m4o&1489023629&my-signed-value"))
(delay empty) #f "host" 80 "client"))
(test-not-false "infinite timeout & shelf life"
(request-id-cookie req
#:name "my-id-cookie"
#:key test-secret-salt))
(for ([name (in-list '("my-id-cookie" #"my-id-cookie"))])
(with-check-info (['|name argument| name])
(test-not-false "finite timeout"
(request-id-cookie req
#:name name
#:key test-secret-salt
#:timeout (current-seconds)))
(test-not-false "finite timeout / by position"
(request-id-cookie name
test-secret-salt
req
#:timeout (current-seconds)))))
(test-false "timeout / reject expired"
(request-id-cookie req
#:name "my-id-cookie"
#:key test-secret-salt
#:timeout 1089023629))
(test-equal? "long finite shelf-life / fresh cookie"
(valid-id-cookie? (make-id-cookie "fresh-id-cookie"
"test-value"
#:key #"test-key")
#:name "fresh-id-cookie"
#:key #"test-key"
#:shelf-life 500)
"test-value")
(test-equal? "long finite shelf-life"
(request-id-cookie req
#:name "my-id-cookie"
#:key test-secret-salt
#:shelf-life (+ 10
(- (current-seconds)
1489023629)))
"my-signed-value")
(test-false "shelf-life / reject expired"
(request-id-cookie req
#:name "my-id-cookie"
#:key test-secret-salt
#:shelf-life -10))
))))))
(module+ test
(require rackunit/text-ui)
(run-tests cookies-tests))
| null | https://raw.githubusercontent.com/racket/web-server/f718800b5b3f407f7935adf85dfa663c4bba1651/web-server-test/tests/web-server/http/cookies-test.rkt | racket | comment is now ignored
RFC 6265 no longer gives special meaning to "$Version" "$Path" or "$Domain"
new version of request-cookies never populates path or domain
new version of request-cookies never populates path or domain
new version of request-cookies never populates path or domain
this is rejected if there is a \n between the cookies or if there is a trailing \r\n
(let ()
<--- this is not a cookie-value?
before web-server-lib v1.6, generated signatures were incorectly truncated
Rather than repeating each test for every possible combination of:
- Name argument to make-id-cookie as string or bytes
- Value argument to make-id-cookie as string or bytes
- Name argument to valid-id-cookie as string or bytes
we mix use of strings vs. bytes to get reasonable coverage overall.
non-boolean values should be accepted | #lang racket/base
(require rackunit
racket/promise
racket/list
racket/match
racket/file
(for-syntax racket/base)
net/url
net/cookies/common
(except-in net/cookies/server
make-cookie)
web-server/http/request-structs
web-server/http/response-structs
web-server/http/cookie
web-server/http/id-cookie
web-server/http/cookie-parse)
(provide cookies-tests)
(define (header-equal? h1 h2)
(and (bytes=? (header-field h1)
(header-field h2))
(bytes=? (header-value h1)
(header-value h2))))
(define (set-header->read-header h)
(make-header #"Cookie" (header-value h)))
(define-check (check-equal?/list-no-order actual expected)
(or (and (list? actual)
(list? expected)
(= (length actual)
(length expected))
(let loop ([actual-to-go actual]
[expected-to-go expected])
(match expected-to-go
['() (null? actual-to-go)]
[(cons this-expected more-expected)
(and (member this-expected actual-to-go)
(loop (remove this-expected actual-to-go)
more-expected))])))
(with-check-info (['actual actual]
['expected expected])
(fail-check))))
(define-syntax (test-equal?/list-no-order stx)
(syntax-case stx ()
[(_ msg actual expected)
(with-syntax ([expr (syntax/loc stx (check-equal?/list-no-order actual expected))])
(syntax/loc stx
(test-case msg expr)))]))
(define cookies-tests
(test-suite
"Cookies"
(test-suite
"cookie.rkt"
(test-suite
"cookie->header and make-cookie"
(test-check "Simple" header-equal?
(cookie->header (make-cookie "name" "value"))
(make-header #"Set-Cookie" #"name=value"))
(test-equal? "Comment"
(header-value (cookie->header (make-cookie "name" "value" #:comment "comment")))
(test-equal? "Domain"
(header-value (cookie->header (make-cookie "name" "value" #:domain "host.domain")))
#"name=value; Domain=host.domain")
(test-equal? "max-age"
(header-value (cookie->header (make-cookie "name" "value" #:max-age 24)))
#"name=value; Max-Age=24")
(test-equal? "path"
(header-value (cookie->header (make-cookie "name" "value" #:path "path")))
#"name=value; Path=path")
(test-equal? "secure? #t"
(header-value (cookie->header (make-cookie "name" "value" #:secure? #t)))
#"name=value; Secure")
(test-equal? "secure? #f"
(header-value (cookie->header (make-cookie "name" "value" #:secure? #f)))
#"name=value")))
(let ()
(define (reqcs hs)
(request-cookies
(make-request
#"GET" (string->url "")
hs (delay empty) #f
"host" 80 "client")))
(define (reqc h)
(reqcs (list (make-header #"Cookie" h))))
(test-suite
"cookie-parse.rkt"
(test-equal? "None"
(reqcs empty)
empty)
(test-equal?/list-no-order "Simple"
(reqc #"$Version=\"1\"; name=\"value\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "name" "value" #f #f)))
(test-equal?/list-no-order "Path"
(reqc #"$Version=\"1\"; name=\"value\"; $Path=\"/acme\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "$Path" "/acme" #f #f)
(test-equal?/list-no-order "Domain"
(reqc #"$Version=\"1\"; name=\"value\"; $Domain=\"host.acme\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "$Domain" "host.acme" #f #f)
(test-equal?/list-no-order "Multiple"
(reqc #"$Version=\"1\"; key1=\"value1\"; key2=\"value2\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "key1" "value1" #f #f)
(make-client-cookie "key2" "value2" #f #f)))
(test-equal?/list-no-order "Multiple w/ paths & domains"
(reqc #"$Version=\"1\"; key1=\"value1\"; $Path=\"/acme\"; key2=\"value2\"; $Domain=\"host.acme\"")
(list (make-client-cookie "$Version" "1" #f #f)
(make-client-cookie "$Domain" "host.acme" #f #f)
(make-client-cookie "$Path" "/acme" #f #f)
(make-client-cookie "key2" "value2" #f #f)))
(test-equal?/list-no-order "phpBB. PR10689"
(reqc #"style_cookie=null; phpbb3_e1p9b_u=54; phpbb3_e1p9b_k=; phpbb3_e1p9b_sid=3fa8d7a7b65fbabcbe9b345861dc079a")
(list (make-client-cookie "style_cookie" "null" #f #f)
(make-client-cookie "phpbb3_e1p9b_u" "54" #f #f)
(make-client-cookie "phpbb3_e1p9b_k" "" #f #f)
(make-client-cookie "phpbb3_e1p9b_sid" "3fa8d7a7b65fbabcbe9b345861dc079a" #f #f)))
(test-equal?/list-no-order "Google"
(bytes-append #"teaching-order=course; "
#"__utmz=165257760.1272597702.1.1.utmcsr=(direct)"
#"|utmccn=(direct)|utmcmd=(none)"))
(list (make-client-cookie "teaching-order" "course" #f #f)
(make-client-cookie "__utmz" "165257760.1272597702.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)" #f #f)))
(define out #"id=\"hell\\\"w\\\"o\"")
(test-check "quotes (pr14194)" header-equal?
(cookie->header (make-cookie "id" in))
(make-header #"Set-Cookie" out))
(test-equal? "quotes (pr14194)"
(reqc out)
(list (make-client-cookie "id" in #f #f))))))
(test-suite
"RFC 6265 modifications"
(let ([dt (date* 26 42 0 9 3 2017 4 67 #f 0 0 "UTC")])
(test-equal? "#:expires as string"
(cookie-expires (make-cookie "my-cookie"
"my-value"
#:expires "Thu, 09 Mar 2017 00:42:26 GMT"))
dt)
(define c
(make-cookie "my-cookie"
"my-value"
#:comment "This is ignored"
#:domain "example.com"
#:max-age 42
#:path "/some-path"
#:expires dt
#:secure? 'yes
#:http-only? 'yes
#:extension "ext"))
(test-suite
"extra arguments to make-cookie"
(check-match c
(cookie "my-cookie"
"my-value"
(? (λ (x) (equal? x dt)))
42
"example.com"
"/some-path"
#t
#t
"ext"))
(check-match (cookie->header c)
(header
#"Set-Cookie"
(app (λ (val) (regexp-split #rx"; " val))
(list-no-order #"my-cookie=my-value"
#"Expires=Thu, 09 Mar 2017 00:42:26 GMT"
#"Max-Age=42"
#"Domain=example.com"
#"Path=/some-path"
#"Secure"
#"HttpOnly"
#"ext"))))
)))
(test-suite
"id-cookie.rkt"
(test-suite
"make-secret-salt/file"
(let ([tmp-secret-salt-path (make-temporary-file)])
(define (delete-salt-file)
(when (file-exists? tmp-secret-salt-path)
(delete-file tmp-secret-salt-path)))
(dynamic-wind delete-salt-file
(λ ()
(test-equal? "should only initialize once"
(make-secret-salt/file tmp-secret-salt-path)
(make-secret-salt/file tmp-secret-salt-path)))
delete-salt-file)))
(let ()
(define test-secret-salt
(bytes-append #"U;\256\0.\203Iu\3663\367\262d\220\276t\207\17^_0\240\2U\341"
#"\240E\20\322\36\213\210\224\35ey\365:\332\"\e\211\262\v@y\n"
#"\377\32561\364\277R\363\334Q\273\270\36\223\242\202\272\206"
#"\2\355\335\343\327\211\22\24\365\377\353\340\332\e\21\312\217"
#"\220\344\203\322\320\322\341\2731\e\236\230\307\246\23i\352>3,"
#"\260*\2,\375DK\302S\270Q\2433v\327\272\1\16\361y\213\4\16X\345H"))
(test-suite
"make-id-cookie and valid-id-cookie?"
(test-false "reject forged"
(valid-id-cookie? (client-cookie "my-id-cookie"
"my-id-cookie=YmFLLOIDULjpLQOu1+cvMBM+m4o&1489023629&forged-value"
#f #f)
#:name "my-id-cookie"
#:key test-secret-salt))
(test-false "reject truncated signature"
(valid-id-cookie? (client-cookie "my-id-cookie"
"my-id-cookie=YmFLLOIDULjpLQOu1+cvMBM+m&1489023629&my-signed-value"
#f #f)
#:name "my-id-cookie"
#:key test-secret-salt))
(let ([dt (date* 26 42 0 9 3 2017 4 67 #f 0 0 "UTC")])
(define kw-c
(make-id-cookie "my-id-cookie"
#"my-signed-value"
#:key test-secret-salt
#:domain "example.com"
#:max-age 42
#:path "/some-path"
#:expires dt
#:http-only? #t
#:extension "ext"))
(define by-pos-c
(make-id-cookie #"my-id-cookie"
test-secret-salt
"my-signed-value"
#:domain "example.com"
#:max-age 42
#:path "/some-path"
#:expires dt
#:secure? #t
#:http-only? 'yes
#:extension "ext"))
(for ([c (list kw-c by-pos-c)]
[convention (map string-info '("keyword" "by-position"))])
(with-check-info (['cookie c]
['|make-id-cookie calling convention| convention])
(test-not-false "infinite timeout"
(valid-id-cookie? c
#:name "my-id-cookie"
#:key test-secret-salt))
(test-not-false "finite timeout"
(valid-id-cookie? c
#:name #"my-id-cookie"
#:key test-secret-salt
#:timeout (current-seconds)))
(test-false "reject expired"
(valid-id-cookie? c
#:name "my-id-cookie"
#:key test-secret-salt
#:timeout (- (current-seconds)
86400)))))))
(test-suite
"request-id-cookie"
(let ()
(define req
(make-request
#"GET" (string->url "")
(list (header #"Cookie"
#"my-id-cookie=YmFLLOIDULjpLQOu1+cvMBM+m4o&1489023629&my-signed-value"))
(delay empty) #f "host" 80 "client"))
(test-not-false "infinite timeout & shelf life"
(request-id-cookie req
#:name "my-id-cookie"
#:key test-secret-salt))
(for ([name (in-list '("my-id-cookie" #"my-id-cookie"))])
(with-check-info (['|name argument| name])
(test-not-false "finite timeout"
(request-id-cookie req
#:name name
#:key test-secret-salt
#:timeout (current-seconds)))
(test-not-false "finite timeout / by position"
(request-id-cookie name
test-secret-salt
req
#:timeout (current-seconds)))))
(test-false "timeout / reject expired"
(request-id-cookie req
#:name "my-id-cookie"
#:key test-secret-salt
#:timeout 1089023629))
(test-equal? "long finite shelf-life / fresh cookie"
(valid-id-cookie? (make-id-cookie "fresh-id-cookie"
"test-value"
#:key #"test-key")
#:name "fresh-id-cookie"
#:key #"test-key"
#:shelf-life 500)
"test-value")
(test-equal? "long finite shelf-life"
(request-id-cookie req
#:name "my-id-cookie"
#:key test-secret-salt
#:shelf-life (+ 10
(- (current-seconds)
1489023629)))
"my-signed-value")
(test-false "shelf-life / reject expired"
(request-id-cookie req
#:name "my-id-cookie"
#:key test-secret-salt
#:shelf-life -10))
))))))
(module+ test
(require rackunit/text-ui)
(run-tests cookies-tests))
|
594b4e90691eb98e57c96b5b49210ed90da4d0fd559107e7ecc0b0e1cd0b33c5 | dinosaure/tuyau | tuyau_caml_strings.mli | type flow =
{ mutable input : string list
; output : string Queue.t }
val strings : string list Tuyau_caml.key
val strings_protocol : flow Tuyau_caml.Witness.protocol
| null | https://raw.githubusercontent.com/dinosaure/tuyau/8ed849805153f5dfad6c045782e3d20ef06cd9b6/caml/tuyau_caml_strings.mli | ocaml | type flow =
{ mutable input : string list
; output : string Queue.t }
val strings : string list Tuyau_caml.key
val strings_protocol : flow Tuyau_caml.Witness.protocol
|
|
052a8a5cf8318ff46ad0f4d41bed92627d87646037d54d87c194485a16bc4024 | apache/couchdb-fabric | fabric_group_info.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(fabric_group_info).
-export([go/2]).
-include_lib("fabric/include/fabric.hrl").
-include_lib("mem3/include/mem3.hrl").
-include_lib("couch/include/couch_db.hrl").
go(DbName, GroupId) when is_binary(GroupId) ->
{ok, DDoc} = fabric:open_doc(DbName, GroupId, [?ADMIN_CTX]),
go(DbName, DDoc);
go(DbName, #doc{id=DDocId}) ->
Shards = mem3:shards(DbName),
Ushards = mem3:ushards(DbName),
Workers = fabric_util:submit_jobs(Shards, group_info, [DDocId]),
RexiMon = fabric_util:create_monitors(Shards),
Acc = acc_init(Workers, Ushards),
try fabric_util:recv(Workers, #shard.ref, fun handle_message/3, Acc) of
{timeout, {WorkersDict, _, _}} ->
DefunctWorkers = fabric_util:remove_done_workers(WorkersDict, nil),
fabric_util:log_timeout(DefunctWorkers, "group_info"),
{error, timeout};
Else ->
Else
after
rexi_monitor:stop(RexiMon)
end.
handle_message({rexi_DOWN, _, {_,NodeRef},_}, _Shard,
{Counters, Acc, Ushards}) ->
case fabric_util:remove_down_workers(Counters, NodeRef) of
{ok, NewCounters} ->
{ok, {NewCounters, Acc, Ushards}};
error ->
{error, {nodedown, <<"progress not possible">>}}
end;
handle_message({rexi_EXIT, Reason}, Shard, {Counters, Acc, Ushards}) ->
NewCounters = lists:keydelete(Shard, #shard.ref, Counters),
case fabric_view:is_progress_possible(NewCounters) of
true ->
{ok, {NewCounters, Acc, Ushards}};
false ->
{error, Reason}
end;
handle_message({ok, Info}, Shard, {Counters0, Acc, Ushards}) ->
case fabric_dict:lookup_element(Shard, Counters0) of
undefined ->
% already heard from other node in this range
{ok, {Counters0, Acc, Ushards}};
nil ->
NewAcc = append_result(Info, Shard, Acc, Ushards),
Counters1 = fabric_dict:store(Shard, ok, Counters0),
Counters = fabric_view:remove_overlapping_shards(Shard, Counters1),
case is_complete(Counters) of
false ->
{ok, {Counters, NewAcc, Ushards}};
true ->
Pending = aggregate_pending(NewAcc),
Infos = get_infos(NewAcc),
Results = [{updates_pending, {Pending}} | merge_results(Infos)],
{stop, Results}
end
end;
handle_message(_, _, Acc) ->
{ok, Acc}.
acc_init(Workers, Ushards) ->
Set = sets:from_list([{Id, N} || #shard{name = Id, node = N} <- Ushards]),
{fabric_dict:init(Workers, nil), dict:new(), Set}.
is_complete(Counters) ->
not fabric_dict:any(nil, Counters).
append_result(Info, #shard{name = Name, node = Node}, Acc, Ushards) ->
IsPreferred = sets:is_element({Name, Node}, Ushards),
dict:append(Name, {Node, IsPreferred, Info}, Acc).
get_infos(Acc) ->
Values = [V || {_, V} <- dict:to_list(Acc)],
lists:flatten([Info || {_Node, _Pref, Info} <- lists:flatten(Values)]).
aggregate_pending(Dict) ->
{Preferred, Total, Minimum} =
dict:fold(fun(_Name, Results, {P, T, M}) ->
{Preferred, Total, Minimum} = calculate_pending(Results),
{P + Preferred, T + Total, M + Minimum}
end, {0, 0, 0}, Dict),
[
{minimum, Minimum},
{preferred, Preferred},
{total, Total}
].
calculate_pending(Results) ->
lists:foldl(fun
({_Node, true, Info}, {P, T, V}) ->
Pending = couch_util:get_value(pending_updates, Info),
{P + Pending, T + Pending, min(Pending, V)};
({_Node, false, Info}, {P, T, V}) ->
Pending = couch_util:get_value(pending_updates, Info),
{P, T + Pending, min(Pending, V)}
end, {0, 0, infinity}, Results).
merge_results(Info) ->
Dict = lists:foldl(fun({K,V},D0) -> orddict:append(K,V,D0) end,
orddict:new(), Info),
orddict:fold(fun
(signature, [X | _], Acc) ->
[{signature, X} | Acc];
(language, [X | _], Acc) ->
[{language, X} | Acc];
(disk_size, X, Acc) -> % legacy
[{disk_size, lists:sum(X)} | Acc];
(data_size, X, Acc) -> % legacy
[{data_size, lists:sum(X)} | Acc];
(sizes, X, Acc) ->
[{sizes, {merge_object(X)}} | Acc];
(compact_running, X, Acc) ->
[{compact_running, lists:member(true, X)} | Acc];
(updater_running, X, Acc) ->
[{updater_running, lists:member(true, X)} | Acc];
(waiting_commit, X, Acc) ->
[{waiting_commit, lists:member(true, X)} | Acc];
(waiting_clients, X, Acc) ->
[{waiting_clients, lists:sum(X)} | Acc];
(update_seq, X, Acc) ->
[{update_seq, lists:sum(X)} | Acc];
(purge_seq, X, Acc) ->
[{purge_seq, lists:sum(X)} | Acc];
(_, _, Acc) ->
Acc
end, [], Dict).
merge_object(Objects) ->
Dict = lists:foldl(fun({Props}, D) ->
lists:foldl(fun({K,V},D0) -> orddict:append(K,V,D0) end, D, Props)
end, orddict:new(), Objects),
orddict:fold(fun
(Key, X, Acc) ->
[{Key, lists:sum(X)} | Acc]
end, [], Dict).
| null | https://raw.githubusercontent.com/apache/couchdb-fabric/ce62148d0a4469751d8078cc223684da29b5d4a7/src/fabric_group_info.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
already heard from other node in this range
legacy
legacy | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(fabric_group_info).
-export([go/2]).
-include_lib("fabric/include/fabric.hrl").
-include_lib("mem3/include/mem3.hrl").
-include_lib("couch/include/couch_db.hrl").
go(DbName, GroupId) when is_binary(GroupId) ->
{ok, DDoc} = fabric:open_doc(DbName, GroupId, [?ADMIN_CTX]),
go(DbName, DDoc);
go(DbName, #doc{id=DDocId}) ->
Shards = mem3:shards(DbName),
Ushards = mem3:ushards(DbName),
Workers = fabric_util:submit_jobs(Shards, group_info, [DDocId]),
RexiMon = fabric_util:create_monitors(Shards),
Acc = acc_init(Workers, Ushards),
try fabric_util:recv(Workers, #shard.ref, fun handle_message/3, Acc) of
{timeout, {WorkersDict, _, _}} ->
DefunctWorkers = fabric_util:remove_done_workers(WorkersDict, nil),
fabric_util:log_timeout(DefunctWorkers, "group_info"),
{error, timeout};
Else ->
Else
after
rexi_monitor:stop(RexiMon)
end.
handle_message({rexi_DOWN, _, {_,NodeRef},_}, _Shard,
{Counters, Acc, Ushards}) ->
case fabric_util:remove_down_workers(Counters, NodeRef) of
{ok, NewCounters} ->
{ok, {NewCounters, Acc, Ushards}};
error ->
{error, {nodedown, <<"progress not possible">>}}
end;
handle_message({rexi_EXIT, Reason}, Shard, {Counters, Acc, Ushards}) ->
NewCounters = lists:keydelete(Shard, #shard.ref, Counters),
case fabric_view:is_progress_possible(NewCounters) of
true ->
{ok, {NewCounters, Acc, Ushards}};
false ->
{error, Reason}
end;
handle_message({ok, Info}, Shard, {Counters0, Acc, Ushards}) ->
case fabric_dict:lookup_element(Shard, Counters0) of
undefined ->
{ok, {Counters0, Acc, Ushards}};
nil ->
NewAcc = append_result(Info, Shard, Acc, Ushards),
Counters1 = fabric_dict:store(Shard, ok, Counters0),
Counters = fabric_view:remove_overlapping_shards(Shard, Counters1),
case is_complete(Counters) of
false ->
{ok, {Counters, NewAcc, Ushards}};
true ->
Pending = aggregate_pending(NewAcc),
Infos = get_infos(NewAcc),
Results = [{updates_pending, {Pending}} | merge_results(Infos)],
{stop, Results}
end
end;
handle_message(_, _, Acc) ->
{ok, Acc}.
acc_init(Workers, Ushards) ->
Set = sets:from_list([{Id, N} || #shard{name = Id, node = N} <- Ushards]),
{fabric_dict:init(Workers, nil), dict:new(), Set}.
is_complete(Counters) ->
not fabric_dict:any(nil, Counters).
append_result(Info, #shard{name = Name, node = Node}, Acc, Ushards) ->
IsPreferred = sets:is_element({Name, Node}, Ushards),
dict:append(Name, {Node, IsPreferred, Info}, Acc).
get_infos(Acc) ->
Values = [V || {_, V} <- dict:to_list(Acc)],
lists:flatten([Info || {_Node, _Pref, Info} <- lists:flatten(Values)]).
aggregate_pending(Dict) ->
{Preferred, Total, Minimum} =
dict:fold(fun(_Name, Results, {P, T, M}) ->
{Preferred, Total, Minimum} = calculate_pending(Results),
{P + Preferred, T + Total, M + Minimum}
end, {0, 0, 0}, Dict),
[
{minimum, Minimum},
{preferred, Preferred},
{total, Total}
].
calculate_pending(Results) ->
lists:foldl(fun
({_Node, true, Info}, {P, T, V}) ->
Pending = couch_util:get_value(pending_updates, Info),
{P + Pending, T + Pending, min(Pending, V)};
({_Node, false, Info}, {P, T, V}) ->
Pending = couch_util:get_value(pending_updates, Info),
{P, T + Pending, min(Pending, V)}
end, {0, 0, infinity}, Results).
merge_results(Info) ->
Dict = lists:foldl(fun({K,V},D0) -> orddict:append(K,V,D0) end,
orddict:new(), Info),
orddict:fold(fun
(signature, [X | _], Acc) ->
[{signature, X} | Acc];
(language, [X | _], Acc) ->
[{language, X} | Acc];
[{disk_size, lists:sum(X)} | Acc];
[{data_size, lists:sum(X)} | Acc];
(sizes, X, Acc) ->
[{sizes, {merge_object(X)}} | Acc];
(compact_running, X, Acc) ->
[{compact_running, lists:member(true, X)} | Acc];
(updater_running, X, Acc) ->
[{updater_running, lists:member(true, X)} | Acc];
(waiting_commit, X, Acc) ->
[{waiting_commit, lists:member(true, X)} | Acc];
(waiting_clients, X, Acc) ->
[{waiting_clients, lists:sum(X)} | Acc];
(update_seq, X, Acc) ->
[{update_seq, lists:sum(X)} | Acc];
(purge_seq, X, Acc) ->
[{purge_seq, lists:sum(X)} | Acc];
(_, _, Acc) ->
Acc
end, [], Dict).
merge_object(Objects) ->
Dict = lists:foldl(fun({Props}, D) ->
lists:foldl(fun({K,V},D0) -> orddict:append(K,V,D0) end, D, Props)
end, orddict:new(), Objects),
orddict:fold(fun
(Key, X, Acc) ->
[{Key, lists:sum(X)} | Acc]
end, [], Dict).
|
7e85f10f6a135654b6fdbc39a92d1b89fceec38baa387c66331268f854cb7c95 | janestreet/hardcaml_fixed_point | test_resize.ml | open! Base
open Hardcaml
open! Expect_test_helpers_base
module Unsigned = Hardcaml_fixed_point.Unsigned (Bits)
module Signed = Hardcaml_fixed_point.Signed (Bits)
let test_round
(type a)
(module X : Hardcaml_fixed_point.Fixed_point with type t = a and type bits = Bits.t)
width_int_a
width_frac_a
a
width_int_b
width_frac_b
=
let fx = X.of_float width_int_a width_frac_a a in
let opfx = X.resize fx width_int_b width_frac_b in
Stdio.printf
"%f[%i:%i] | %f %s[%i:%i] = %s = %f\n"
a
width_int_a
width_frac_a
(X.to_float fx)
(fx |> X.signal |> Bits.to_bstr)
width_int_b
width_frac_b
(opfx |> X.signal |> Bits.to_bstr)
(X.to_float opfx)
;;
let%expect_test "simple rounding" =
test_round (module Unsigned) 3 6 3.12 3 4;
[%expect {| 3.120000[3:6] | 3.109375 011000111[3:4] = 0110001 = 3.062500 |}]
;;
let unsigned_rounding_ops =
[ Unsigned.Round.neg_infinity
; Unsigned.Round.pos_infinity
; Unsigned.Round.to_zero
; Unsigned.Round.away_from_zero
; Unsigned.Round.tie_to_neg_infinity
; Unsigned.Round.tie_to_pos_infinity
; Unsigned.Round.tie_to_zero
; Unsigned.Round.tie_away_from_zero
; Unsigned.Round.tie_to_nearest_even
; Unsigned.Round.tie_to_nearest_odd
]
;;
let test_unsigned_table () =
for i = 0 to 15 do
let a = Unsigned.create 2 (Bits.of_int ~width:5 i) in
let resize rnd = Unsigned.resize ~round:rnd a 3 0 |> Unsigned.signal |> Bits.to_int in
Stdio.printf "%3i %f " i (Unsigned.to_float a);
List.iter unsigned_rounding_ops ~f:(fun x -> Stdio.printf "%i " (resize x));
Stdio.printf "\n"
done
;;
let%expect_test "unsigned tabular" =
test_unsigned_table ();
[%expect
{|
0 0.000000 0 0 0 0 0 0 0 0 0 0
1 0.250000 0 1 0 1 0 0 0 0 0 0
2 0.500000 0 1 0 1 0 1 0 1 0 1
3 0.750000 0 1 0 1 1 1 1 1 1 1
4 1.000000 1 1 1 1 1 1 1 1 1 1
5 1.250000 1 2 1 2 1 1 1 1 1 1
6 1.500000 1 2 1 2 1 2 1 2 2 1
7 1.750000 1 2 1 2 2 2 2 2 2 2
8 2.000000 2 2 2 2 2 2 2 2 2 2
9 2.250000 2 3 2 3 2 2 2 2 2 2
10 2.500000 2 3 2 3 2 3 2 3 2 3
11 2.750000 2 3 2 3 3 3 3 3 3 3
12 3.000000 3 3 3 3 3 3 3 3 3 3
13 3.250000 3 4 3 4 3 3 3 3 3 3
14 3.500000 3 4 3 4 3 4 3 4 4 3
15 3.750000 3 4 3 4 4 4 4 4 4 4 |}]
;;
let signed_rounding_ops =
[ Signed.Round.neg_infinity
; Signed.Round.pos_infinity
; Signed.Round.to_zero
; Signed.Round.away_from_zero
; Signed.Round.tie_to_neg_infinity
; Signed.Round.tie_to_pos_infinity
; Signed.Round.tie_to_zero
; Signed.Round.tie_away_from_zero
; Signed.Round.tie_to_nearest_even
; Signed.Round.tie_to_nearest_odd
]
;;
let test_signed_table () =
for i = -8 to 7 do
let a = Signed.create 2 (Bits.of_int ~width:5 i) in
let resize rnd = Signed.resize ~round:rnd a 3 0 |> Signed.signal |> Bits.to_sint in
Stdio.printf "%3i %+f " i (Signed.to_float a);
List.iter signed_rounding_ops ~f:(fun x -> Stdio.printf "%+i " (resize x));
Stdio.printf "\n"
done
;;
let%expect_test "signed tabular" =
test_signed_table ();
[%expect
{|
-8 -2.000000 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2
-7 -1.750000 -2 -1 -1 -2 -2 -2 -2 -2 -2 -2
-6 -1.500000 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1
-5 -1.250000 -2 -1 -1 -2 -1 -1 -1 -1 -1 -1
-4 -1.000000 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-3 -0.750000 -1 +0 +0 -1 -1 -1 -1 -1 -1 -1
-2 -0.500000 -1 +0 +0 -1 -1 +0 +0 -1 +0 -1
-1 -0.250000 -1 +0 +0 -1 +0 +0 +0 +0 +0 +0
0 +0.000000 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0
1 +0.250000 +0 +1 +0 +1 +0 +0 +0 +0 +0 +0
2 +0.500000 +0 +1 +0 +1 +0 +1 +0 +1 +0 +1
3 +0.750000 +0 +1 +0 +1 +1 +1 +1 +1 +1 +1
4 +1.000000 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1
5 +1.250000 +1 +2 +1 +2 +1 +1 +1 +1 +1 +1
6 +1.500000 +1 +2 +1 +2 +1 +2 +1 +2 +2 +1
7 +1.750000 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 |}]
;;
let%expect_test "resize to a larger size" =
let test_resize ~i ~f ~i' ~f' v =
let fu = Unsigned.create f (Bits.of_int ~width:(i + f) v) in
let fu_unsigned_wrap = Unsigned.resize ~overflow:Unsigned.Overflow.wrap fu i' f' in
let fu_unsigned_saturate =
Unsigned.resize ~overflow:Unsigned.Overflow.saturate fu i' f'
in
let fs = Signed.create f (Bits.of_int ~width:(i + f) v) in
let fs_signed_wrap = Signed.resize ~overflow:Signed.Overflow.wrap fs i' f' in
let fs_signed_saturate = Signed.resize ~overflow:Signed.Overflow.saturate fs i' f' in
print_s
[%message
(fu : Unsigned.t)
(fu_unsigned_wrap : Unsigned.t)
(fu_unsigned_saturate : Unsigned.t)
(fs : Signed.t)
(fs_signed_wrap : Signed.t)
(fs_signed_saturate : Signed.t)]
in
test_resize ~i:1 ~f:1 ~i':2 ~f':2 3;
[%expect
{|
((fu ((s 11) (fp 1)))
(fu_unsigned_wrap ((s 0110) (fp 2)))
(fu_unsigned_saturate ((s 0110) (fp 2)))
(fs ((s 11) (fp 1)))
(fs_signed_wrap ((s 1110) (fp 2)))
(fs_signed_saturate ((s 1110) (fp 2)))) |}];
test_resize ~i:4 ~f:3 ~i':6 ~f':3 0b1111000;
[%expect
{|
((fu ((s 1111000) (fp 3)))
(fu_unsigned_wrap ((s 001111000) (fp 3)))
(fu_unsigned_saturate ((s 001111000) (fp 3)))
(fs ((s 1111000) (fp 3)))
(fs_signed_wrap ((s 111111000) (fp 3)))
(fs_signed_saturate ((s 111111000) (fp 3)))) |}];
test_resize ~i:4 ~f:3 ~i':4 ~f':5 0b1111000;
[%expect
{|
((fu ((s 1111000) (fp 3)))
(fu_unsigned_wrap ((s 111100000) (fp 5)))
(fu_unsigned_saturate ((s 111100000) (fp 5)))
(fs ((s 1111000) (fp 3)))
(fs_signed_wrap ((s 111100000) (fp 5)))
(fs_signed_saturate ((s 111100000) (fp 5)))) |}]
;;
| null | https://raw.githubusercontent.com/janestreet/hardcaml_fixed_point/52ac071c3dea60595d70f2c36d1e5b77d21b77ea/test/test_resize.ml | ocaml | open! Base
open Hardcaml
open! Expect_test_helpers_base
module Unsigned = Hardcaml_fixed_point.Unsigned (Bits)
module Signed = Hardcaml_fixed_point.Signed (Bits)
let test_round
(type a)
(module X : Hardcaml_fixed_point.Fixed_point with type t = a and type bits = Bits.t)
width_int_a
width_frac_a
a
width_int_b
width_frac_b
=
let fx = X.of_float width_int_a width_frac_a a in
let opfx = X.resize fx width_int_b width_frac_b in
Stdio.printf
"%f[%i:%i] | %f %s[%i:%i] = %s = %f\n"
a
width_int_a
width_frac_a
(X.to_float fx)
(fx |> X.signal |> Bits.to_bstr)
width_int_b
width_frac_b
(opfx |> X.signal |> Bits.to_bstr)
(X.to_float opfx)
;;
let%expect_test "simple rounding" =
test_round (module Unsigned) 3 6 3.12 3 4;
[%expect {| 3.120000[3:6] | 3.109375 011000111[3:4] = 0110001 = 3.062500 |}]
;;
let unsigned_rounding_ops =
[ Unsigned.Round.neg_infinity
; Unsigned.Round.pos_infinity
; Unsigned.Round.to_zero
; Unsigned.Round.away_from_zero
; Unsigned.Round.tie_to_neg_infinity
; Unsigned.Round.tie_to_pos_infinity
; Unsigned.Round.tie_to_zero
; Unsigned.Round.tie_away_from_zero
; Unsigned.Round.tie_to_nearest_even
; Unsigned.Round.tie_to_nearest_odd
]
;;
let test_unsigned_table () =
for i = 0 to 15 do
let a = Unsigned.create 2 (Bits.of_int ~width:5 i) in
let resize rnd = Unsigned.resize ~round:rnd a 3 0 |> Unsigned.signal |> Bits.to_int in
Stdio.printf "%3i %f " i (Unsigned.to_float a);
List.iter unsigned_rounding_ops ~f:(fun x -> Stdio.printf "%i " (resize x));
Stdio.printf "\n"
done
;;
let%expect_test "unsigned tabular" =
test_unsigned_table ();
[%expect
{|
0 0.000000 0 0 0 0 0 0 0 0 0 0
1 0.250000 0 1 0 1 0 0 0 0 0 0
2 0.500000 0 1 0 1 0 1 0 1 0 1
3 0.750000 0 1 0 1 1 1 1 1 1 1
4 1.000000 1 1 1 1 1 1 1 1 1 1
5 1.250000 1 2 1 2 1 1 1 1 1 1
6 1.500000 1 2 1 2 1 2 1 2 2 1
7 1.750000 1 2 1 2 2 2 2 2 2 2
8 2.000000 2 2 2 2 2 2 2 2 2 2
9 2.250000 2 3 2 3 2 2 2 2 2 2
10 2.500000 2 3 2 3 2 3 2 3 2 3
11 2.750000 2 3 2 3 3 3 3 3 3 3
12 3.000000 3 3 3 3 3 3 3 3 3 3
13 3.250000 3 4 3 4 3 3 3 3 3 3
14 3.500000 3 4 3 4 3 4 3 4 4 3
15 3.750000 3 4 3 4 4 4 4 4 4 4 |}]
;;
let signed_rounding_ops =
[ Signed.Round.neg_infinity
; Signed.Round.pos_infinity
; Signed.Round.to_zero
; Signed.Round.away_from_zero
; Signed.Round.tie_to_neg_infinity
; Signed.Round.tie_to_pos_infinity
; Signed.Round.tie_to_zero
; Signed.Round.tie_away_from_zero
; Signed.Round.tie_to_nearest_even
; Signed.Round.tie_to_nearest_odd
]
;;
let test_signed_table () =
for i = -8 to 7 do
let a = Signed.create 2 (Bits.of_int ~width:5 i) in
let resize rnd = Signed.resize ~round:rnd a 3 0 |> Signed.signal |> Bits.to_sint in
Stdio.printf "%3i %+f " i (Signed.to_float a);
List.iter signed_rounding_ops ~f:(fun x -> Stdio.printf "%+i " (resize x));
Stdio.printf "\n"
done
;;
let%expect_test "signed tabular" =
test_signed_table ();
[%expect
{|
-8 -2.000000 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2
-7 -1.750000 -2 -1 -1 -2 -2 -2 -2 -2 -2 -2
-6 -1.500000 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1
-5 -1.250000 -2 -1 -1 -2 -1 -1 -1 -1 -1 -1
-4 -1.000000 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1
-3 -0.750000 -1 +0 +0 -1 -1 -1 -1 -1 -1 -1
-2 -0.500000 -1 +0 +0 -1 -1 +0 +0 -1 +0 -1
-1 -0.250000 -1 +0 +0 -1 +0 +0 +0 +0 +0 +0
0 +0.000000 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0
1 +0.250000 +0 +1 +0 +1 +0 +0 +0 +0 +0 +0
2 +0.500000 +0 +1 +0 +1 +0 +1 +0 +1 +0 +1
3 +0.750000 +0 +1 +0 +1 +1 +1 +1 +1 +1 +1
4 +1.000000 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1
5 +1.250000 +1 +2 +1 +2 +1 +1 +1 +1 +1 +1
6 +1.500000 +1 +2 +1 +2 +1 +2 +1 +2 +2 +1
7 +1.750000 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 |}]
;;
let%expect_test "resize to a larger size" =
let test_resize ~i ~f ~i' ~f' v =
let fu = Unsigned.create f (Bits.of_int ~width:(i + f) v) in
let fu_unsigned_wrap = Unsigned.resize ~overflow:Unsigned.Overflow.wrap fu i' f' in
let fu_unsigned_saturate =
Unsigned.resize ~overflow:Unsigned.Overflow.saturate fu i' f'
in
let fs = Signed.create f (Bits.of_int ~width:(i + f) v) in
let fs_signed_wrap = Signed.resize ~overflow:Signed.Overflow.wrap fs i' f' in
let fs_signed_saturate = Signed.resize ~overflow:Signed.Overflow.saturate fs i' f' in
print_s
[%message
(fu : Unsigned.t)
(fu_unsigned_wrap : Unsigned.t)
(fu_unsigned_saturate : Unsigned.t)
(fs : Signed.t)
(fs_signed_wrap : Signed.t)
(fs_signed_saturate : Signed.t)]
in
test_resize ~i:1 ~f:1 ~i':2 ~f':2 3;
[%expect
{|
((fu ((s 11) (fp 1)))
(fu_unsigned_wrap ((s 0110) (fp 2)))
(fu_unsigned_saturate ((s 0110) (fp 2)))
(fs ((s 11) (fp 1)))
(fs_signed_wrap ((s 1110) (fp 2)))
(fs_signed_saturate ((s 1110) (fp 2)))) |}];
test_resize ~i:4 ~f:3 ~i':6 ~f':3 0b1111000;
[%expect
{|
((fu ((s 1111000) (fp 3)))
(fu_unsigned_wrap ((s 001111000) (fp 3)))
(fu_unsigned_saturate ((s 001111000) (fp 3)))
(fs ((s 1111000) (fp 3)))
(fs_signed_wrap ((s 111111000) (fp 3)))
(fs_signed_saturate ((s 111111000) (fp 3)))) |}];
test_resize ~i:4 ~f:3 ~i':4 ~f':5 0b1111000;
[%expect
{|
((fu ((s 1111000) (fp 3)))
(fu_unsigned_wrap ((s 111100000) (fp 5)))
(fu_unsigned_saturate ((s 111100000) (fp 5)))
(fs ((s 1111000) (fp 3)))
(fs_signed_wrap ((s 111100000) (fp 5)))
(fs_signed_saturate ((s 111100000) (fp 5)))) |}]
;;
|
|
0e32cb35a4d54e3c4a6e32ff1d598d477e1448616399a1d9a019d2451c9fa37e | project-oak/hafnium-verification | accessTreeTests.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
(* string set domain we use to ensure we're getting the expected traces *)
module MockTraceDomain = struct
include AbstractDomain.FiniteSet (String)
let top_str = "T"
let top = singleton top_str
let singleton e =
assert (e <> top_str) ;
singleton e
(* total hack of a widening just to test that widening of traces is working *)
let widen ~prev ~next ~num_iters:_ =
let trace_diff = diff next prev in
if not (is_empty trace_diff) then top else join prev next
(* similarly, hack printing so top looks different *)
let pp fmt s = if phys_equal s top then F.pp_print_char fmt 'T' else pp fmt s
end
module MakeTree (Config : AccessTree.Config) = struct
include AccessTree.Make (MockTraceDomain) (Config)
let assert_trees_equal tree1 tree2 =
let rec access_tree_equal (trace1, subtree1) (trace2, subtree2) =
MockTraceDomain.equal trace1 trace2
&&
match (subtree1, subtree2) with
| Star, Star ->
true
| Subtree t1, Subtree t2 ->
AccessMap.equal access_tree_equal t1 t2
| _ ->
false
in
let base_tree_equal tree1 tree2 = BaseMap.equal access_tree_equal tree1 tree2 in
let pp_diff fmt (actual, expected) =
F.fprintf fmt "Expected to get tree %a but got %a" pp expected pp actual
in
OUnit2.assert_equal ~cmp:base_tree_equal ~pp_diff tree1 tree2
end
module Domain = MakeTree (AccessTree.DefaultConfig)
let tests =
let open AccessPathTestUtils in
let x_base = make_base "x" in
let y_base = make_base "y" in
let z_base = make_base "z" in
let f = make_field_access "f" in
let g = make_field_access "g" in
let array = make_array_access (Typ.mk Tvoid) in
let x = AccessPath.Abs.Exact (make_access_path "x" []) in
let xF = AccessPath.Abs.Exact (make_access_path "x" ["f"]) in
let xG = AccessPath.Abs.Exact (make_access_path "x" ["g"]) in
let xFG = AccessPath.Abs.Exact (make_access_path "x" ["f"; "g"]) in
let y = AccessPath.Abs.Exact (make_access_path "y" []) in
let yF = AccessPath.Abs.Exact (make_access_path "y" ["f"]) in
let yG = AccessPath.Abs.Exact (make_access_path "y" ["g"]) in
let yFG = AccessPath.Abs.Exact (make_access_path "y" ["f"; "g"]) in
let z = AccessPath.Abs.Exact (make_access_path "z" []) in
let zF = AccessPath.Abs.Exact (make_access_path "z" ["f"]) in
let zFG = AccessPath.Abs.Exact (make_access_path "z" ["f"; "g"]) in
let xArr = AccessPath.Abs.Exact (make_base "x", [array]) in
let xArrF =
let accesses = [array; make_field_access "f"] in
AccessPath.Abs.Exact (make_base "x", accesses)
in
let a_star = AccessPath.Abs.Abstracted (make_access_path "a" []) in
let x_star = AccessPath.Abs.Abstracted (make_access_path "x" []) in
let xF_star = AccessPath.Abs.Abstracted (make_access_path "x" ["f"]) in
let xG_star = AccessPath.Abs.Abstracted (make_access_path "x" ["g"]) in
let y_star = AccessPath.Abs.Abstracted (make_access_path "y" []) in
let yF_star = AccessPath.Abs.Abstracted (make_access_path "y" ["f"]) in
let z_star = AccessPath.Abs.Abstracted (make_access_path "z" []) in
let x_trace = MockTraceDomain.singleton "x" in
let y_trace = MockTraceDomain.singleton "y" in
let z_trace = MockTraceDomain.singleton "z" in
let xF_trace = MockTraceDomain.singleton "xF" in
let yF_trace = MockTraceDomain.singleton "yF" in
let xFG_trace = MockTraceDomain.singleton "xFG" in
let array_f_trace = MockTraceDomain.singleton "arrayF" in
let x_star_trace = MockTraceDomain.of_list ["x"; "xF"; "xFG"] in
let g_subtree = Domain.make_access_node xF_trace g xFG_trace in
let x_subtree = Domain.AccessMap.singleton f g_subtree |> Domain.make_node x_trace in
let yF_subtree = Domain.make_starred_leaf yF_trace in
let y_subtree = Domain.AccessMap.singleton f yF_subtree |> Domain.make_node y_trace in
let z_subtree = Domain.make_starred_leaf z_trace in
let tree =
Domain.BaseMap.singleton x_base x_subtree
|> Domain.BaseMap.add y_base y_subtree
|> Domain.BaseMap.add z_base z_subtree
in
let x_base_tree = Domain.BaseMap.singleton x_base Domain.empty_node in
let y_base_tree = Domain.BaseMap.singleton y_base Domain.empty_node in
let x_y_base_tree = Domain.BaseMap.add y_base Domain.empty_node x_base_tree in
let xFG_tree = Domain.BaseMap.singleton x_base x_subtree in
let x_star_tree = Domain.BaseMap.singleton x_base (Domain.make_starred_leaf x_trace) in
let yF_star_tree = Domain.BaseMap.singleton y_base y_subtree in
let x_yF_star_tree = Domain.BaseMap.add y_base y_subtree x_star_tree in
let x_star_tree_xFG_trace =
Domain.BaseMap.singleton x_base (Domain.make_starred_leaf x_star_trace)
in
let open OUnit2 in
let no_trace = "NONE" in
let get_trace_str access_path tree =
match Domain.get_trace access_path tree with
| Some trace ->
F.asprintf "%a" MockTraceDomain.pp trace
| None ->
no_trace
in
let assert_traces_eq access_path tree expected_trace_str =
let actual_trace_str = get_trace_str access_path tree in
let pp_diff fmt (actual, expected) =
F.fprintf fmt "Expected to retrieve trace %s but got %s" expected actual
in
assert_equal ~pp_diff actual_trace_str expected_trace_str
in
let assert_trace_not_found access_path tree = assert_traces_eq access_path tree no_trace in
let assert_node_equal access_path tree expected_node =
match Domain.get_node access_path tree with
| Some actual_node ->
let pp_diff fmt (actual, expected) =
F.fprintf fmt "Expected to retrieve node %a but got %a" Domain.pp_node expected
Domain.pp_node actual
in
assert_equal ~pp_diff expected_node actual_node
| None ->
assert false
in
let get_trace_test =
let get_trace_test_ _ =
(* exact access path tests *)
assert_traces_eq z tree "{ z }" ;
assert_traces_eq xF tree "{ xF }" ;
assert_traces_eq yF tree "{ yF }" ;
assert_traces_eq xFG tree "{ xFG }" ;
assert_trace_not_found xG tree ;
(* starred access path tests *)
assert_traces_eq x_star tree "{ x, xF, xFG }" ;
assert_traces_eq xF_star tree "{ xF, xFG }" ;
assert_trace_not_found xG_star tree ;
assert_trace_not_found a_star tree ;
(* starred tree tests *)
assert_traces_eq zF tree "{ z }" ;
assert_traces_eq zFG tree "{ z }" ;
assert_traces_eq z_star tree "{ z }" ;
assert_traces_eq y_star tree "{ y, yF }" ;
assert_traces_eq yF_star tree "{ yF }" ;
assert_traces_eq yFG tree "{ yF }" ;
assert_trace_not_found yG tree ;
(* get_trace is just (fst get_node), so light tests here *)
(* exact access path tests *)
assert_node_equal z tree z_subtree ;
assert_node_equal xF tree g_subtree ;
assert_node_equal xFG tree (Domain.make_normal_leaf xFG_trace) ;
(* starred tree tests *)
assert_node_equal yFG tree yF_subtree ;
(* starred access path tests *)
let joined_y_subtree =
Domain.AccessMap.singleton f yF_subtree
|> Domain.make_node (MockTraceDomain.join y_trace yF_trace)
in
assert_node_equal y_star tree joined_y_subtree
in
"get_trace" >:: get_trace_test_
in
let add_trace_test =
let add_trace_test_ _ =
(* special trace to indicate that we've added successfully *)
let added_trace = MockTraceDomain.singleton "added" in
let mk_x_y_base_tree trace =
Domain.BaseMap.singleton x_base (Domain.make_normal_leaf trace)
|> Domain.BaseMap.add y_base Domain.empty_node
in
let mk_xFG_node leaf_trace =
Domain.make_access_node MockTraceDomain.empty g leaf_trace
|> Domain.AccessMap.singleton f
|> Domain.make_node MockTraceDomain.empty
in
let mk_xFG_tree leaf_trace = mk_xFG_node leaf_trace |> Domain.BaseMap.singleton x_base in
let mk_xArrF_tree leaf_trace =
Domain.make_access_node MockTraceDomain.empty f leaf_trace
|> Domain.AccessMap.singleton array
|> Domain.make_node MockTraceDomain.empty
|> Domain.BaseMap.singleton x_base
in
(* normal tests *)
(* add base when absent *)
let x_y_base_tree_with_added_trace = mk_x_y_base_tree added_trace in
Domain.assert_trees_equal
(Domain.add_trace x added_trace y_base_tree)
x_y_base_tree_with_added_trace ;
(* add base when present *)
Domain.assert_trees_equal
(Domain.add_trace x added_trace x_y_base_tree)
x_y_base_tree_with_added_trace ;
let x_y_base_tree_with_y_trace = mk_x_y_base_tree y_trace in
Domain.assert_trees_equal
(Domain.add_trace x added_trace x_y_base_tree_with_y_trace)
x_y_base_tree_with_added_trace ;
(* add path when absent *)
let xFG_tree_added_trace = mk_xFG_tree added_trace in
Domain.assert_trees_equal (Domain.add_trace xFG added_trace x_base_tree) xFG_tree_added_trace ;
(* add path when present *)
let xFG_tree_y_trace = mk_xFG_tree y_trace in
Domain.assert_trees_equal
(Domain.add_trace xFG added_trace xFG_tree_y_trace)
xFG_tree_added_trace ;
(* add starred path when base absent *)
let xF_star_tree_added_trace =
Domain.make_starred_leaf added_trace
|> Domain.AccessMap.singleton f
|> Domain.make_node MockTraceDomain.empty
|> Domain.BaseMap.singleton x_base
in
Domain.assert_trees_equal
(Domain.add_trace xF_star added_trace Domain.bottom)
xF_star_tree_added_trace ;
(* add starred path when base present *)
Domain.assert_trees_equal
(Domain.add_trace xF_star added_trace x_base_tree)
xF_star_tree_added_trace ;
(* adding array path should do weak updates *)
let aArrF_tree = mk_xArrF_tree array_f_trace in
let aArrF_tree_joined_trace =
mk_xArrF_tree (MockTraceDomain.join added_trace array_f_trace)
in
Domain.assert_trees_equal
(Domain.add_trace xArrF added_trace aArrF_tree)
aArrF_tree_joined_trace ;
(* starred tests *)
(* we should do a strong update when updating x.f* with x.f *)
let yF_tree_added_trace =
Domain.make_normal_leaf added_trace
|> Domain.AccessMap.singleton f |> Domain.make_node y_trace
|> Domain.BaseMap.singleton y_base
in
Domain.assert_trees_equal (Domain.add_trace yF added_trace yF_star_tree) yF_tree_added_trace ;
(* but not when updating x* with x.f *)
let x_star_tree_added_trace =
let joined_trace = MockTraceDomain.join x_trace added_trace in
Domain.BaseMap.singleton x_base (Domain.make_starred_leaf joined_trace)
in
Domain.assert_trees_equal
(Domain.add_trace xF added_trace x_star_tree)
x_star_tree_added_trace ;
when updating x.f.g with x.f * , we should remember traces associated with f and even as
we replace that subtree with a *
we replace that subtree with a * *)
let xF_star_tree_joined_traces =
let joined_trace =
MockTraceDomain.join added_trace xFG_trace |> MockTraceDomain.join xF_trace
in
Domain.make_starred_leaf joined_trace
|> Domain.AccessMap.singleton f |> Domain.make_node x_trace
|> Domain.BaseMap.singleton x_base
in
Domain.assert_trees_equal
(Domain.add_trace xF_star added_trace xFG_tree)
xF_star_tree_joined_traces ;
[ add_node ] tests are sparse , since [ add_trace ] is just [ add_node ] < empty node > . main things
to test are ( 1 ) adding a non - empty node works , ( 2 ) adding a non - empty node does the proper
joins in the weak update case
to test are (1) adding a non-empty node works, (2) adding a non-empty node does the proper
joins in the weak update case *)
case ( 1 ): adding XFG to y base tree works
let y_xFG_tree = Domain.BaseMap.add y_base Domain.empty_node (mk_xFG_tree xFG_trace) in
Domain.assert_trees_equal (Domain.add_node x (mk_xFG_node xFG_trace) y_base_tree) y_xFG_tree ;
case ( 2 ): adding a non - empty node does weak updates when required
let arr_tree =
let arr_subtree =
Domain.AccessMap.singleton f (Domain.make_normal_leaf array_f_trace)
|> Domain.AccessMap.add g (Domain.make_normal_leaf xFG_trace)
in
Domain.AccessMap.singleton array (Domain.make_node xF_trace arr_subtree)
|> Domain.make_node MockTraceDomain.empty
|> Domain.BaseMap.singleton x_base
in
Domain.assert_trees_equal (Domain.add_node xArr g_subtree aArrF_tree) arr_tree
in
"add_trace" >:: add_trace_test_
in
let lteq_test =
let lteq_test_ _ =
(* regular tree tests *)
assert_bool "<= equal;" (Domain.leq ~lhs:tree ~rhs:tree) ;
assert_bool "<= bases" (Domain.leq ~lhs:x_base_tree ~rhs:x_y_base_tree) ;
assert_bool "<= regular1" (Domain.leq ~lhs:x_base_tree ~rhs:xFG_tree) ;
assert_bool "<= regular2" (Domain.leq ~lhs:xFG_tree ~rhs:tree) ;
assert_bool "<= regular3" (Domain.leq ~lhs:y_base_tree ~rhs:tree) ;
assert_bool "<= bases negative1" (not (Domain.leq ~lhs:x_y_base_tree ~rhs:x_base_tree)) ;
assert_bool "<= bases negative2" (not (Domain.leq ~lhs:x_base_tree ~rhs:y_base_tree)) ;
assert_bool "<= negative1" (not (Domain.leq ~lhs:xFG_tree ~rhs:y_base_tree)) ;
assert_bool "<= negative2" (not (Domain.leq ~lhs:tree ~rhs:xFG_tree)) ;
(* star tree tests *)
assert_bool "<= star lhs equal" (Domain.leq ~lhs:x_star_tree ~rhs:x_star_tree) ;
assert_bool "<= star rhs1" (Domain.leq ~lhs:x_base_tree ~rhs:x_star_tree) ;
assert_bool "<= star rhs2" (Domain.leq ~lhs:xFG_tree ~rhs:x_star_tree) ;
assert_bool "<= star rhs3" (Domain.leq ~lhs:y_base_tree ~rhs:yF_star_tree) ;
assert_bool "<= star rhs4" (Domain.leq ~lhs:yF_star_tree ~rhs:tree) ;
assert_bool "<= star lhs negative1" (not (Domain.leq ~lhs:x_star_tree ~rhs:x_base_tree)) ;
assert_bool "<= star lhs negative2" (not (Domain.leq ~lhs:x_star_tree ~rhs:xFG_tree)) ;
assert_bool "<= star lhs negative3" (not (Domain.leq ~lhs:yF_star_tree ~rhs:y_base_tree)) ;
assert_bool "<= star lhs negative4" (not (Domain.leq ~lhs:tree ~rhs:yF_star_tree)) ;
(* <= tree but not <= trace tests *)
same as x_base_tree , but with a trace higher in the traces lattice
let x_base_tree_higher_trace =
Domain.BaseMap.singleton x_base (Domain.make_normal_leaf y_trace)
in
(* same as x_star_tree, but with a trace incomparable in the traces lattice *)
let x_star_tree_diff_trace =
Domain.BaseMap.singleton x_base (Domain.make_starred_leaf y_trace)
in
assert_bool "(x, {}) <= (x, {y})" (Domain.leq ~lhs:x_base_tree ~rhs:x_base_tree_higher_trace) ;
assert_bool "(x, {y}) not <= (x, {})"
(not (Domain.leq ~lhs:x_base_tree_higher_trace ~rhs:x_base_tree)) ;
assert_bool "(x*, {y})* not <= (x*, {x})"
(not (Domain.leq ~lhs:x_star_tree_diff_trace ~rhs:x_star_tree)) ;
assert_bool "(x*, {x})* not <= (x*, {y})"
(not (Domain.leq ~lhs:x_star_tree ~rhs:x_star_tree_diff_trace))
in
"lteq" >:: lteq_test_
in
let join_test =
let join_test_ _ =
(* normal |_| normal *)
Domain.assert_trees_equal (Domain.join x_base_tree y_base_tree) x_y_base_tree ;
Domain.assert_trees_equal (Domain.join y_base_tree x_base_tree) x_y_base_tree ;
Domain.assert_trees_equal (Domain.join x_y_base_tree x_base_tree) x_y_base_tree ;
Domain.assert_trees_equal (Domain.join x_base_tree xFG_tree) xFG_tree ;
(* starred |_| starred *)
Domain.assert_trees_equal (Domain.join x_star_tree yF_star_tree) x_yF_star_tree ;
(* normal |_| starred *)
Domain.assert_trees_equal (Domain.join tree xFG_tree) tree ;
(* [x_star_tree] and [x_base_tree] both have trace "{ x }" associated with x... *)
Domain.assert_trees_equal (Domain.join x_star_tree x_base_tree) x_star_tree ;
(* ...but [xFG_tree] has some nested traces that should get joined with "{ x }" *)
Domain.assert_trees_equal (Domain.join x_star_tree xFG_tree) x_star_tree_xFG_trace
in
"join" >:: join_test_
in
let widen_test =
let widen_test_ _ =
let make_x_base_tree trace =
Domain.BaseMap.singleton x_base (Domain.make_normal_leaf trace)
in
let widen prev next = Domain.widen ~prev ~next ~num_iters:4 in
(* a bit light on the tests here, since widen is implemented as a simple wrapper of join *)
widening traces works :
x |- > ( " x " , empty ) \/ x |- > ( " y " , empty ) =
x |- > ( T , empty )
x |-> ("x", empty) \/ x |-> ("y", empty) =
x |-> (T, empty)
*)
let x_tree_x_trace = make_x_base_tree x_trace in
let x_tree_y_trace = make_x_base_tree y_trace in
let x_tree_top_trace = make_x_base_tree MockTraceDomain.top in
Domain.assert_trees_equal (widen x_tree_x_trace x_tree_y_trace) x_tree_top_trace ;
adding stars to a base works :
x |- > ( { } , empty ) \/ y |- > ( { } , empty ) =
( x |- > ( { } , empty ) , y |- > ( { } , Star ) )
x |-> ({}, empty) \/ y |-> ({}, empty) =
(x |-> ({}, empty), y |-> ({}, Star) )
*)
let x_y_star_base_tree =
Domain.BaseMap.add y_base (Domain.make_starred_leaf MockTraceDomain.empty) x_base_tree
in
Domain.assert_trees_equal (widen x_base_tree y_base_tree) x_y_star_base_tree ;
adding stars to a subtree works :
x |- > ( " y " , empty ) \/
x |- > ( " x " , f |- > ( " f " , g |- > ( " g " , empty ) ) ) =
x |- > ( T , f |- > ( T , * ) )
x |-> ("y", empty) \/
x |-> ("x" , f |-> ("f", g |-> ("g", empty))) =
x |-> (T , f |-> (T, * ))
*)
let xF_star_tree =
Domain.AccessMap.singleton f (Domain.make_starred_leaf MockTraceDomain.top)
|> Domain.make_node MockTraceDomain.top
|> Domain.BaseMap.singleton x_base
in
Domain.assert_trees_equal (widen x_tree_y_trace xFG_tree) xF_star_tree ;
widening is not commutative , and is it not join :
x |- > ( " x " , f |- > ( " f " , g |- > ( " g " , empty ) ) ) \/
x |- > ( " y " , empty ) =
x |- > ( T , f |- > ( " f " , g |- > ( " g " , empty ) ) )
x |-> ("x" , f |-> ("f", g |-> ("g", empty))) \/
x |-> ("y", empty) =
x |-> (T , f |-> ("f", g |-> ("g", empty)))
*)
let xFG_tree_widened_trace =
let _, xFG_node = x_subtree in
Domain.BaseMap.singleton x_base (MockTraceDomain.top, xFG_node)
in
Domain.assert_trees_equal (widen xFG_tree x_tree_y_trace) xFG_tree_widened_trace
in
"widen" >:: widen_test_
in
let fold_test =
let fold_test_ _ =
let collect_ap_traces acc ap trace = (ap, trace) :: acc in
let ap_traces = Domain.trace_fold collect_ap_traces tree [] in
let has_ap_trace_pair ap_in trace_in =
List.exists
~f:(fun (ap, trace) ->
AccessPath.Abs.equal ap ap_in && MockTraceDomain.equal trace trace_in )
ap_traces
in
assert_bool "Should have six ap/trace pairs" (Int.equal (List.length ap_traces) 6) ;
assert_bool "has x pair" (has_ap_trace_pair x x_trace) ;
assert_bool "has xF pair" (has_ap_trace_pair xF xF_trace) ;
assert_bool "has xFG pair" (has_ap_trace_pair xFG xFG_trace) ;
assert_bool "has y pair" (has_ap_trace_pair y y_trace) ;
assert_bool "has yF* pair" (has_ap_trace_pair yF_star yF_trace) ;
assert_bool "has z pair" (has_ap_trace_pair z_star z_trace)
in
"fold" >:: fold_test_
in
let depth_test =
let depth_test_ _ =
assert_equal (Domain.depth Domain.bottom) 0 ;
assert_equal (Domain.depth x_base_tree) 1 ;
assert_equal (Domain.depth x_y_base_tree) 1 ;
assert_equal (Domain.depth xFG_tree) 3 ;
assert_equal (Domain.depth x_star_tree) 1 ;
assert_equal (Domain.depth yF_star_tree) 2 ;
assert_equal (Domain.depth x_yF_star_tree) 2
in
"depth" >:: depth_test_
in
let max_depth_test =
let max_depth_test_ _ =
let module Max1 = MakeTree (struct
let max_depth = 1
let max_width = Int.max_value / 2
end) in
let f_node =
Max1.AccessMap.singleton f (Max1.make_normal_leaf x_trace)
|> Max1.make_node MockTraceDomain.empty
in
let x_tree = Max1.BaseMap.singleton x_base (Max1.make_normal_leaf x_trace) in
let x_star_tree = Max1.BaseMap.singleton x_base (Max1.make_starred_leaf x_trace) in
adding ( x.f , " x " ) to a tree with max height 1 should yield x |- > ( " x " , * )
Max1.assert_trees_equal (Max1.add_trace xF x_trace Max1.bottom) x_star_tree ;
(* same, but with (x.f.g, "x") *)
Max1.assert_trees_equal (Max1.add_trace xFG x_trace Max1.bottom) x_star_tree ;
(* adding node (f, "x") via access path x should also yield the same tree *)
Max1.assert_trees_equal (Max1.add_node x f_node Max1.bottom) x_star_tree ;
(* adding (x, "x") shouldn't add stars *)
Max1.assert_trees_equal (Max1.add_trace x x_trace Max1.bottom) x_tree ;
let module Max2 = MakeTree (struct
let max_depth = 2
let max_width = Int.max_value / 2
end) in
let f_node =
Max2.AccessMap.singleton f (Max2.make_normal_leaf x_trace)
|> Max2.make_node MockTraceDomain.empty
in
let fG_node =
Max2.make_access_node MockTraceDomain.empty g x_trace
|> Max2.AccessMap.singleton f
|> Max2.make_node MockTraceDomain.empty
in
let f_star_node =
Max2.AccessMap.singleton f (Max2.make_starred_leaf x_trace)
|> Max2.make_node MockTraceDomain.empty
in
let x_tree = Max2.BaseMap.singleton x_base Max2.empty_node in
let xF_tree = Max2.BaseMap.singleton x_base f_node in
let xF_star_tree = Max2.BaseMap.singleton x_base f_star_node in
(* adding x.f to an empty tree should't add stars... *)
Max2.assert_trees_equal (Max2.add_trace xF x_trace Max2.bottom) xF_tree ;
(* ... but adding x.f.g should *)
Max2.assert_trees_equal (Max2.add_trace xFG x_trace Max2.bottom) xF_star_tree ;
(* adding the node (f.g, "x") to a tree with x should produce the same result *)
Max2.assert_trees_equal (Max2.add_node x fG_node x_tree) xF_star_tree
in
"max_depth" >:: max_depth_test_
in
let max_width_test =
let max_width_test_ _ =
let module Max1 = MakeTree (struct
let max_depth = Int.max_value / 2
let max_width = 1
end) in
let x_base_tree = Max1.BaseMap.singleton x_base Max1.empty_node in
let y_base_tree = Max1.BaseMap.singleton y_base Max1.empty_node in
let x_y_base_tree = Max1.BaseMap.add y_base Max1.empty_node x_base_tree in
let f_node =
Max1.AccessMap.singleton f (Max1.make_normal_leaf y_trace)
|> Max1.make_node MockTraceDomain.empty
in
let g_node =
Max1.AccessMap.singleton g (Max1.make_normal_leaf z_trace)
|> Max1.make_node MockTraceDomain.empty
in
let star_node = Max1.make_starred_leaf (MockTraceDomain.join y_trace z_trace) in
let xF_tree = Max1.BaseMap.singleton x_base f_node in
let xG_tree = Max1.BaseMap.singleton x_base g_node in
let x_star_tree = Max1.BaseMap.singleton x_base star_node in
(* adding x.f to a tree containing just x should work *)
Max1.assert_trees_equal (Max1.add_trace xF y_trace Max1.bottom) xF_tree ;
(* but adding x.g to a tree containing x.f should create a star *)
Max1.assert_trees_equal (Max1.add_trace xG z_trace xF_tree) x_star_tree ;
joining the x.f and x.g trees should also create a star
Max1.assert_trees_equal (Max1.join xF_tree xG_tree) x_star_tree ;
(* adding x.f to a tree where it's already present shouldn't create a star *)
Max1.assert_trees_equal (Max1.add_trace xF y_trace xF_tree) xF_tree ;
(* and joining the same tree with itself shouldn't either *)
Max1.assert_trees_equal (Max1.join xF_tree xF_tree) xF_tree ;
(* note that the width limit doesn't apply to the base layer *)
Max1.assert_trees_equal (Max1.join x_base_tree y_base_tree) x_y_base_tree
in
"max_width" >:: max_width_test_
in
"access_tree_suite"
>::: [ get_trace_test
; add_trace_test
; lteq_test
; join_test
; widen_test
; fold_test
; depth_test
; max_depth_test
; max_width_test ]
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/unit/accessTreeTests.ml | ocaml | string set domain we use to ensure we're getting the expected traces
total hack of a widening just to test that widening of traces is working
similarly, hack printing so top looks different
exact access path tests
starred access path tests
starred tree tests
get_trace is just (fst get_node), so light tests here
exact access path tests
starred tree tests
starred access path tests
special trace to indicate that we've added successfully
normal tests
add base when absent
add base when present
add path when absent
add path when present
add starred path when base absent
add starred path when base present
adding array path should do weak updates
starred tests
we should do a strong update when updating x.f* with x.f
but not when updating x* with x.f
regular tree tests
star tree tests
<= tree but not <= trace tests
same as x_star_tree, but with a trace incomparable in the traces lattice
normal |_| normal
starred |_| starred
normal |_| starred
[x_star_tree] and [x_base_tree] both have trace "{ x }" associated with x...
...but [xFG_tree] has some nested traces that should get joined with "{ x }"
a bit light on the tests here, since widen is implemented as a simple wrapper of join
same, but with (x.f.g, "x")
adding node (f, "x") via access path x should also yield the same tree
adding (x, "x") shouldn't add stars
adding x.f to an empty tree should't add stars...
... but adding x.f.g should
adding the node (f.g, "x") to a tree with x should produce the same result
adding x.f to a tree containing just x should work
but adding x.g to a tree containing x.f should create a star
adding x.f to a tree where it's already present shouldn't create a star
and joining the same tree with itself shouldn't either
note that the width limit doesn't apply to the base layer |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
module MockTraceDomain = struct
include AbstractDomain.FiniteSet (String)
let top_str = "T"
let top = singleton top_str
let singleton e =
assert (e <> top_str) ;
singleton e
let widen ~prev ~next ~num_iters:_ =
let trace_diff = diff next prev in
if not (is_empty trace_diff) then top else join prev next
let pp fmt s = if phys_equal s top then F.pp_print_char fmt 'T' else pp fmt s
end
module MakeTree (Config : AccessTree.Config) = struct
include AccessTree.Make (MockTraceDomain) (Config)
let assert_trees_equal tree1 tree2 =
let rec access_tree_equal (trace1, subtree1) (trace2, subtree2) =
MockTraceDomain.equal trace1 trace2
&&
match (subtree1, subtree2) with
| Star, Star ->
true
| Subtree t1, Subtree t2 ->
AccessMap.equal access_tree_equal t1 t2
| _ ->
false
in
let base_tree_equal tree1 tree2 = BaseMap.equal access_tree_equal tree1 tree2 in
let pp_diff fmt (actual, expected) =
F.fprintf fmt "Expected to get tree %a but got %a" pp expected pp actual
in
OUnit2.assert_equal ~cmp:base_tree_equal ~pp_diff tree1 tree2
end
module Domain = MakeTree (AccessTree.DefaultConfig)
let tests =
let open AccessPathTestUtils in
let x_base = make_base "x" in
let y_base = make_base "y" in
let z_base = make_base "z" in
let f = make_field_access "f" in
let g = make_field_access "g" in
let array = make_array_access (Typ.mk Tvoid) in
let x = AccessPath.Abs.Exact (make_access_path "x" []) in
let xF = AccessPath.Abs.Exact (make_access_path "x" ["f"]) in
let xG = AccessPath.Abs.Exact (make_access_path "x" ["g"]) in
let xFG = AccessPath.Abs.Exact (make_access_path "x" ["f"; "g"]) in
let y = AccessPath.Abs.Exact (make_access_path "y" []) in
let yF = AccessPath.Abs.Exact (make_access_path "y" ["f"]) in
let yG = AccessPath.Abs.Exact (make_access_path "y" ["g"]) in
let yFG = AccessPath.Abs.Exact (make_access_path "y" ["f"; "g"]) in
let z = AccessPath.Abs.Exact (make_access_path "z" []) in
let zF = AccessPath.Abs.Exact (make_access_path "z" ["f"]) in
let zFG = AccessPath.Abs.Exact (make_access_path "z" ["f"; "g"]) in
let xArr = AccessPath.Abs.Exact (make_base "x", [array]) in
let xArrF =
let accesses = [array; make_field_access "f"] in
AccessPath.Abs.Exact (make_base "x", accesses)
in
let a_star = AccessPath.Abs.Abstracted (make_access_path "a" []) in
let x_star = AccessPath.Abs.Abstracted (make_access_path "x" []) in
let xF_star = AccessPath.Abs.Abstracted (make_access_path "x" ["f"]) in
let xG_star = AccessPath.Abs.Abstracted (make_access_path "x" ["g"]) in
let y_star = AccessPath.Abs.Abstracted (make_access_path "y" []) in
let yF_star = AccessPath.Abs.Abstracted (make_access_path "y" ["f"]) in
let z_star = AccessPath.Abs.Abstracted (make_access_path "z" []) in
let x_trace = MockTraceDomain.singleton "x" in
let y_trace = MockTraceDomain.singleton "y" in
let z_trace = MockTraceDomain.singleton "z" in
let xF_trace = MockTraceDomain.singleton "xF" in
let yF_trace = MockTraceDomain.singleton "yF" in
let xFG_trace = MockTraceDomain.singleton "xFG" in
let array_f_trace = MockTraceDomain.singleton "arrayF" in
let x_star_trace = MockTraceDomain.of_list ["x"; "xF"; "xFG"] in
let g_subtree = Domain.make_access_node xF_trace g xFG_trace in
let x_subtree = Domain.AccessMap.singleton f g_subtree |> Domain.make_node x_trace in
let yF_subtree = Domain.make_starred_leaf yF_trace in
let y_subtree = Domain.AccessMap.singleton f yF_subtree |> Domain.make_node y_trace in
let z_subtree = Domain.make_starred_leaf z_trace in
let tree =
Domain.BaseMap.singleton x_base x_subtree
|> Domain.BaseMap.add y_base y_subtree
|> Domain.BaseMap.add z_base z_subtree
in
let x_base_tree = Domain.BaseMap.singleton x_base Domain.empty_node in
let y_base_tree = Domain.BaseMap.singleton y_base Domain.empty_node in
let x_y_base_tree = Domain.BaseMap.add y_base Domain.empty_node x_base_tree in
let xFG_tree = Domain.BaseMap.singleton x_base x_subtree in
let x_star_tree = Domain.BaseMap.singleton x_base (Domain.make_starred_leaf x_trace) in
let yF_star_tree = Domain.BaseMap.singleton y_base y_subtree in
let x_yF_star_tree = Domain.BaseMap.add y_base y_subtree x_star_tree in
let x_star_tree_xFG_trace =
Domain.BaseMap.singleton x_base (Domain.make_starred_leaf x_star_trace)
in
let open OUnit2 in
let no_trace = "NONE" in
let get_trace_str access_path tree =
match Domain.get_trace access_path tree with
| Some trace ->
F.asprintf "%a" MockTraceDomain.pp trace
| None ->
no_trace
in
let assert_traces_eq access_path tree expected_trace_str =
let actual_trace_str = get_trace_str access_path tree in
let pp_diff fmt (actual, expected) =
F.fprintf fmt "Expected to retrieve trace %s but got %s" expected actual
in
assert_equal ~pp_diff actual_trace_str expected_trace_str
in
let assert_trace_not_found access_path tree = assert_traces_eq access_path tree no_trace in
let assert_node_equal access_path tree expected_node =
match Domain.get_node access_path tree with
| Some actual_node ->
let pp_diff fmt (actual, expected) =
F.fprintf fmt "Expected to retrieve node %a but got %a" Domain.pp_node expected
Domain.pp_node actual
in
assert_equal ~pp_diff expected_node actual_node
| None ->
assert false
in
let get_trace_test =
let get_trace_test_ _ =
assert_traces_eq z tree "{ z }" ;
assert_traces_eq xF tree "{ xF }" ;
assert_traces_eq yF tree "{ yF }" ;
assert_traces_eq xFG tree "{ xFG }" ;
assert_trace_not_found xG tree ;
assert_traces_eq x_star tree "{ x, xF, xFG }" ;
assert_traces_eq xF_star tree "{ xF, xFG }" ;
assert_trace_not_found xG_star tree ;
assert_trace_not_found a_star tree ;
assert_traces_eq zF tree "{ z }" ;
assert_traces_eq zFG tree "{ z }" ;
assert_traces_eq z_star tree "{ z }" ;
assert_traces_eq y_star tree "{ y, yF }" ;
assert_traces_eq yF_star tree "{ yF }" ;
assert_traces_eq yFG tree "{ yF }" ;
assert_trace_not_found yG tree ;
assert_node_equal z tree z_subtree ;
assert_node_equal xF tree g_subtree ;
assert_node_equal xFG tree (Domain.make_normal_leaf xFG_trace) ;
assert_node_equal yFG tree yF_subtree ;
let joined_y_subtree =
Domain.AccessMap.singleton f yF_subtree
|> Domain.make_node (MockTraceDomain.join y_trace yF_trace)
in
assert_node_equal y_star tree joined_y_subtree
in
"get_trace" >:: get_trace_test_
in
let add_trace_test =
let add_trace_test_ _ =
let added_trace = MockTraceDomain.singleton "added" in
let mk_x_y_base_tree trace =
Domain.BaseMap.singleton x_base (Domain.make_normal_leaf trace)
|> Domain.BaseMap.add y_base Domain.empty_node
in
let mk_xFG_node leaf_trace =
Domain.make_access_node MockTraceDomain.empty g leaf_trace
|> Domain.AccessMap.singleton f
|> Domain.make_node MockTraceDomain.empty
in
let mk_xFG_tree leaf_trace = mk_xFG_node leaf_trace |> Domain.BaseMap.singleton x_base in
let mk_xArrF_tree leaf_trace =
Domain.make_access_node MockTraceDomain.empty f leaf_trace
|> Domain.AccessMap.singleton array
|> Domain.make_node MockTraceDomain.empty
|> Domain.BaseMap.singleton x_base
in
let x_y_base_tree_with_added_trace = mk_x_y_base_tree added_trace in
Domain.assert_trees_equal
(Domain.add_trace x added_trace y_base_tree)
x_y_base_tree_with_added_trace ;
Domain.assert_trees_equal
(Domain.add_trace x added_trace x_y_base_tree)
x_y_base_tree_with_added_trace ;
let x_y_base_tree_with_y_trace = mk_x_y_base_tree y_trace in
Domain.assert_trees_equal
(Domain.add_trace x added_trace x_y_base_tree_with_y_trace)
x_y_base_tree_with_added_trace ;
let xFG_tree_added_trace = mk_xFG_tree added_trace in
Domain.assert_trees_equal (Domain.add_trace xFG added_trace x_base_tree) xFG_tree_added_trace ;
let xFG_tree_y_trace = mk_xFG_tree y_trace in
Domain.assert_trees_equal
(Domain.add_trace xFG added_trace xFG_tree_y_trace)
xFG_tree_added_trace ;
let xF_star_tree_added_trace =
Domain.make_starred_leaf added_trace
|> Domain.AccessMap.singleton f
|> Domain.make_node MockTraceDomain.empty
|> Domain.BaseMap.singleton x_base
in
Domain.assert_trees_equal
(Domain.add_trace xF_star added_trace Domain.bottom)
xF_star_tree_added_trace ;
Domain.assert_trees_equal
(Domain.add_trace xF_star added_trace x_base_tree)
xF_star_tree_added_trace ;
let aArrF_tree = mk_xArrF_tree array_f_trace in
let aArrF_tree_joined_trace =
mk_xArrF_tree (MockTraceDomain.join added_trace array_f_trace)
in
Domain.assert_trees_equal
(Domain.add_trace xArrF added_trace aArrF_tree)
aArrF_tree_joined_trace ;
let yF_tree_added_trace =
Domain.make_normal_leaf added_trace
|> Domain.AccessMap.singleton f |> Domain.make_node y_trace
|> Domain.BaseMap.singleton y_base
in
Domain.assert_trees_equal (Domain.add_trace yF added_trace yF_star_tree) yF_tree_added_trace ;
let x_star_tree_added_trace =
let joined_trace = MockTraceDomain.join x_trace added_trace in
Domain.BaseMap.singleton x_base (Domain.make_starred_leaf joined_trace)
in
Domain.assert_trees_equal
(Domain.add_trace xF added_trace x_star_tree)
x_star_tree_added_trace ;
when updating x.f.g with x.f * , we should remember traces associated with f and even as
we replace that subtree with a *
we replace that subtree with a * *)
let xF_star_tree_joined_traces =
let joined_trace =
MockTraceDomain.join added_trace xFG_trace |> MockTraceDomain.join xF_trace
in
Domain.make_starred_leaf joined_trace
|> Domain.AccessMap.singleton f |> Domain.make_node x_trace
|> Domain.BaseMap.singleton x_base
in
Domain.assert_trees_equal
(Domain.add_trace xF_star added_trace xFG_tree)
xF_star_tree_joined_traces ;
[ add_node ] tests are sparse , since [ add_trace ] is just [ add_node ] < empty node > . main things
to test are ( 1 ) adding a non - empty node works , ( 2 ) adding a non - empty node does the proper
joins in the weak update case
to test are (1) adding a non-empty node works, (2) adding a non-empty node does the proper
joins in the weak update case *)
case ( 1 ): adding XFG to y base tree works
let y_xFG_tree = Domain.BaseMap.add y_base Domain.empty_node (mk_xFG_tree xFG_trace) in
Domain.assert_trees_equal (Domain.add_node x (mk_xFG_node xFG_trace) y_base_tree) y_xFG_tree ;
case ( 2 ): adding a non - empty node does weak updates when required
let arr_tree =
let arr_subtree =
Domain.AccessMap.singleton f (Domain.make_normal_leaf array_f_trace)
|> Domain.AccessMap.add g (Domain.make_normal_leaf xFG_trace)
in
Domain.AccessMap.singleton array (Domain.make_node xF_trace arr_subtree)
|> Domain.make_node MockTraceDomain.empty
|> Domain.BaseMap.singleton x_base
in
Domain.assert_trees_equal (Domain.add_node xArr g_subtree aArrF_tree) arr_tree
in
"add_trace" >:: add_trace_test_
in
let lteq_test =
let lteq_test_ _ =
assert_bool "<= equal;" (Domain.leq ~lhs:tree ~rhs:tree) ;
assert_bool "<= bases" (Domain.leq ~lhs:x_base_tree ~rhs:x_y_base_tree) ;
assert_bool "<= regular1" (Domain.leq ~lhs:x_base_tree ~rhs:xFG_tree) ;
assert_bool "<= regular2" (Domain.leq ~lhs:xFG_tree ~rhs:tree) ;
assert_bool "<= regular3" (Domain.leq ~lhs:y_base_tree ~rhs:tree) ;
assert_bool "<= bases negative1" (not (Domain.leq ~lhs:x_y_base_tree ~rhs:x_base_tree)) ;
assert_bool "<= bases negative2" (not (Domain.leq ~lhs:x_base_tree ~rhs:y_base_tree)) ;
assert_bool "<= negative1" (not (Domain.leq ~lhs:xFG_tree ~rhs:y_base_tree)) ;
assert_bool "<= negative2" (not (Domain.leq ~lhs:tree ~rhs:xFG_tree)) ;
assert_bool "<= star lhs equal" (Domain.leq ~lhs:x_star_tree ~rhs:x_star_tree) ;
assert_bool "<= star rhs1" (Domain.leq ~lhs:x_base_tree ~rhs:x_star_tree) ;
assert_bool "<= star rhs2" (Domain.leq ~lhs:xFG_tree ~rhs:x_star_tree) ;
assert_bool "<= star rhs3" (Domain.leq ~lhs:y_base_tree ~rhs:yF_star_tree) ;
assert_bool "<= star rhs4" (Domain.leq ~lhs:yF_star_tree ~rhs:tree) ;
assert_bool "<= star lhs negative1" (not (Domain.leq ~lhs:x_star_tree ~rhs:x_base_tree)) ;
assert_bool "<= star lhs negative2" (not (Domain.leq ~lhs:x_star_tree ~rhs:xFG_tree)) ;
assert_bool "<= star lhs negative3" (not (Domain.leq ~lhs:yF_star_tree ~rhs:y_base_tree)) ;
assert_bool "<= star lhs negative4" (not (Domain.leq ~lhs:tree ~rhs:yF_star_tree)) ;
same as x_base_tree , but with a trace higher in the traces lattice
let x_base_tree_higher_trace =
Domain.BaseMap.singleton x_base (Domain.make_normal_leaf y_trace)
in
let x_star_tree_diff_trace =
Domain.BaseMap.singleton x_base (Domain.make_starred_leaf y_trace)
in
assert_bool "(x, {}) <= (x, {y})" (Domain.leq ~lhs:x_base_tree ~rhs:x_base_tree_higher_trace) ;
assert_bool "(x, {y}) not <= (x, {})"
(not (Domain.leq ~lhs:x_base_tree_higher_trace ~rhs:x_base_tree)) ;
assert_bool "(x*, {y})* not <= (x*, {x})"
(not (Domain.leq ~lhs:x_star_tree_diff_trace ~rhs:x_star_tree)) ;
assert_bool "(x*, {x})* not <= (x*, {y})"
(not (Domain.leq ~lhs:x_star_tree ~rhs:x_star_tree_diff_trace))
in
"lteq" >:: lteq_test_
in
let join_test =
let join_test_ _ =
Domain.assert_trees_equal (Domain.join x_base_tree y_base_tree) x_y_base_tree ;
Domain.assert_trees_equal (Domain.join y_base_tree x_base_tree) x_y_base_tree ;
Domain.assert_trees_equal (Domain.join x_y_base_tree x_base_tree) x_y_base_tree ;
Domain.assert_trees_equal (Domain.join x_base_tree xFG_tree) xFG_tree ;
Domain.assert_trees_equal (Domain.join x_star_tree yF_star_tree) x_yF_star_tree ;
Domain.assert_trees_equal (Domain.join tree xFG_tree) tree ;
Domain.assert_trees_equal (Domain.join x_star_tree x_base_tree) x_star_tree ;
Domain.assert_trees_equal (Domain.join x_star_tree xFG_tree) x_star_tree_xFG_trace
in
"join" >:: join_test_
in
let widen_test =
let widen_test_ _ =
let make_x_base_tree trace =
Domain.BaseMap.singleton x_base (Domain.make_normal_leaf trace)
in
let widen prev next = Domain.widen ~prev ~next ~num_iters:4 in
widening traces works :
x |- > ( " x " , empty ) \/ x |- > ( " y " , empty ) =
x |- > ( T , empty )
x |-> ("x", empty) \/ x |-> ("y", empty) =
x |-> (T, empty)
*)
let x_tree_x_trace = make_x_base_tree x_trace in
let x_tree_y_trace = make_x_base_tree y_trace in
let x_tree_top_trace = make_x_base_tree MockTraceDomain.top in
Domain.assert_trees_equal (widen x_tree_x_trace x_tree_y_trace) x_tree_top_trace ;
adding stars to a base works :
x |- > ( { } , empty ) \/ y |- > ( { } , empty ) =
( x |- > ( { } , empty ) , y |- > ( { } , Star ) )
x |-> ({}, empty) \/ y |-> ({}, empty) =
(x |-> ({}, empty), y |-> ({}, Star) )
*)
let x_y_star_base_tree =
Domain.BaseMap.add y_base (Domain.make_starred_leaf MockTraceDomain.empty) x_base_tree
in
Domain.assert_trees_equal (widen x_base_tree y_base_tree) x_y_star_base_tree ;
adding stars to a subtree works :
x |- > ( " y " , empty ) \/
x |- > ( " x " , f |- > ( " f " , g |- > ( " g " , empty ) ) ) =
x |- > ( T , f |- > ( T , * ) )
x |-> ("y", empty) \/
x |-> ("x" , f |-> ("f", g |-> ("g", empty))) =
x |-> (T , f |-> (T, * ))
*)
let xF_star_tree =
Domain.AccessMap.singleton f (Domain.make_starred_leaf MockTraceDomain.top)
|> Domain.make_node MockTraceDomain.top
|> Domain.BaseMap.singleton x_base
in
Domain.assert_trees_equal (widen x_tree_y_trace xFG_tree) xF_star_tree ;
widening is not commutative , and is it not join :
x |- > ( " x " , f |- > ( " f " , g |- > ( " g " , empty ) ) ) \/
x |- > ( " y " , empty ) =
x |- > ( T , f |- > ( " f " , g |- > ( " g " , empty ) ) )
x |-> ("x" , f |-> ("f", g |-> ("g", empty))) \/
x |-> ("y", empty) =
x |-> (T , f |-> ("f", g |-> ("g", empty)))
*)
let xFG_tree_widened_trace =
let _, xFG_node = x_subtree in
Domain.BaseMap.singleton x_base (MockTraceDomain.top, xFG_node)
in
Domain.assert_trees_equal (widen xFG_tree x_tree_y_trace) xFG_tree_widened_trace
in
"widen" >:: widen_test_
in
let fold_test =
let fold_test_ _ =
let collect_ap_traces acc ap trace = (ap, trace) :: acc in
let ap_traces = Domain.trace_fold collect_ap_traces tree [] in
let has_ap_trace_pair ap_in trace_in =
List.exists
~f:(fun (ap, trace) ->
AccessPath.Abs.equal ap ap_in && MockTraceDomain.equal trace trace_in )
ap_traces
in
assert_bool "Should have six ap/trace pairs" (Int.equal (List.length ap_traces) 6) ;
assert_bool "has x pair" (has_ap_trace_pair x x_trace) ;
assert_bool "has xF pair" (has_ap_trace_pair xF xF_trace) ;
assert_bool "has xFG pair" (has_ap_trace_pair xFG xFG_trace) ;
assert_bool "has y pair" (has_ap_trace_pair y y_trace) ;
assert_bool "has yF* pair" (has_ap_trace_pair yF_star yF_trace) ;
assert_bool "has z pair" (has_ap_trace_pair z_star z_trace)
in
"fold" >:: fold_test_
in
let depth_test =
let depth_test_ _ =
assert_equal (Domain.depth Domain.bottom) 0 ;
assert_equal (Domain.depth x_base_tree) 1 ;
assert_equal (Domain.depth x_y_base_tree) 1 ;
assert_equal (Domain.depth xFG_tree) 3 ;
assert_equal (Domain.depth x_star_tree) 1 ;
assert_equal (Domain.depth yF_star_tree) 2 ;
assert_equal (Domain.depth x_yF_star_tree) 2
in
"depth" >:: depth_test_
in
let max_depth_test =
let max_depth_test_ _ =
let module Max1 = MakeTree (struct
let max_depth = 1
let max_width = Int.max_value / 2
end) in
let f_node =
Max1.AccessMap.singleton f (Max1.make_normal_leaf x_trace)
|> Max1.make_node MockTraceDomain.empty
in
let x_tree = Max1.BaseMap.singleton x_base (Max1.make_normal_leaf x_trace) in
let x_star_tree = Max1.BaseMap.singleton x_base (Max1.make_starred_leaf x_trace) in
adding ( x.f , " x " ) to a tree with max height 1 should yield x |- > ( " x " , * )
Max1.assert_trees_equal (Max1.add_trace xF x_trace Max1.bottom) x_star_tree ;
Max1.assert_trees_equal (Max1.add_trace xFG x_trace Max1.bottom) x_star_tree ;
Max1.assert_trees_equal (Max1.add_node x f_node Max1.bottom) x_star_tree ;
Max1.assert_trees_equal (Max1.add_trace x x_trace Max1.bottom) x_tree ;
let module Max2 = MakeTree (struct
let max_depth = 2
let max_width = Int.max_value / 2
end) in
let f_node =
Max2.AccessMap.singleton f (Max2.make_normal_leaf x_trace)
|> Max2.make_node MockTraceDomain.empty
in
let fG_node =
Max2.make_access_node MockTraceDomain.empty g x_trace
|> Max2.AccessMap.singleton f
|> Max2.make_node MockTraceDomain.empty
in
let f_star_node =
Max2.AccessMap.singleton f (Max2.make_starred_leaf x_trace)
|> Max2.make_node MockTraceDomain.empty
in
let x_tree = Max2.BaseMap.singleton x_base Max2.empty_node in
let xF_tree = Max2.BaseMap.singleton x_base f_node in
let xF_star_tree = Max2.BaseMap.singleton x_base f_star_node in
Max2.assert_trees_equal (Max2.add_trace xF x_trace Max2.bottom) xF_tree ;
Max2.assert_trees_equal (Max2.add_trace xFG x_trace Max2.bottom) xF_star_tree ;
Max2.assert_trees_equal (Max2.add_node x fG_node x_tree) xF_star_tree
in
"max_depth" >:: max_depth_test_
in
let max_width_test =
let max_width_test_ _ =
let module Max1 = MakeTree (struct
let max_depth = Int.max_value / 2
let max_width = 1
end) in
let x_base_tree = Max1.BaseMap.singleton x_base Max1.empty_node in
let y_base_tree = Max1.BaseMap.singleton y_base Max1.empty_node in
let x_y_base_tree = Max1.BaseMap.add y_base Max1.empty_node x_base_tree in
let f_node =
Max1.AccessMap.singleton f (Max1.make_normal_leaf y_trace)
|> Max1.make_node MockTraceDomain.empty
in
let g_node =
Max1.AccessMap.singleton g (Max1.make_normal_leaf z_trace)
|> Max1.make_node MockTraceDomain.empty
in
let star_node = Max1.make_starred_leaf (MockTraceDomain.join y_trace z_trace) in
let xF_tree = Max1.BaseMap.singleton x_base f_node in
let xG_tree = Max1.BaseMap.singleton x_base g_node in
let x_star_tree = Max1.BaseMap.singleton x_base star_node in
Max1.assert_trees_equal (Max1.add_trace xF y_trace Max1.bottom) xF_tree ;
Max1.assert_trees_equal (Max1.add_trace xG z_trace xF_tree) x_star_tree ;
joining the x.f and x.g trees should also create a star
Max1.assert_trees_equal (Max1.join xF_tree xG_tree) x_star_tree ;
Max1.assert_trees_equal (Max1.add_trace xF y_trace xF_tree) xF_tree ;
Max1.assert_trees_equal (Max1.join xF_tree xF_tree) xF_tree ;
Max1.assert_trees_equal (Max1.join x_base_tree y_base_tree) x_y_base_tree
in
"max_width" >:: max_width_test_
in
"access_tree_suite"
>::: [ get_trace_test
; add_trace_test
; lteq_test
; join_test
; widen_test
; fold_test
; depth_test
; max_depth_test
; max_width_test ]
|
02d2d4a4c3e7f7ecc27541c9a1cac0796b3ed243e2a18133d94d8efe68ebc44c | vrom911/Rum | ExprParser.hs | module Compiler.Rum.Internal.ExprParser where
import qualified Data.HashMap.Strict as HM
import Data.List (foldl')
import Text.Megaparsec
import Text.Megaparsec.String
import Compiler.Rum.Internal.AST
strSpace :: String -> Parser String
strSpace s = string s >>= \x -> space >> return x
chSpace :: Char -> Parser Char
chSpace s = char s <* space
keyWords :: [String]
keyWords = ["skip", "write", "if", "then", "else", "fi", "repeat", "until", "do", "od", "while", "for"]
varNameP :: Parser Variable
varNameP = Variable <$> ((((:) <$> (try (oneOf "_$") <|> letterChar)
<*> many (try alphaNumChar <|> oneOf "_-$")) >>= \x -> if x `elem` keyWords
then fail "Can not use Key words as variable names"
else pure x
) <* space)
parens :: Parser a -> Parser a
parens = between (chSpace '(') (chSpace ')')
rightAssocsP :: (a -> a -> a) -> Parser op -> Parser a -> Parser a
rightAssocsP f opP elP = do
el <- elP
rest <- many (opP *> rightAssocsP f opP elP)
pure $ if null rest then el else foldl' f el rest
leftAssocsP :: (a -> a -> a) -> Parser op -> Parser a -> Parser a
leftAssocsP f opP elP = elP >>= rest
where
rest x = opP *> elP >>= \y -> rest' (f x y)
rest' x = (opP *> elP >>= \y -> rest' (f x y)) <|> pure x
basicExprP :: Parser Expression
basicExprP = Const <$> numP
<|> ReadLn <$ strSpace "read()"
<|> Var <$> varNameP
<|> parens exprP
where
numP :: Parser Int
numP = (read <$> (try ((:) <$> char '-' <*> some digitChar) <|> some digitChar)) <* space
arithmeticExprP :: Parser Expression
arithmeticExprP = prior3
where
powP = rightAssocsP (BinOper Pow) (chSpace '^') basicExprP
p2 c op = leftAssocsP (BinOper c) (chSpace op) powP
prior2 = try (p2 Mul '*') <|> try (p2 Div '/') <|> try (p2 Mod '%') <|> powP
p3 c op = leftAssocsP (BinOper c) (chSpace op) prior2
prior3 = try (p3 Add '+') <|> try (p3 Sub '-') <|> prior2
compExprP :: Parser Expression
compExprP = do
le <- arithmeticExprP
op <- choice (strSpace <$> ["==", "!=", "<=", "<", ">=", ">"])
re <- arithmeticExprP
return $ CompOper ((\(Just s) -> s) $ HM.lookup op compMap) le re
where
compMap = HM.fromList [("==", Eq), ("!=", NotEq), ("<=", NotGt), ("<", Lt), (">=", NotLt), (">", Gt)]
binExprP :: Parser Expression
binExprP = try (parens compExprP <|> compExprP) <|> parens arithmeticExprP <|> arithmeticExprP
logicExprP :: Parser Expression
logicExprP = try lOr <|> try lAnd <|> binExprP
where
lAnd = leftAssocsP (LogicOper And) (strSpace "&&") binExprP
lOr = leftAssocsP (LogicOper Or) (strSpace "||") lAnd
exprP :: Parser Expression
exprP = try (parens exprP) <|> try (parens logicExprP <|> logicExprP) <|> parens binExprP
| null | https://raw.githubusercontent.com/vrom911/Rum/b060ff099cb0cb7c022b10902a7852d6c1ef1498/src/Compiler/Rum/Internal/ExprParser.hs | haskell | module Compiler.Rum.Internal.ExprParser where
import qualified Data.HashMap.Strict as HM
import Data.List (foldl')
import Text.Megaparsec
import Text.Megaparsec.String
import Compiler.Rum.Internal.AST
strSpace :: String -> Parser String
strSpace s = string s >>= \x -> space >> return x
chSpace :: Char -> Parser Char
chSpace s = char s <* space
keyWords :: [String]
keyWords = ["skip", "write", "if", "then", "else", "fi", "repeat", "until", "do", "od", "while", "for"]
varNameP :: Parser Variable
varNameP = Variable <$> ((((:) <$> (try (oneOf "_$") <|> letterChar)
<*> many (try alphaNumChar <|> oneOf "_-$")) >>= \x -> if x `elem` keyWords
then fail "Can not use Key words as variable names"
else pure x
) <* space)
parens :: Parser a -> Parser a
parens = between (chSpace '(') (chSpace ')')
rightAssocsP :: (a -> a -> a) -> Parser op -> Parser a -> Parser a
rightAssocsP f opP elP = do
el <- elP
rest <- many (opP *> rightAssocsP f opP elP)
pure $ if null rest then el else foldl' f el rest
leftAssocsP :: (a -> a -> a) -> Parser op -> Parser a -> Parser a
leftAssocsP f opP elP = elP >>= rest
where
rest x = opP *> elP >>= \y -> rest' (f x y)
rest' x = (opP *> elP >>= \y -> rest' (f x y)) <|> pure x
basicExprP :: Parser Expression
basicExprP = Const <$> numP
<|> ReadLn <$ strSpace "read()"
<|> Var <$> varNameP
<|> parens exprP
where
numP :: Parser Int
numP = (read <$> (try ((:) <$> char '-' <*> some digitChar) <|> some digitChar)) <* space
arithmeticExprP :: Parser Expression
arithmeticExprP = prior3
where
powP = rightAssocsP (BinOper Pow) (chSpace '^') basicExprP
p2 c op = leftAssocsP (BinOper c) (chSpace op) powP
prior2 = try (p2 Mul '*') <|> try (p2 Div '/') <|> try (p2 Mod '%') <|> powP
p3 c op = leftAssocsP (BinOper c) (chSpace op) prior2
prior3 = try (p3 Add '+') <|> try (p3 Sub '-') <|> prior2
compExprP :: Parser Expression
compExprP = do
le <- arithmeticExprP
op <- choice (strSpace <$> ["==", "!=", "<=", "<", ">=", ">"])
re <- arithmeticExprP
return $ CompOper ((\(Just s) -> s) $ HM.lookup op compMap) le re
where
compMap = HM.fromList [("==", Eq), ("!=", NotEq), ("<=", NotGt), ("<", Lt), (">=", NotLt), (">", Gt)]
binExprP :: Parser Expression
binExprP = try (parens compExprP <|> compExprP) <|> parens arithmeticExprP <|> arithmeticExprP
logicExprP :: Parser Expression
logicExprP = try lOr <|> try lAnd <|> binExprP
where
lAnd = leftAssocsP (LogicOper And) (strSpace "&&") binExprP
lOr = leftAssocsP (LogicOper Or) (strSpace "||") lAnd
exprP :: Parser Expression
exprP = try (parens exprP) <|> try (parens logicExprP <|> logicExprP) <|> parens binExprP
|
|
058f5533903a9678807df4e6b8bcd38fe283e5af181e372ae7532d1b7239538b | audreyt/openafp | ERG.hs |
module OpenAFP.Records.AFP.ERG where
import OpenAFP.Types
import OpenAFP.Internals
data ERG = ERG {
erg_Type :: !N3
,erg_ :: !N3
,erg :: !NStr
} deriving (Show, Typeable)
| null | https://raw.githubusercontent.com/audreyt/openafp/178e0dd427479ac7b8b461e05c263e52dd614b73/src/OpenAFP/Records/AFP/ERG.hs | haskell |
module OpenAFP.Records.AFP.ERG where
import OpenAFP.Types
import OpenAFP.Internals
data ERG = ERG {
erg_Type :: !N3
,erg_ :: !N3
,erg :: !NStr
} deriving (Show, Typeable)
|
|
6b631399915fdd0a3e3f3a3f9e62bc524812849b44df8b0bb16471a2f3cbd5c9 | district0x/district-ui-web3 | utils.cljs | (ns district.ui.web3.utils)
(defn web3-injected?
"Determines if the `web3` object has been injected by an
ethereum provider."
[]
(boolean (or (aget js/window "ethereum" ) (aget js/window "web3"))))
(defn web3-legacy?
"The old method of retrieving the current ethereum provider exposed
it at `window.web3.currentProvider`..
Notes:
- This changed in EIP-1102 to require authorization, and moved the
partial provider into `window.ethereum`.
- Can assume it isn't legacy when window.ethereum exists."
[]
(not (some-> js/window .-ethereum)))
| null | https://raw.githubusercontent.com/district0x/district-ui-web3/825785b70b653e82cd5bf66f0df69862d8f60bd0/src/district/ui/web3/utils.cljs | clojure | (ns district.ui.web3.utils)
(defn web3-injected?
"Determines if the `web3` object has been injected by an
ethereum provider."
[]
(boolean (or (aget js/window "ethereum" ) (aget js/window "web3"))))
(defn web3-legacy?
"The old method of retrieving the current ethereum provider exposed
it at `window.web3.currentProvider`..
Notes:
- This changed in EIP-1102 to require authorization, and moved the
partial provider into `window.ethereum`.
- Can assume it isn't legacy when window.ethereum exists."
[]
(not (some-> js/window .-ethereum)))
|
|
e7ee69f1ebd701e7809fc2d3b729caab95de099f1d5faaa8aa5245a5a5d9f538 | JonyEpsilon/darwin | metrics.clj | ;
This file is part of .
;
Copyright ( C ) 2014- , Imperial College , London , All rights reserved .
;
Contributors :
;
Released under the MIT license ..
;
(ns darwin.evolution.metrics
"Functions for capturing metrics for the run.")
(def metrics (atom {}))
(defn clear!
"Reset the metrics."
[]
(reset! metrics {}))
(defn add!
[key value]
(swap! metrics #(update-in % [key] (fn [x] (apply vector (conj x value))))))
(defn- calculate-stats
"Update a single population-level metric."
[values]
(let [mean-val (double (/ (apply + values) (count values)))
min-val (apply min values)
max-val (apply max values)]
[mean-val min-val max-val]))
(defn- update-stat
[key stat value]
(swap! metrics #(update-in % [key stat] (fn [x] (apply vector (conj x value))))))
(defn add-stats!
"Adds a metric derived from the statistics of a given set of values. Adds the mean, min and max of
the given values to the metric with the given name."
[key values]
(mapv #(update-stat key %1 %2) [:mean :min :max] (calculate-stats values)))
| null | https://raw.githubusercontent.com/JonyEpsilon/darwin/2b27aa83ec0b7bbc37effed243bf92673de586ea/src/darwin/evolution/metrics.clj | clojure | This file is part of .
Copyright ( C ) 2014- , Imperial College , London , All rights reserved .
Contributors :
Released under the MIT license ..
(ns darwin.evolution.metrics
"Functions for capturing metrics for the run.")
(def metrics (atom {}))
(defn clear!
"Reset the metrics."
[]
(reset! metrics {}))
(defn add!
[key value]
(swap! metrics #(update-in % [key] (fn [x] (apply vector (conj x value))))))
(defn- calculate-stats
"Update a single population-level metric."
[values]
(let [mean-val (double (/ (apply + values) (count values)))
min-val (apply min values)
max-val (apply max values)]
[mean-val min-val max-val]))
(defn- update-stat
[key stat value]
(swap! metrics #(update-in % [key stat] (fn [x] (apply vector (conj x value))))))
(defn add-stats!
"Adds a metric derived from the statistics of a given set of values. Adds the mean, min and max of
the given values to the metric with the given name."
[key values]
(mapv #(update-stat key %1 %2) [:mean :min :max] (calculate-stats values)))
|
|
3f516dc3a3ee97d3e7aaba4ce35dac2e012b0038f6412ec0a64742b274169172 | monadbobo/ocaml-core | extended_list_test.ml | open Core.Std
open OUnit
module L = Core_extended.Std.List
let is_even x = x mod 2 = 0
let test = "Extended_list" >::: [
"number" >::
(fun () ->
"base" @? (L.number [1;2;3;1;4] = [1,0;2,0;3,0;1,1;4,0]));
"multimerge" >::
(fun () ->
"base" @? (L.multimerge [[0;2];[2;3];[0;1];[1;2]] = [0;1;2;3]);
"dup" @? (L.multimerge [[0;1;2;0];[0;1]] = [0;1;2;0]);
(* There is no solution here: we just want to make sure that the
result has all the fields. *)
"circle" @? (
let header = L.multimerge [[0;1;2];[0;2;1;4]] in
List.sort ~cmp:Int.compare header = [0;1;2;4]));
("take_while" >:: fun () ->
"take evens" @? (
(L.take_while [2;4;6;7;8;9] is_even) = [2;4;6]));
("equal" >:::
let equal xs ys = L.equal ~equal:Int.equal xs ys in
let assert_equal xs ys = assert (equal xs ys) in
let assert_not_equal xs ys = assert (not (equal xs ys)) in
[
("1" >:: fun () -> assert_equal [] []);
("2" >:: fun () -> assert_not_equal [2] []);
("3" >:: fun () -> assert_not_equal [] [3]);
("4" >:: fun () -> assert_equal [4] [4]);
("5" >:: fun () -> assert_not_equal [0; 5] [0]);
("6" >:: fun () -> assert_not_equal [0] [0; 6]);
("7" >:: fun () -> assert_equal [0; 7] [0; 7]);
]);
("compare" >:::
let compare xs ys = L.compare ~cmp:Int.compare xs ys in
let assert_eq xs ys = assert (compare xs ys = 0) in
let assert_lt xs ys = assert (compare xs ys < 0) in
let assert_gt xs ys = assert (compare xs ys > 0) in
[
("1" >:: fun () -> assert_eq [] []);
("2" >:: fun () -> assert_gt [2] []);
("3" >:: fun () -> assert_lt [] [3]);
("4" >:: fun () -> assert_eq [4] [4]);
("4" >:: fun () -> assert_lt [3] [4]);
("4" >:: fun () -> assert_gt [3] [2]);
("5" >:: fun () -> assert_gt [0; 5] [0]);
("6" >:: fun () -> assert_lt [0] [0; 6]);
("5" >:: fun () -> assert_lt [0; 5] [1]);
("6" >:: fun () -> assert_gt [1] [0; 6]);
("7" >:: fun () -> assert_eq [0; 7] [0; 7]);
]);
]
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/core/extended/lib_test/extended_list_test.ml | ocaml | There is no solution here: we just want to make sure that the
result has all the fields. | open Core.Std
open OUnit
module L = Core_extended.Std.List
let is_even x = x mod 2 = 0
let test = "Extended_list" >::: [
"number" >::
(fun () ->
"base" @? (L.number [1;2;3;1;4] = [1,0;2,0;3,0;1,1;4,0]));
"multimerge" >::
(fun () ->
"base" @? (L.multimerge [[0;2];[2;3];[0;1];[1;2]] = [0;1;2;3]);
"dup" @? (L.multimerge [[0;1;2;0];[0;1]] = [0;1;2;0]);
"circle" @? (
let header = L.multimerge [[0;1;2];[0;2;1;4]] in
List.sort ~cmp:Int.compare header = [0;1;2;4]));
("take_while" >:: fun () ->
"take evens" @? (
(L.take_while [2;4;6;7;8;9] is_even) = [2;4;6]));
("equal" >:::
let equal xs ys = L.equal ~equal:Int.equal xs ys in
let assert_equal xs ys = assert (equal xs ys) in
let assert_not_equal xs ys = assert (not (equal xs ys)) in
[
("1" >:: fun () -> assert_equal [] []);
("2" >:: fun () -> assert_not_equal [2] []);
("3" >:: fun () -> assert_not_equal [] [3]);
("4" >:: fun () -> assert_equal [4] [4]);
("5" >:: fun () -> assert_not_equal [0; 5] [0]);
("6" >:: fun () -> assert_not_equal [0] [0; 6]);
("7" >:: fun () -> assert_equal [0; 7] [0; 7]);
]);
("compare" >:::
let compare xs ys = L.compare ~cmp:Int.compare xs ys in
let assert_eq xs ys = assert (compare xs ys = 0) in
let assert_lt xs ys = assert (compare xs ys < 0) in
let assert_gt xs ys = assert (compare xs ys > 0) in
[
("1" >:: fun () -> assert_eq [] []);
("2" >:: fun () -> assert_gt [2] []);
("3" >:: fun () -> assert_lt [] [3]);
("4" >:: fun () -> assert_eq [4] [4]);
("4" >:: fun () -> assert_lt [3] [4]);
("4" >:: fun () -> assert_gt [3] [2]);
("5" >:: fun () -> assert_gt [0; 5] [0]);
("6" >:: fun () -> assert_lt [0] [0; 6]);
("5" >:: fun () -> assert_lt [0; 5] [1]);
("6" >:: fun () -> assert_gt [1] [0; 6]);
("7" >:: fun () -> assert_eq [0; 7] [0; 7]);
]);
]
|
14d3032bc39b671ce79321456b3fd3534738d3a39e1318ad42ee5e5bd796c692 | RyanGlScott/ghc-software-foundations | Perm.hs | # LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
module SF.VFA.Perm where
import Data.Type.Equality
import Prelude.Singletons
import SF.LF.Logic
import SF.LF.Poly
data Permutation :: forall a. [a] -> [a] -> Prop where
PermNil :: Permutation '[] '[]
PermSkip :: forall a (x :: a) (l :: [a]) (l' :: [a]).
Sing x -> Permutation l l' -> Permutation (x:l) (x:l')
PermSwap :: forall a (x :: a) (y :: a) (l :: [a]).
Sing x -> Sing y -> Sing l -> Permutation (y:x:l) (x:y:l)
PermTrans :: forall a (l :: [a]) (l' :: [a]) (l'' :: [a]).
Permutation l l' -> Permutation l' l'' -> Permutation l l''
permutationRefl :: forall a (l :: [a]). Sing l -> Permutation l l
permutationRefl SNil = PermNil
permutationRefl (SCons sx sxs) = PermSkip sx $ permutationRefl sxs
permutationConsAppend :: forall a (l :: [a]) (x :: a).
Sing l -> Sing x -> Permutation (x:l) (l ++ '[x])
permutationConsAppend SNil sx = permutationRefl (SCons sx SNil)
permutationConsAppend (SCons sl' sls') sx =
PermTrans (PermSwap sl' sx sls')
(PermSkip sl' (permutationConsAppend sls' sx))
permutationAppTail :: forall a (l :: [a]) (l' :: [a]) (tl :: [a]).
Sing tl -> Permutation l l' -> Permutation (l ++ tl) (l' ++ tl)
permutationAppTail stl PermNil = permutationRefl stl
permutationAppTail stl (PermSkip sx p) = PermSkip sx $ permutationAppTail stl p
permutationAppTail stl (PermSwap sx sy sl) = PermSwap sx sy (sl %++ stl)
permutationAppTail stl (PermTrans p1 p2) =
permutationAppTail stl p1 `PermTrans` permutationAppTail stl p2
permutationAppComm :: forall a (l :: [a]) (l' :: [a]).
Sing l -> Sing l' -> Permutation (l ++ l') (l' ++ l)
permutationAppComm SNil sl'
| Refl <- appNilR sl'
= permutationRefl sl'
permutationAppComm sl@(SCons slx slxs) sl'
| Refl <- appAssoc sl' (SCons slx SNil) slxs
= PermSkip slx (permutationAppComm slxs sl')
`PermTrans` permutationAppTail slxs (permutationConsAppend sl' slx)
`PermTrans` permutationRefl (sl' %++ sl)
| null | https://raw.githubusercontent.com/RyanGlScott/ghc-software-foundations/ce7b8958e0aed4fb2c8611d71e7e0f1a2ef83222/verified-functional-algorithms/src/SF/VFA/Perm.hs | haskell | # LANGUAGE GADTs # | # LANGUAGE DataKinds #
module SF.VFA.Perm where
import Data.Type.Equality
import Prelude.Singletons
import SF.LF.Logic
import SF.LF.Poly
data Permutation :: forall a. [a] -> [a] -> Prop where
PermNil :: Permutation '[] '[]
PermSkip :: forall a (x :: a) (l :: [a]) (l' :: [a]).
Sing x -> Permutation l l' -> Permutation (x:l) (x:l')
PermSwap :: forall a (x :: a) (y :: a) (l :: [a]).
Sing x -> Sing y -> Sing l -> Permutation (y:x:l) (x:y:l)
PermTrans :: forall a (l :: [a]) (l' :: [a]) (l'' :: [a]).
Permutation l l' -> Permutation l' l'' -> Permutation l l''
permutationRefl :: forall a (l :: [a]). Sing l -> Permutation l l
permutationRefl SNil = PermNil
permutationRefl (SCons sx sxs) = PermSkip sx $ permutationRefl sxs
permutationConsAppend :: forall a (l :: [a]) (x :: a).
Sing l -> Sing x -> Permutation (x:l) (l ++ '[x])
permutationConsAppend SNil sx = permutationRefl (SCons sx SNil)
permutationConsAppend (SCons sl' sls') sx =
PermTrans (PermSwap sl' sx sls')
(PermSkip sl' (permutationConsAppend sls' sx))
permutationAppTail :: forall a (l :: [a]) (l' :: [a]) (tl :: [a]).
Sing tl -> Permutation l l' -> Permutation (l ++ tl) (l' ++ tl)
permutationAppTail stl PermNil = permutationRefl stl
permutationAppTail stl (PermSkip sx p) = PermSkip sx $ permutationAppTail stl p
permutationAppTail stl (PermSwap sx sy sl) = PermSwap sx sy (sl %++ stl)
permutationAppTail stl (PermTrans p1 p2) =
permutationAppTail stl p1 `PermTrans` permutationAppTail stl p2
permutationAppComm :: forall a (l :: [a]) (l' :: [a]).
Sing l -> Sing l' -> Permutation (l ++ l') (l' ++ l)
permutationAppComm SNil sl'
| Refl <- appNilR sl'
= permutationRefl sl'
permutationAppComm sl@(SCons slx slxs) sl'
| Refl <- appAssoc sl' (SCons slx SNil) slxs
= PermSkip slx (permutationAppComm slxs sl')
`PermTrans` permutationAppTail slxs (permutationConsAppend sl' slx)
`PermTrans` permutationRefl (sl' %++ sl)
|
a5e2c00b5a5629f511d2100b306c3eff495206ce4bb814c8a93a60e8a048390e | klutometis/clrs | stack-queue.scm | (define (stack-enqueue! stack x)
(let* ((data (stack-data stack))
(length (vector-length data))
(temp (make-stack (make-vector length #f) -1)))
(push! temp x)
(loop ((until (stack-empty? stack)))
(push! temp (pop! stack)))
(loop ((until (stack-empty? temp)))
(push! stack (pop! temp)))))
(define (stack-dequeue! stack)
(let* ((data (stack-data stack))
(length (vector-length data))
(temp (make-stack (make-vector length #f) -1)))
(loop ((until (stack-empty? stack)))
(push! temp (pop! stack)))
(let ((x (pop! temp)))
(vector-fill! data #f)
(loop ((until (stack-empty? temp)))
(push! stack (pop! temp)))
x)))
(define (queue-push! queue x)
(let* ((data (queue-data queue))
(length (vector-length data))
(temp (make-queue (make-vector length #f) 0 0)))
(enqueue! temp x)
(loop ((until (queue-empty? queue)))
(enqueue! temp (dequeue! queue)))
(loop ((until (queue-empty? temp)))
(enqueue! queue (dequeue! temp)))))
(define (queue-pop! queue)
(let* ((data (queue-data queue))
(length (vector-length data))
(temp (make-queue (make-vector length #f) 0 0)))
(loop ((until (queue-empty? queue)))
(enqueue! temp (dequeue! queue)))
(let ((x (dequeue! temp)))
(vector-fill! data #f)
(loop ((until (queue-empty? temp)))
(enqueue! queue (dequeue! temp)))
x)))
| null | https://raw.githubusercontent.com/klutometis/clrs/f85a8f0036f0946c9e64dde3259a19acc62b74a1/10.1/stack-queue.scm | scheme | (define (stack-enqueue! stack x)
(let* ((data (stack-data stack))
(length (vector-length data))
(temp (make-stack (make-vector length #f) -1)))
(push! temp x)
(loop ((until (stack-empty? stack)))
(push! temp (pop! stack)))
(loop ((until (stack-empty? temp)))
(push! stack (pop! temp)))))
(define (stack-dequeue! stack)
(let* ((data (stack-data stack))
(length (vector-length data))
(temp (make-stack (make-vector length #f) -1)))
(loop ((until (stack-empty? stack)))
(push! temp (pop! stack)))
(let ((x (pop! temp)))
(vector-fill! data #f)
(loop ((until (stack-empty? temp)))
(push! stack (pop! temp)))
x)))
(define (queue-push! queue x)
(let* ((data (queue-data queue))
(length (vector-length data))
(temp (make-queue (make-vector length #f) 0 0)))
(enqueue! temp x)
(loop ((until (queue-empty? queue)))
(enqueue! temp (dequeue! queue)))
(loop ((until (queue-empty? temp)))
(enqueue! queue (dequeue! temp)))))
(define (queue-pop! queue)
(let* ((data (queue-data queue))
(length (vector-length data))
(temp (make-queue (make-vector length #f) 0 0)))
(loop ((until (queue-empty? queue)))
(enqueue! temp (dequeue! queue)))
(let ((x (dequeue! temp)))
(vector-fill! data #f)
(loop ((until (queue-empty? temp)))
(enqueue! queue (dequeue! temp)))
x)))
|
|
e108ebfbc9846d8f39e2c4e078a270884ba9407cd72ebe5739fff9d4c857a234 | rbardou/red | log.ml | let handlers = ref []
let add_handler (handler: string -> unit) =
handlers := handler :: !handlers
let info m = List.iter (fun handler -> handler m) !handlers
let info x = Printf.ksprintf info x
let warn m = info "Warning: %s" m
let warn x = Printf.ksprintf warn x
let error ?exn message =
match exn with
| None ->
info "Error: %s" message
| Some exn ->
info "Error: %s: %s" message (Printexc.to_string exn)
let error ?exn x = Printf.ksprintf (error ?exn) x
| null | https://raw.githubusercontent.com/rbardou/red/e23c2830909b9e5cd6afe563313435ddaeda90bf/src/log.ml | ocaml | let handlers = ref []
let add_handler (handler: string -> unit) =
handlers := handler :: !handlers
let info m = List.iter (fun handler -> handler m) !handlers
let info x = Printf.ksprintf info x
let warn m = info "Warning: %s" m
let warn x = Printf.ksprintf warn x
let error ?exn message =
match exn with
| None ->
info "Error: %s" message
| Some exn ->
info "Error: %s: %s" message (Printexc.to_string exn)
let error ?exn x = Printf.ksprintf (error ?exn) x
|
|
24092bd1fd2459af8829af3330bde8920e5beb07ba8364d8495e78d82cfcf0ba | eckyputrady/haskell-scotty-realworld-example-app | HTTP.hs | module Feature.Auth.HTTP where
import ClassyPrelude
import Feature.Auth.Types
import Feature.Common.Util (orThrow)
import Feature.Common.HTTP
import Control.Monad.Except
import Web.Scotty.Trans
import Network.HTTP.Types.Status
class Monad m => Service m where
resolveToken :: Token -> m (Either TokenError CurrentUser)
getCurrentUser :: (Service m) => ActionT LText m (Either TokenError CurrentUser)
getCurrentUser = do
mayHeaderVal <- header "Authorization"
runExceptT $ do
headerVal <- ExceptT $ pure mayHeaderVal `orThrow` TokenErrorNotFound
let token = toStrict $ drop 6 headerVal
ExceptT $ lift $ resolveToken token
optionalUser :: (Service m) => ActionT LText m (Maybe CurrentUser)
optionalUser =
either (const Nothing) Just <$> getCurrentUser
requireUser :: (Service m) => ActionT LText m CurrentUser
requireUser = do
result <- getCurrentUser
stopIfError tokenErrorHandler (pure result)
where
tokenErrorHandler e = do
status status401
json e
| null | https://raw.githubusercontent.com/eckyputrady/haskell-scotty-realworld-example-app/366a1eec021fb1bfcbc2d8e0485b59cbedba10e5/src/Feature/Auth/HTTP.hs | haskell | module Feature.Auth.HTTP where
import ClassyPrelude
import Feature.Auth.Types
import Feature.Common.Util (orThrow)
import Feature.Common.HTTP
import Control.Monad.Except
import Web.Scotty.Trans
import Network.HTTP.Types.Status
class Monad m => Service m where
resolveToken :: Token -> m (Either TokenError CurrentUser)
getCurrentUser :: (Service m) => ActionT LText m (Either TokenError CurrentUser)
getCurrentUser = do
mayHeaderVal <- header "Authorization"
runExceptT $ do
headerVal <- ExceptT $ pure mayHeaderVal `orThrow` TokenErrorNotFound
let token = toStrict $ drop 6 headerVal
ExceptT $ lift $ resolveToken token
optionalUser :: (Service m) => ActionT LText m (Maybe CurrentUser)
optionalUser =
either (const Nothing) Just <$> getCurrentUser
requireUser :: (Service m) => ActionT LText m CurrentUser
requireUser = do
result <- getCurrentUser
stopIfError tokenErrorHandler (pure result)
where
tokenErrorHandler e = do
status status401
json e
|
|
462f2733d612f76bb600e3f0a7262e30636131f82bb73f3af3773e7e74b157dd | clj-commons/useful | compress.clj | (ns flatland.useful.compress
(:import [java.util.zip DeflaterOutputStream InflaterInputStream]
[java.io ByteArrayOutputStream ByteArrayInputStream]
[sun.misc BASE64Decoder BASE64Encoder]))
(defn smash [^String str]
(let [out (ByteArrayOutputStream.)]
(doto (DeflaterOutputStream. out)
(.write (.getBytes str))
(.finish))
(-> (BASE64Encoder.)
(.encodeBuffer (.toByteArray out)))))
(defn unsmash [^String str]
(let [bytes (-> (BASE64Decoder.) (.decodeBuffer str))
in (ByteArrayInputStream. bytes)]
(slurp (InflaterInputStream. in))))
| null | https://raw.githubusercontent.com/clj-commons/useful/dc5cdebf8983a2e2ea24ec8951fbb4dfb037da45/src/flatland/useful/compress.clj | clojure | (ns flatland.useful.compress
(:import [java.util.zip DeflaterOutputStream InflaterInputStream]
[java.io ByteArrayOutputStream ByteArrayInputStream]
[sun.misc BASE64Decoder BASE64Encoder]))
(defn smash [^String str]
(let [out (ByteArrayOutputStream.)]
(doto (DeflaterOutputStream. out)
(.write (.getBytes str))
(.finish))
(-> (BASE64Encoder.)
(.encodeBuffer (.toByteArray out)))))
(defn unsmash [^String str]
(let [bytes (-> (BASE64Decoder.) (.decodeBuffer str))
in (ByteArrayInputStream. bytes)]
(slurp (InflaterInputStream. in))))
|
|
33abfd04b6589ec11f857e8a5eb89091243f5bad228081b22946d8ff5a03e201 | avras/nsime | nsime_ipv4_header.erl | %%
Copyright ( C ) 2012 < >
%%
%% This file is part of nsime.
%%
nsime is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
%% (at your option) any later version.
%%
%% nsime is distributed in the hope that it will be useful,
%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
%% GNU General Public License for more details.
%%
You should have received a copy of the GNU General Public License
%% along with nsime. If not, see </>.
%%
Purpose : IPv4 header module
Author :
-module(nsime_ipv4_header).
-author("Saravanan Vijayakumaran").
-include("nsime_ipv4_header.hrl").
-export([serialize/1, deserialize/1, enable_checksum/1,
set_payload_size/2, get_payload_size/1,
set_identification/2, get_identification/1,
set_tos/2, get_tos/1, set_dscp/2, get_dscp/1,
set_ecn/2, get_ecn/1, set_ttl/2, get_ttl/1,
set_more_fragments/1, set_last_fragment/1,
set_dont_fragment/1, set_may_fragment/1,
is_last_fragment/1, is_dont_fragment/1,
set_fragment_offset/2, get_fragment_offset/1,
set_protocol/2, get_protocol/1, is_checksum_ok/1,
set_source_address/2, get_source_address/1,
set_destination_address/2, get_destination_address/1]).
serialize(Header) ->
SrcAddress = binary:list_to_bin(
tuple_to_list(
Header#nsime_ipv4_header.source_address
)
),
DestAddress = binary:list_to_bin(
tuple_to_list(
Header#nsime_ipv4_header.destination_address
)
),
HeaderBinWithoutChecksum =
<<
?IP_VERSION:4,
(Header#nsime_ipv4_header.header_length):4,
(Header#nsime_ipv4_header.tos):8,
(Header#nsime_ipv4_header.total_length):16,
(Header#nsime_ipv4_header.identification):16,
(Header#nsime_ipv4_header.flags):3,
(Header#nsime_ipv4_header.fragment_offset):13,
(Header#nsime_ipv4_header.ttl):8,
(Header#nsime_ipv4_header.protocol):8,
0:16,
SrcAddress/binary,
DestAddress/binary
>>,
case Header#nsime_ipv4_header.calculate_checksum of
false ->
HeaderBinWithoutChecksum;
true ->
<<HeaderBeforeChecksum:80, 0:16, HeaderAfterChecksum:64>>
= HeaderBinWithoutChecksum,
Checksum = calculate_header_checksum(HeaderBinWithoutChecksum),
<<HeaderBeforeChecksum:80, Checksum:16, HeaderAfterChecksum:64>>
end.
deserialize(HeaderBinary) ->
<<
?IP_VERSION:4, HL:4, TOS:8,
TotalLength:16, Id:16, Flags:3, FragmentOffset:13,
TTL:8, Protocol:8, HeaderChecksum:16,
SrcAddress:32, DestAddress:32
>> = HeaderBinary,
#nsime_ipv4_header{
header_length = HL,
tos = TOS,
total_length = TotalLength,
identification = Id,
flags = Flags,
fragment_offset = FragmentOffset,
ttl = TTL,
protocol = Protocol,
checksum = HeaderChecksum,
source_address = list_to_tuple(binary:bin_to_list(<<SrcAddress:32>>)),
destination_address = list_to_tuple(binary:bin_to_list(<<DestAddress:32>>)),
checksum_correct = (calculate_header_checksum(HeaderBinary) == 0)
}.
enable_checksum(Header) ->
Header#nsime_ipv4_header{
calculate_checksum = true
}.
set_payload_size(Header, PayloadSize) ->
HeaderLength = Header#nsime_ipv4_header.header_length,
Header#nsime_ipv4_header{
total_length = HeaderLength*4 + PayloadSize
}.
get_payload_size(Header) ->
Header#nsime_ipv4_header.total_length -
4*Header#nsime_ipv4_header.header_length.
set_identification(Header, Id) ->
Header#nsime_ipv4_header{
identification = Id
}.
get_identification(Header) ->
Header#nsime_ipv4_header.identification.
set_tos(Header, TOS) ->
Header#nsime_ipv4_header{
tos = TOS
}.
get_tos(Header) ->
Header#nsime_ipv4_header.tos.
set_dscp(Header, DSCP) ->
TOS = Header#nsime_ipv4_header.tos,
<<NewTOS:8>> = <<DSCP:6, TOS:2>>,
Header#nsime_ipv4_header{
tos = NewTOS
}.
get_dscp(Header) ->
<<DSCP:6, _:2>> = <<(Header#nsime_ipv4_header.tos):8>>,
DSCP.
set_ecn(Header, ECN) ->
<<DSCP:6, _:2>> = <<(Header#nsime_ipv4_header.tos):8>>,
<<NewTOS:8>> = <<DSCP:6, ECN:2>>,
Header#nsime_ipv4_header{
tos = NewTOS
}.
get_ecn(Header) ->
<<_:6, ECN:2>> = <<(Header#nsime_ipv4_header.tos):8>>,
ECN.
set_ttl(Header, TTL) ->
Header#nsime_ipv4_header{
ttl = TTL
}.
get_ttl(Header) ->
Header#nsime_ipv4_header.ttl.
set_more_fragments(Header) ->
Flags = Header#nsime_ipv4_header.flags,
Header#nsime_ipv4_header{
flags = Flags bor ?MORE_FRAGMENTS
}.
set_last_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
<<Mask:3>> = <<bnot ?MORE_FRAGMENTS:3>>,
Header#nsime_ipv4_header{
flags = Flags band Mask
}.
set_dont_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
Header#nsime_ipv4_header{
flags = Flags bor ?DONT_FRAGMENT
}.
set_may_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
<<Mask:3>> = <<bnot ?DONT_FRAGMENT:3>>,
Header#nsime_ipv4_header{
flags = Flags band Mask
}.
is_last_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
(Flags band ?MORE_FRAGMENTS) == 0.
is_dont_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
(Flags band ?DONT_FRAGMENT) == 1.
set_fragment_offset(Header, FragmentOffset) ->
Header#nsime_ipv4_header{
fragment_offset = FragmentOffset
}.
get_fragment_offset(Header) ->
Header#nsime_ipv4_header.fragment_offset.
set_protocol(Header, Protocol) ->
Header#nsime_ipv4_header{
protocol = Protocol
}.
get_protocol(Header) ->
Header#nsime_ipv4_header.protocol.
is_checksum_ok(Header) ->
Header#nsime_ipv4_header.checksum_correct.
set_source_address(Header, SrcAddress) ->
Header#nsime_ipv4_header{
source_address = SrcAddress
}.
get_source_address(Header) ->
Header#nsime_ipv4_header.source_address.
set_destination_address(Header, DestAddress) ->
Header#nsime_ipv4_header{
destination_address = DestAddress
}.
get_destination_address(Header) ->
Header#nsime_ipv4_header.destination_address.
%% Helper methods %%
calculate_header_checksum(HeaderBinary) ->
<<A1:16, A2:16, A3:16, A4:16, A5:16, A6:16, A7:16, A8:16, A9:16, A10:16>> = HeaderBinary,
Sum = A1+A2+A3+A4+A5+A6+A7+A8+A9+A10,
<<Checksum:16>> = <<bnot((Sum band 65535) + (Sum bsr 16)):16>>,
Checksum.
| null | https://raw.githubusercontent.com/avras/nsime/fc5c164272aa649541bb3895d9f4bea34f45beec/src/nsime_ipv4_header.erl | erlang |
This file is part of nsime.
(at your option) any later version.
nsime is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with nsime. If not, see </>.
Helper methods %% | Copyright ( C ) 2012 < >
nsime is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
Purpose : IPv4 header module
Author :
-module(nsime_ipv4_header).
-author("Saravanan Vijayakumaran").
-include("nsime_ipv4_header.hrl").
-export([serialize/1, deserialize/1, enable_checksum/1,
set_payload_size/2, get_payload_size/1,
set_identification/2, get_identification/1,
set_tos/2, get_tos/1, set_dscp/2, get_dscp/1,
set_ecn/2, get_ecn/1, set_ttl/2, get_ttl/1,
set_more_fragments/1, set_last_fragment/1,
set_dont_fragment/1, set_may_fragment/1,
is_last_fragment/1, is_dont_fragment/1,
set_fragment_offset/2, get_fragment_offset/1,
set_protocol/2, get_protocol/1, is_checksum_ok/1,
set_source_address/2, get_source_address/1,
set_destination_address/2, get_destination_address/1]).
serialize(Header) ->
SrcAddress = binary:list_to_bin(
tuple_to_list(
Header#nsime_ipv4_header.source_address
)
),
DestAddress = binary:list_to_bin(
tuple_to_list(
Header#nsime_ipv4_header.destination_address
)
),
HeaderBinWithoutChecksum =
<<
?IP_VERSION:4,
(Header#nsime_ipv4_header.header_length):4,
(Header#nsime_ipv4_header.tos):8,
(Header#nsime_ipv4_header.total_length):16,
(Header#nsime_ipv4_header.identification):16,
(Header#nsime_ipv4_header.flags):3,
(Header#nsime_ipv4_header.fragment_offset):13,
(Header#nsime_ipv4_header.ttl):8,
(Header#nsime_ipv4_header.protocol):8,
0:16,
SrcAddress/binary,
DestAddress/binary
>>,
case Header#nsime_ipv4_header.calculate_checksum of
false ->
HeaderBinWithoutChecksum;
true ->
<<HeaderBeforeChecksum:80, 0:16, HeaderAfterChecksum:64>>
= HeaderBinWithoutChecksum,
Checksum = calculate_header_checksum(HeaderBinWithoutChecksum),
<<HeaderBeforeChecksum:80, Checksum:16, HeaderAfterChecksum:64>>
end.
deserialize(HeaderBinary) ->
<<
?IP_VERSION:4, HL:4, TOS:8,
TotalLength:16, Id:16, Flags:3, FragmentOffset:13,
TTL:8, Protocol:8, HeaderChecksum:16,
SrcAddress:32, DestAddress:32
>> = HeaderBinary,
#nsime_ipv4_header{
header_length = HL,
tos = TOS,
total_length = TotalLength,
identification = Id,
flags = Flags,
fragment_offset = FragmentOffset,
ttl = TTL,
protocol = Protocol,
checksum = HeaderChecksum,
source_address = list_to_tuple(binary:bin_to_list(<<SrcAddress:32>>)),
destination_address = list_to_tuple(binary:bin_to_list(<<DestAddress:32>>)),
checksum_correct = (calculate_header_checksum(HeaderBinary) == 0)
}.
enable_checksum(Header) ->
Header#nsime_ipv4_header{
calculate_checksum = true
}.
set_payload_size(Header, PayloadSize) ->
HeaderLength = Header#nsime_ipv4_header.header_length,
Header#nsime_ipv4_header{
total_length = HeaderLength*4 + PayloadSize
}.
get_payload_size(Header) ->
Header#nsime_ipv4_header.total_length -
4*Header#nsime_ipv4_header.header_length.
set_identification(Header, Id) ->
Header#nsime_ipv4_header{
identification = Id
}.
get_identification(Header) ->
Header#nsime_ipv4_header.identification.
set_tos(Header, TOS) ->
Header#nsime_ipv4_header{
tos = TOS
}.
get_tos(Header) ->
Header#nsime_ipv4_header.tos.
set_dscp(Header, DSCP) ->
TOS = Header#nsime_ipv4_header.tos,
<<NewTOS:8>> = <<DSCP:6, TOS:2>>,
Header#nsime_ipv4_header{
tos = NewTOS
}.
get_dscp(Header) ->
<<DSCP:6, _:2>> = <<(Header#nsime_ipv4_header.tos):8>>,
DSCP.
set_ecn(Header, ECN) ->
<<DSCP:6, _:2>> = <<(Header#nsime_ipv4_header.tos):8>>,
<<NewTOS:8>> = <<DSCP:6, ECN:2>>,
Header#nsime_ipv4_header{
tos = NewTOS
}.
get_ecn(Header) ->
<<_:6, ECN:2>> = <<(Header#nsime_ipv4_header.tos):8>>,
ECN.
set_ttl(Header, TTL) ->
Header#nsime_ipv4_header{
ttl = TTL
}.
get_ttl(Header) ->
Header#nsime_ipv4_header.ttl.
set_more_fragments(Header) ->
Flags = Header#nsime_ipv4_header.flags,
Header#nsime_ipv4_header{
flags = Flags bor ?MORE_FRAGMENTS
}.
set_last_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
<<Mask:3>> = <<bnot ?MORE_FRAGMENTS:3>>,
Header#nsime_ipv4_header{
flags = Flags band Mask
}.
set_dont_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
Header#nsime_ipv4_header{
flags = Flags bor ?DONT_FRAGMENT
}.
set_may_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
<<Mask:3>> = <<bnot ?DONT_FRAGMENT:3>>,
Header#nsime_ipv4_header{
flags = Flags band Mask
}.
is_last_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
(Flags band ?MORE_FRAGMENTS) == 0.
is_dont_fragment(Header) ->
Flags = Header#nsime_ipv4_header.flags,
(Flags band ?DONT_FRAGMENT) == 1.
set_fragment_offset(Header, FragmentOffset) ->
Header#nsime_ipv4_header{
fragment_offset = FragmentOffset
}.
get_fragment_offset(Header) ->
Header#nsime_ipv4_header.fragment_offset.
set_protocol(Header, Protocol) ->
Header#nsime_ipv4_header{
protocol = Protocol
}.
get_protocol(Header) ->
Header#nsime_ipv4_header.protocol.
is_checksum_ok(Header) ->
Header#nsime_ipv4_header.checksum_correct.
set_source_address(Header, SrcAddress) ->
Header#nsime_ipv4_header{
source_address = SrcAddress
}.
get_source_address(Header) ->
Header#nsime_ipv4_header.source_address.
set_destination_address(Header, DestAddress) ->
Header#nsime_ipv4_header{
destination_address = DestAddress
}.
get_destination_address(Header) ->
Header#nsime_ipv4_header.destination_address.
calculate_header_checksum(HeaderBinary) ->
<<A1:16, A2:16, A3:16, A4:16, A5:16, A6:16, A7:16, A8:16, A9:16, A10:16>> = HeaderBinary,
Sum = A1+A2+A3+A4+A5+A6+A7+A8+A9+A10,
<<Checksum:16>> = <<bnot((Sum band 65535) + (Sum bsr 16)):16>>,
Checksum.
|
0fce15272e1ff847c17c0d3c79f38f49fbf45a79ccb8079c5ceffcd9736a8efd | nasa/PRECiSA | DecisionPathTest.hs | -- Notices:
--
Copyright 2020 United States Government as represented by the Administrator of the National Aeronautics and Space Administration . All Rights Reserved .
-- Disclaimers
No Warranty : THE SUBJECT SOFTWARE IS PROVIDED " AS IS " WITHOUT ANY WARRANTY OF ANY KIND , EITHER EXPRESSED , IMPLIED , OR STATUTORY , INCLUDING , BUT NOT LIMITED TO , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO SPECIFICATIONS , ANY IMPLIED WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE , OR FREEDOM FROM INFRINGEMENT , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL BE ERROR FREE , OR ANY WARRANTY THAT DOCUMENTATION , IF PROVIDED , WILL CONFORM TO THE SUBJECT SOFTWARE . THIS AGREEMENT DOES NOT , IN ANY MANNER , CONSTITUTE AN ENDORSEMENT BY GOVERNMENT AGENCY OR ANY PRIOR RECIPIENT OF ANY RESULTS , RESULTING DESIGNS , HARDWARE , SOFTWARE PRODUCTS OR ANY OTHER APPLICATIONS RESULTING FROM USE OF THE SUBJECT SOFTWARE . FURTHER , GOVERNMENT AGENCY DISCLAIMS ALL WARRANTIES AND LIABILITIES REGARDING THIRD - PARTY SOFTWARE , IF PRESENT IN THE ORIGINAL SOFTWARE , AND DISTRIBUTES IT " AS IS . "
Waiver and Indemnity : RECIPIENT AGREES TO WAIVE ANY AND ALL CLAIMS AGAINST THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT . IF RECIPIENT 'S USE OF THE SUBJECT SOFTWARE RESULTS IN ANY LIABILITIES , DEMANDS , DAMAGES , EXPENSES OR LOSSES ARISING FROM SUCH USE , INCLUDING ANY DAMAGES FROM PRODUCTS BASED ON , OR RESULTING FROM , RECIPIENT 'S USE OF THE SUBJECT SOFTWARE , RECIPIENT SHALL INDEMNIFY AND HOLD HARMLESS THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT , TO THE EXTENT PERMITTED BY LAW . RECIPIENT 'S SOLE REMEDY FOR ANY SUCH MATTER SHALL BE THE IMMEDIATE , UNILATERAL TERMINATION OF THIS AGREEMENT .
module Common.DecisionPathTest where
import Common.DecisionPath hiding (root)
import qualified Common.DecisionPath as DP
import Test.Tasty
import Test.Tasty.HUnit
testCommonDecisionPath :: TestTree
testCommonDecisionPath = testGroup "Common.DecisionPath"
[testDecisionPath
]
root = DP.root :: LDecisionPath
testDecisionPath = testGroup "DecisionPath" [
maxCommonPrefix__tests,
maxCommonPrefixOfList__tests,
isPrefix__tests,
isPrefixInList__tests,
existsPrefixInList__tests
]
maxCommonPrefix__tests = testGroup "maxCommonPrefix tests"
[maxCommonPrefix__test1
,maxCommonPrefix__test2
,maxCommonPrefix__test3
]
maxCommonPrefix__test1 = testCase "0100 /\\ 0000 = 0" $
maxCommonPrefix dp1 dp2 @?= root ~> 0
where
dp1 = root ~> 0 ~> 1 ~> 0 ~> 0
dp2 = root ~> 0 ~> 0 ~> 0 ~> 0
maxCommonPrefix__test2 = testCase "0100 /\\ 0101 = 010" $
maxCommonPrefix dp1 dp2 @?= root ~> 0 ~> 1 ~> 0
where
dp1 = root ~> 0 ~> 1 ~> 0 ~> 0
dp2 = root ~> 0 ~> 1 ~> 0 ~> 1
maxCommonPrefix__test3 = testCase "0100 /\\ 011 = 01" $
maxCommonPrefix dp1 dp2 @?= root ~> 0 ~> 1
where
dp1 = root ~> 0 ~> 1 ~> 0 ~> 0
dp2 = root ~> 0 ~> 1 ~> 1
maxCommonPrefixOfList__tests = testGroup "maxCommonPrefixOfList tests" [
maxCommonPrefixOfList__test1,
maxCommonPrefixOfList__test2
]
maxCommonPrefixOfList__test1 = testCase "[0110,0100,0111] = 01" $
maxCommonPrefixOfList dpList @?= root ~> 0 ~> 1
where
dpList = [
root ~> 0 ~> 1 ~> 1 ~> 0,
root ~> 0 ~> 1 ~> 0 ~> 0,
root ~> 0 ~> 1 ~> 1 ~> 1
]
maxCommonPrefixOfList__test2 = testCase "root = root" $
maxCommonPrefixOfList dpList @?= root
where
dpList = []
isPrefix__tests = testGroup "isPrefix tests"
[isPrefix__test1
,isPrefix__test2
,isPrefix__test3
,isPrefix__test4
]
isPrefix__test1 = testCase "101 `isPrefix` 1011" $
(dp1 `isPrefix` dp2) @?= True
where
dp1 = root ~> 1 ~> 0 ~> 1
dp2 = root ~> 1 ~> 0 ~> 1 ~> 1
isPrefix__test2 = testCase "not $ 101 `isPrefix` 100" $
(dp1 `isPrefix` dp2) @?= False
where
dp1 = root ~> 1 ~> 0 ~> 1
dp2 = root ~> 1 ~> 0 ~> 0
isPrefix__test3 = testCase "root `isPrefix` 100" $
(dp1 `isPrefix` dp2) @?= True
where
dp1 = root
dp2 = root ~> 1 ~> 0 ~> 0
isPrefix__test4 = testCase "root `isPrefix` root" $
(root `isPrefix` root) @?= True
isPrefixInList__tests = testGroup "isPrefixInList tests"
[isPrefixInList__test1
,isPrefixInList__test2
,isPrefixInList__test3
,isPrefixInList__test4
,isPrefixInList__test5
]
isPrefixInList__test1 = testCase "0 `isPrefixInList` [10,01]" $
(dp `isPrefixInList` dpList) @?= True
where
dp = root ~> 0
dpList = [root ~> 1 ~> 0, root ~> 0 ~> 1]
isPrefixInList__test2 = testCase "not $ 0 `isPrefixInList` [10,11]" $
(dp `isPrefixInList` dpList) @?= False
where
dp = root ~> 0
dpList = [root ~> 1 ~> 0, root ~> 1 ~> 1]
isPrefixInList__test3 = testCase "not $ 0 `isPrefixInList` []" $
(dp `isPrefixInList` dpList) @?= False
where
dp = root ~> 0
dpList = []
isPrefixInList__test4 = testCase "not $ root `isPrefixInList` []" $
(dp `isPrefixInList` dpList) @?= False
where
dp = root
dpList = []
isPrefixInList__test5 = testCase "root `isPrefixInList` [10,01]" $
(dp `isPrefixInList` dpList) @?= True
where
dp = root
dpList = [root ~> 1 ~> 0, root ~> 0 ~> 1]
existsPrefixInList__tests = testGroup "existsPrefixInList tests"
[existsPrefixInList__test1
,existsPrefixInList__test2
,existsPrefixInList__test3
,existsPrefixInList__test4
,existsPrefixInList__test5
,existsPrefixInList__test6
]
existsPrefixInList__test1 = testCase "011 `existsPrefixInList` [10,0]" $
(dp `existsPrefixInList` dpList) @?= True
where
dp = root ~> 0 ~> 1 ~> 1
dpList = [root ~> 1 ~> 0, root ~> 0 ]
existsPrefixInList__test2 = testCase "not $ 0 `existsPrefixInList` [10,11]" $
(dp `existsPrefixInList` dpList) @?= False
where
dp = root ~> 0
dpList = [root ~> 1 ~> 0, root ~> 1 ~> 1]
existsPrefixInList__test3 = testCase "not $ 0 `existsPrefixInList` []" $
(dp `existsPrefixInList` dpList) @?= False
where
dp = root ~> 0
dpList = []
existsPrefixInList__test4 = testCase "not $ root `existsPrefixInList` []" $
(dp `existsPrefixInList` dpList) @?= False
where
dp = root
dpList = []
existsPrefixInList__test5 = testCase "root `existsPrefixInList` [root,01]" $
(dp `existsPrefixInList` dpList) @?= True
where
dp = root
dpList = [root, root ~> 0 ~> 1]
existsPrefixInList__test6 = testCase "not $ root `existsPrefixInList` [10,011]" $
(dp `existsPrefixInList` dpList) @?= False
where
dp = root
dpList = [root ~> 1 ~> 0, root ~> 0 ~> 1 ~> 1]
existsPrefixInList__test7 = testCase "1 `existsPrefixInList` [root,01]" $
(dp `existsPrefixInList` dpList) @?= True
where
dp = root ~> 0
dpList = [root, root ~> 0 ~> 1]
| null | https://raw.githubusercontent.com/nasa/PRECiSA/91e1e7543c5888ad5fb123d3462f71d085b99741/PRECiSA/tests/Common/DecisionPathTest.hs | haskell | Notices:
Disclaimers | Copyright 2020 United States Government as represented by the Administrator of the National Aeronautics and Space Administration . All Rights Reserved .
No Warranty : THE SUBJECT SOFTWARE IS PROVIDED " AS IS " WITHOUT ANY WARRANTY OF ANY KIND , EITHER EXPRESSED , IMPLIED , OR STATUTORY , INCLUDING , BUT NOT LIMITED TO , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO SPECIFICATIONS , ANY IMPLIED WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE , OR FREEDOM FROM INFRINGEMENT , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL BE ERROR FREE , OR ANY WARRANTY THAT DOCUMENTATION , IF PROVIDED , WILL CONFORM TO THE SUBJECT SOFTWARE . THIS AGREEMENT DOES NOT , IN ANY MANNER , CONSTITUTE AN ENDORSEMENT BY GOVERNMENT AGENCY OR ANY PRIOR RECIPIENT OF ANY RESULTS , RESULTING DESIGNS , HARDWARE , SOFTWARE PRODUCTS OR ANY OTHER APPLICATIONS RESULTING FROM USE OF THE SUBJECT SOFTWARE . FURTHER , GOVERNMENT AGENCY DISCLAIMS ALL WARRANTIES AND LIABILITIES REGARDING THIRD - PARTY SOFTWARE , IF PRESENT IN THE ORIGINAL SOFTWARE , AND DISTRIBUTES IT " AS IS . "
Waiver and Indemnity : RECIPIENT AGREES TO WAIVE ANY AND ALL CLAIMS AGAINST THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT . IF RECIPIENT 'S USE OF THE SUBJECT SOFTWARE RESULTS IN ANY LIABILITIES , DEMANDS , DAMAGES , EXPENSES OR LOSSES ARISING FROM SUCH USE , INCLUDING ANY DAMAGES FROM PRODUCTS BASED ON , OR RESULTING FROM , RECIPIENT 'S USE OF THE SUBJECT SOFTWARE , RECIPIENT SHALL INDEMNIFY AND HOLD HARMLESS THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT , TO THE EXTENT PERMITTED BY LAW . RECIPIENT 'S SOLE REMEDY FOR ANY SUCH MATTER SHALL BE THE IMMEDIATE , UNILATERAL TERMINATION OF THIS AGREEMENT .
module Common.DecisionPathTest where
import Common.DecisionPath hiding (root)
import qualified Common.DecisionPath as DP
import Test.Tasty
import Test.Tasty.HUnit
testCommonDecisionPath :: TestTree
testCommonDecisionPath = testGroup "Common.DecisionPath"
[testDecisionPath
]
root = DP.root :: LDecisionPath
testDecisionPath = testGroup "DecisionPath" [
maxCommonPrefix__tests,
maxCommonPrefixOfList__tests,
isPrefix__tests,
isPrefixInList__tests,
existsPrefixInList__tests
]
maxCommonPrefix__tests = testGroup "maxCommonPrefix tests"
[maxCommonPrefix__test1
,maxCommonPrefix__test2
,maxCommonPrefix__test3
]
maxCommonPrefix__test1 = testCase "0100 /\\ 0000 = 0" $
maxCommonPrefix dp1 dp2 @?= root ~> 0
where
dp1 = root ~> 0 ~> 1 ~> 0 ~> 0
dp2 = root ~> 0 ~> 0 ~> 0 ~> 0
maxCommonPrefix__test2 = testCase "0100 /\\ 0101 = 010" $
maxCommonPrefix dp1 dp2 @?= root ~> 0 ~> 1 ~> 0
where
dp1 = root ~> 0 ~> 1 ~> 0 ~> 0
dp2 = root ~> 0 ~> 1 ~> 0 ~> 1
maxCommonPrefix__test3 = testCase "0100 /\\ 011 = 01" $
maxCommonPrefix dp1 dp2 @?= root ~> 0 ~> 1
where
dp1 = root ~> 0 ~> 1 ~> 0 ~> 0
dp2 = root ~> 0 ~> 1 ~> 1
maxCommonPrefixOfList__tests = testGroup "maxCommonPrefixOfList tests" [
maxCommonPrefixOfList__test1,
maxCommonPrefixOfList__test2
]
maxCommonPrefixOfList__test1 = testCase "[0110,0100,0111] = 01" $
maxCommonPrefixOfList dpList @?= root ~> 0 ~> 1
where
dpList = [
root ~> 0 ~> 1 ~> 1 ~> 0,
root ~> 0 ~> 1 ~> 0 ~> 0,
root ~> 0 ~> 1 ~> 1 ~> 1
]
maxCommonPrefixOfList__test2 = testCase "root = root" $
maxCommonPrefixOfList dpList @?= root
where
dpList = []
isPrefix__tests = testGroup "isPrefix tests"
[isPrefix__test1
,isPrefix__test2
,isPrefix__test3
,isPrefix__test4
]
isPrefix__test1 = testCase "101 `isPrefix` 1011" $
(dp1 `isPrefix` dp2) @?= True
where
dp1 = root ~> 1 ~> 0 ~> 1
dp2 = root ~> 1 ~> 0 ~> 1 ~> 1
isPrefix__test2 = testCase "not $ 101 `isPrefix` 100" $
(dp1 `isPrefix` dp2) @?= False
where
dp1 = root ~> 1 ~> 0 ~> 1
dp2 = root ~> 1 ~> 0 ~> 0
isPrefix__test3 = testCase "root `isPrefix` 100" $
(dp1 `isPrefix` dp2) @?= True
where
dp1 = root
dp2 = root ~> 1 ~> 0 ~> 0
isPrefix__test4 = testCase "root `isPrefix` root" $
(root `isPrefix` root) @?= True
isPrefixInList__tests = testGroup "isPrefixInList tests"
[isPrefixInList__test1
,isPrefixInList__test2
,isPrefixInList__test3
,isPrefixInList__test4
,isPrefixInList__test5
]
isPrefixInList__test1 = testCase "0 `isPrefixInList` [10,01]" $
(dp `isPrefixInList` dpList) @?= True
where
dp = root ~> 0
dpList = [root ~> 1 ~> 0, root ~> 0 ~> 1]
isPrefixInList__test2 = testCase "not $ 0 `isPrefixInList` [10,11]" $
(dp `isPrefixInList` dpList) @?= False
where
dp = root ~> 0
dpList = [root ~> 1 ~> 0, root ~> 1 ~> 1]
isPrefixInList__test3 = testCase "not $ 0 `isPrefixInList` []" $
(dp `isPrefixInList` dpList) @?= False
where
dp = root ~> 0
dpList = []
isPrefixInList__test4 = testCase "not $ root `isPrefixInList` []" $
(dp `isPrefixInList` dpList) @?= False
where
dp = root
dpList = []
isPrefixInList__test5 = testCase "root `isPrefixInList` [10,01]" $
(dp `isPrefixInList` dpList) @?= True
where
dp = root
dpList = [root ~> 1 ~> 0, root ~> 0 ~> 1]
existsPrefixInList__tests = testGroup "existsPrefixInList tests"
[existsPrefixInList__test1
,existsPrefixInList__test2
,existsPrefixInList__test3
,existsPrefixInList__test4
,existsPrefixInList__test5
,existsPrefixInList__test6
]
existsPrefixInList__test1 = testCase "011 `existsPrefixInList` [10,0]" $
(dp `existsPrefixInList` dpList) @?= True
where
dp = root ~> 0 ~> 1 ~> 1
dpList = [root ~> 1 ~> 0, root ~> 0 ]
existsPrefixInList__test2 = testCase "not $ 0 `existsPrefixInList` [10,11]" $
(dp `existsPrefixInList` dpList) @?= False
where
dp = root ~> 0
dpList = [root ~> 1 ~> 0, root ~> 1 ~> 1]
existsPrefixInList__test3 = testCase "not $ 0 `existsPrefixInList` []" $
(dp `existsPrefixInList` dpList) @?= False
where
dp = root ~> 0
dpList = []
existsPrefixInList__test4 = testCase "not $ root `existsPrefixInList` []" $
(dp `existsPrefixInList` dpList) @?= False
where
dp = root
dpList = []
existsPrefixInList__test5 = testCase "root `existsPrefixInList` [root,01]" $
(dp `existsPrefixInList` dpList) @?= True
where
dp = root
dpList = [root, root ~> 0 ~> 1]
existsPrefixInList__test6 = testCase "not $ root `existsPrefixInList` [10,011]" $
(dp `existsPrefixInList` dpList) @?= False
where
dp = root
dpList = [root ~> 1 ~> 0, root ~> 0 ~> 1 ~> 1]
existsPrefixInList__test7 = testCase "1 `existsPrefixInList` [root,01]" $
(dp `existsPrefixInList` dpList) @?= True
where
dp = root ~> 0
dpList = [root, root ~> 0 ~> 1]
|
b326bcbd8e7c8f27d0e2b46d98247636e7bdc578d98d10de4ee0dd2aaa3aa533 | bryal/carth | Inferred.hs | # LANGUAGE TemplateHaskell , DataKinds #
-- TODO: Can this and Checked be merged to a single, parametrized AST?
| Type annotated AST as a result of typechecking
module Front.Inferred (module Front.Inferred, Type, TConst, WithPos(..), TVar(..), TPrim(..), Const(..), Type' (..), TConst') where
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import Data.Bifunctor
import Lens.Micro.Platform (makeLenses)
import Misc
import qualified Front.Parsed as Parsed
import Front.Parsed (Type, TConst, TVar(..), Const(..))
import Front.SrcPos
import Front.TypeAst
data TypeErr
= MainNotDefined
| InvalidUserTypeSig SrcPos Scheme Scheme
| CtorArityMismatch SrcPos String Int Int
| ConflictingPatVarDefs SrcPos String
| UndefCtor SrcPos String
| UndefVar SrcPos String
| InfType SrcPos Type Type TVar Type
| UnificationFailed SrcPos Type Type Type Type
| ConflictingTypeDef SrcPos String
| ConflictingCtorDef SrcPos String
| RedundantCase SrcPos
| InexhaustivePats SrcPos String
| ExternNotMonomorphic (Parsed.Id 'Parsed.Small) TVar
| FoundHole SrcPos
| RecTypeDef String SrcPos
| UndefType SrcPos String
| WrongMainType SrcPos Parsed.Scheme
| RecursiveVarDef (WithPos String)
| TypeInstArityMismatch SrcPos String Int Int
| ConflictingVarDef SrcPos String
| NoClassInstance SrcPos ClassConstraint
| FunCaseArityMismatch SrcPos Int Int
| FunArityMismatch SrcPos Int Int
| DeBruijnIndexOutOfRange SrcPos Word
| FreeVarsInData SrcPos TVar
| FreeVarsInAlias SrcPos TVar
deriving (Show)
type ClassConstraint = (String, [Type])
data Scheme = Forall
{ _scmParams :: Set TVar
, _scmConstraints :: Set ClassConstraint
, _scmBody :: Type
}
deriving (Show, Eq)
makeLenses ''Scheme
data TypedVar = TypedVar String Type
deriving (Show, Eq, Ord)
type VariantIx = Integer
type Span = Integer
data Variant = VariantIx VariantIx | VariantStr String
deriving (Show, Eq, Ord)
data Con = Con
{ variant :: Variant
, span :: Span
, argTs :: [Type]
}
deriving Show
data Pat'
= PVar TypedVar
| PWild
| PCon Con [Pat]
| PBox Pat
deriving Show
data Pat = Pat SrcPos Type Pat'
deriving Show
type Fun = ([(String, Type)], (Expr, Type))
type Cases = [(WithPos [Pat], Expr)]
type Match = WithPos ([Expr], Cases, [Type], Type)
| Whether a Var refers to a builtin virtual , or a global / local definition . So we do n't
-- have to keep as much state about environment definitions in later passes.
data Virt = Virt | NonVirt deriving (Show, Eq)
type Var = (Virt, TypedVar)
data Expr
= Lit Const
| Var Var
| App Expr [Expr] Type
| If Expr Expr Expr
| Let Def Expr
| Fun Fun
| Match Match
| Ctor VariantIx Span TConst [Type]
| Sizeof Type
deriving Show
type Defs = TopologicalOrder Def
data Def = VarDef VarDef | RecDefs RecDefs deriving Show
type VarDef = (String, (Scheme, Expr))
type RecDefs = [(String, (Scheme, Fun))]
data TypeDefRhs = Data [(WithPos String, [Type])] | Alias SrcPos Type
deriving Show
type TypeDefs = Map String ([TVar], TypeDefRhs)
type TypeAliases = Map String ([TVar], Type)
type Ctors = Map String (VariantIx, (String, [TVar]), [Type], Span)
type Externs = Map String Type
instance Eq Con where
(==) (Con c1 _ _) (Con c2 _ _) = c1 == c2
instance Ord Con where
compare (Con c1 _ _) (Con c2 _ _) = compare c1 c2
ftv :: Type -> Set TVar
ftv = \case
TVar tv -> Set.singleton tv
TPrim _ -> Set.empty
TFun pts rt -> Set.unions (ftv rt : map ftv pts)
TBox t -> ftv t
TConst (_, ts) -> Set.unions (map ftv ts)
defSigs :: Def -> [(String, Scheme)]
defSigs = \case
VarDef d -> [defSig d]
RecDefs ds -> map defSig ds
defSig :: (String, (Scheme, a)) -> (String, Scheme)
defSig = second fst
| null | https://raw.githubusercontent.com/bryal/carth/0c6026c82ce8ceb1a621c15a0e7505c4e6bc8782/src/Front/Inferred.hs | haskell | TODO: Can this and Checked be merged to a single, parametrized AST?
have to keep as much state about environment definitions in later passes. | # LANGUAGE TemplateHaskell , DataKinds #
| Type annotated AST as a result of typechecking
module Front.Inferred (module Front.Inferred, Type, TConst, WithPos(..), TVar(..), TPrim(..), Const(..), Type' (..), TConst') where
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import Data.Bifunctor
import Lens.Micro.Platform (makeLenses)
import Misc
import qualified Front.Parsed as Parsed
import Front.Parsed (Type, TConst, TVar(..), Const(..))
import Front.SrcPos
import Front.TypeAst
data TypeErr
= MainNotDefined
| InvalidUserTypeSig SrcPos Scheme Scheme
| CtorArityMismatch SrcPos String Int Int
| ConflictingPatVarDefs SrcPos String
| UndefCtor SrcPos String
| UndefVar SrcPos String
| InfType SrcPos Type Type TVar Type
| UnificationFailed SrcPos Type Type Type Type
| ConflictingTypeDef SrcPos String
| ConflictingCtorDef SrcPos String
| RedundantCase SrcPos
| InexhaustivePats SrcPos String
| ExternNotMonomorphic (Parsed.Id 'Parsed.Small) TVar
| FoundHole SrcPos
| RecTypeDef String SrcPos
| UndefType SrcPos String
| WrongMainType SrcPos Parsed.Scheme
| RecursiveVarDef (WithPos String)
| TypeInstArityMismatch SrcPos String Int Int
| ConflictingVarDef SrcPos String
| NoClassInstance SrcPos ClassConstraint
| FunCaseArityMismatch SrcPos Int Int
| FunArityMismatch SrcPos Int Int
| DeBruijnIndexOutOfRange SrcPos Word
| FreeVarsInData SrcPos TVar
| FreeVarsInAlias SrcPos TVar
deriving (Show)
type ClassConstraint = (String, [Type])
data Scheme = Forall
{ _scmParams :: Set TVar
, _scmConstraints :: Set ClassConstraint
, _scmBody :: Type
}
deriving (Show, Eq)
makeLenses ''Scheme
data TypedVar = TypedVar String Type
deriving (Show, Eq, Ord)
type VariantIx = Integer
type Span = Integer
data Variant = VariantIx VariantIx | VariantStr String
deriving (Show, Eq, Ord)
data Con = Con
{ variant :: Variant
, span :: Span
, argTs :: [Type]
}
deriving Show
data Pat'
= PVar TypedVar
| PWild
| PCon Con [Pat]
| PBox Pat
deriving Show
data Pat = Pat SrcPos Type Pat'
deriving Show
type Fun = ([(String, Type)], (Expr, Type))
type Cases = [(WithPos [Pat], Expr)]
type Match = WithPos ([Expr], Cases, [Type], Type)
| Whether a Var refers to a builtin virtual , or a global / local definition . So we do n't
data Virt = Virt | NonVirt deriving (Show, Eq)
type Var = (Virt, TypedVar)
data Expr
= Lit Const
| Var Var
| App Expr [Expr] Type
| If Expr Expr Expr
| Let Def Expr
| Fun Fun
| Match Match
| Ctor VariantIx Span TConst [Type]
| Sizeof Type
deriving Show
type Defs = TopologicalOrder Def
data Def = VarDef VarDef | RecDefs RecDefs deriving Show
type VarDef = (String, (Scheme, Expr))
type RecDefs = [(String, (Scheme, Fun))]
data TypeDefRhs = Data [(WithPos String, [Type])] | Alias SrcPos Type
deriving Show
type TypeDefs = Map String ([TVar], TypeDefRhs)
type TypeAliases = Map String ([TVar], Type)
type Ctors = Map String (VariantIx, (String, [TVar]), [Type], Span)
type Externs = Map String Type
instance Eq Con where
(==) (Con c1 _ _) (Con c2 _ _) = c1 == c2
instance Ord Con where
compare (Con c1 _ _) (Con c2 _ _) = compare c1 c2
ftv :: Type -> Set TVar
ftv = \case
TVar tv -> Set.singleton tv
TPrim _ -> Set.empty
TFun pts rt -> Set.unions (ftv rt : map ftv pts)
TBox t -> ftv t
TConst (_, ts) -> Set.unions (map ftv ts)
defSigs :: Def -> [(String, Scheme)]
defSigs = \case
VarDef d -> [defSig d]
RecDefs ds -> map defSig ds
defSig :: (String, (Scheme, a)) -> (String, Scheme)
defSig = second fst
|
72ec7a6c8daffad54bf24348d00d3456445c1dc0be1d92947d47115a598a5db2 | haroldcarr/learn-haskell-coq-ml-etc | P026_average.hs | module P026_average where
average :: String -> Double
average str =
let numWords = wordCount str
totalLength = sum (allLengths (words str))
in fromIntegral totalLength / fromIntegral numWords
where
wordCount :: String -> Int
wordCount = length . words
allLengths :: [String] -> [Int]
allLengths = map length
showAverage :: String -> String
showAverage str = "The average word length is: " ++ show (average str) ++ "\n"
p026 :: IO ()
p026 = repl "Enter a string: " showAverage
where
repl s f = do
putStr s
l <- getLine
putStrLn (showAverage l)
repl s f
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/idris/book/2017-Type_Driven_Development_with_Idris/src/P026_average.hs | haskell | module P026_average where
average :: String -> Double
average str =
let numWords = wordCount str
totalLength = sum (allLengths (words str))
in fromIntegral totalLength / fromIntegral numWords
where
wordCount :: String -> Int
wordCount = length . words
allLengths :: [String] -> [Int]
allLengths = map length
showAverage :: String -> String
showAverage str = "The average word length is: " ++ show (average str) ++ "\n"
p026 :: IO ()
p026 = repl "Enter a string: " showAverage
where
repl s f = do
putStr s
l <- getLine
putStrLn (showAverage l)
repl s f
|
|
13d4be3e5e018d012c55bfee3863b51101fbd0d02a1c57da91ca563a2a06612b | ekmett/ekmett.github.com | Annotation.hs | # LANGUAGE TypeOperators #
module Data.Rope.Annotation
( MonoidA(..)
, ReducerA(..)
, BreakableA(..)
) where
import Data.Rope (Rope)
class MonoidA f where
-- | build an empty 'Annotation'
emptyA :: f a
| append two annotations
appendA :: Rope -> f a -> Rope -> f b -> f c
class MonoidA f => ReducerA f where
-- | construct an 'Annotation' from a 'Rope' out of whole cloth
unitA :: Rope -> f a
-- | The 'Rope' has been updated to contains n more bytes on the right than the one used to build the 'Annotation', update the 'Annotation'
snocA :: Int -> Rope -> f a -> f b
-- | The 'Rope' contains n more bytes on the left than the one used to build the 'Annotation', update the 'Annotation'
consA :: Int -> Rope -> f a -> f b
class BreakableA f where
| split an ' Annotation ' about a ' Rope ' into two annotations , one about the first n bytes , the other about the remainder
splitAtA :: Int -> Rope -> f a -> (f b, f c)
-- | truncate the 'Annotation' to 'length' n
takeA :: Int -> Rope -> f a -> f b
| drop the first n bytes from the ' Annotation '
dropA :: Int -> Rope -> f a -> f b
takeA n r = fst . splitAtA n r
dropA n r = snd . splitAtA n r
| null | https://raw.githubusercontent.com/ekmett/ekmett.github.com/8d3abab5b66db631e148e1d046d18909bece5893/haskell/rope/Data/Rope/Annotation.hs | haskell | | build an empty 'Annotation'
| construct an 'Annotation' from a 'Rope' out of whole cloth
| The 'Rope' has been updated to contains n more bytes on the right than the one used to build the 'Annotation', update the 'Annotation'
| The 'Rope' contains n more bytes on the left than the one used to build the 'Annotation', update the 'Annotation'
| truncate the 'Annotation' to 'length' n | # LANGUAGE TypeOperators #
module Data.Rope.Annotation
( MonoidA(..)
, ReducerA(..)
, BreakableA(..)
) where
import Data.Rope (Rope)
class MonoidA f where
emptyA :: f a
| append two annotations
appendA :: Rope -> f a -> Rope -> f b -> f c
class MonoidA f => ReducerA f where
unitA :: Rope -> f a
snocA :: Int -> Rope -> f a -> f b
consA :: Int -> Rope -> f a -> f b
class BreakableA f where
| split an ' Annotation ' about a ' Rope ' into two annotations , one about the first n bytes , the other about the remainder
splitAtA :: Int -> Rope -> f a -> (f b, f c)
takeA :: Int -> Rope -> f a -> f b
| drop the first n bytes from the ' Annotation '
dropA :: Int -> Rope -> f a -> f b
takeA n r = fst . splitAtA n r
dropA n r = snd . splitAtA n r
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.