hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f743267d9229af5de9222ee84997d4cb78ad14a3 | 56,774 | exs | Elixir | lib/elixir/test/elixir/kernel/expansion_test.exs | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/expansion_test.exs | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/expansion_test.exs | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.ExpansionTarget do
  # Fixture used by the expansion tests: two zero-arity macros whose
  # expansions (the literal 17 and the literal "bar") are easy to assert on
  # after the test helper expands code that imports/requires this module.
  defmacro seventeen do
    17
  end

  defmacro bar do
    "bar"
  end
end
defmodule Kernel.ExpansionTest do
use ExUnit.Case, async: false
describe "__block__" do
# An empty block node disappears entirely: it expands to nil.
test "expands to nil when empty" do
assert expand(quote(do: unquote(:__block__)())) == nil
end
# A block wrapping a single expression collapses to that expression.
test "expands to argument when arity is 1" do
assert expand(quote(do: unquote(:__block__)(1))) == 1
end
# Nested single-expression blocks are flattened recursively into one block.
test "is recursive to argument when arity is 1" do
expanded =
quote do
_ = 1
2
end
assert expand(quote(do: unquote(:__block__)(_ = 1, unquote(:__block__)(2)))) == expanded
end
# A variable bound earlier in the block is still a variable (not a local
# zero-arity call) when referenced later in the same block.
test "accumulates vars" do
before_expansion =
quote do
a = 1
a
end
after_expansion =
quote do
a = 1
a
end
assert expand(before_expansion) == after_expansion
end
end
describe "alias" do
test "expand args, defines alias and returns itself" do
alias true, as: True
input = quote(do: alias(:hello, as: World, warn: True))
{output, env} = expand_env(input, __ENV__)
assert output == :hello
assert env.aliases == [{:"Elixir.True", true}, {:"Elixir.World", :hello}]
end
test "invalid alias" do
message =
~r"invalid value for option :as, expected a simple alias, got nested alias: Sample.Lists"
assert_raise CompileError, message, fn ->
expand(quote(do: alias(:lists, as: Sample.Lists)))
end
message = ~r"invalid argument for alias, expected a compile time atom or alias, got: 1 \+ 2"
assert_raise CompileError, message, fn ->
expand(quote(do: alias(1 + 2)))
end
message = ~r"invalid value for option :as, expected an alias, got: :foobar"
assert_raise CompileError, message, fn ->
expand(quote(do: alias(:lists, as: :foobar)))
end
end
test "invalid expansion" do
assert_raise CompileError, ~r"invalid alias: \"foo\.Foo\"", fn ->
code =
quote do
foo = :foo
foo.Foo
end
expand(code)
end
end
test "raises if :as is passed to multi-alias aliases" do
assert_raise CompileError, ~r":as option is not supported by multi-alias call", fn ->
expand(quote(do: alias(Foo.{Bar, Baz}, as: BarBaz)))
end
end
test "invalid options" do
assert_raise CompileError, ~r"unsupported option :ops given to alias", fn ->
expand(quote(do: alias(Foo, ops: 1)))
end
end
end
describe "__aliases__" do
test "expands even if no alias" do
assert expand(quote(do: World)) == :"Elixir.World"
assert expand(quote(do: Elixir.World)) == :"Elixir.World"
end
test "expands with alias" do
alias Hello, as: World
assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Hello"
end
test "expands with alias is recursive" do
alias Source, as: Hello
alias Hello, as: World
assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Source"
end
end
describe "import" do
test "raises on invalid macro" do
message = ~r"cannot import Kernel.invalid/1 because it is undefined or private"
assert_raise CompileError, message, fn ->
expand(quote(do: import(Kernel, only: [invalid: 1])))
end
end
test "raises on invalid options" do
message = ~r"invalid :only option for import, expected a keyword list with integer values"
assert_raise CompileError, message, fn ->
expand(quote(do: import(Kernel, only: [invalid: nil])))
end
message = ~r"invalid :except option for import, expected a keyword list with integer values"
assert_raise CompileError, message, fn ->
expand(quote(do: import(Kernel, except: [invalid: nil])))
end
message = ~r/invalid options for import, expected a keyword list, got: "invalid_options"/
assert_raise CompileError, message, fn ->
expand(quote(do: import(Kernel, "invalid_options")))
end
end
test "raises on conflicting options" do
message =
~r":only and :except can only be given together to import when :only is either :functions or :macros"
assert_raise CompileError, message, fn ->
expand(quote(do: import(Kernel, only: [], except: [])))
end
end
test "invalid import option" do
assert_raise CompileError, ~r"unsupported option :ops given to import", fn ->
expand(quote(do: import(:lists, ops: 1)))
end
end
test "raises for non-compile-time module" do
assert_raise CompileError, ~r"invalid argument for import, .*, got: {:a, :tuple}", fn ->
expand(quote(do: import({:a, :tuple})))
end
end
end
describe "require" do
test "raises for non-compile-time module" do
assert_raise CompileError, ~r"invalid argument for require, .*, got: {:a, :tuple}", fn ->
expand(quote(do: require({:a, :tuple})))
end
end
test "invalid options" do
assert_raise CompileError, ~r"unsupported option :ops given to require", fn ->
expand(quote(do: require(Foo, ops: 1)))
end
end
end
describe "=" do
# The left side of = is expanded in :match context, so __ENV__.context
# evaluates to :match there.
test "sets context to match" do
assert expand(quote(do: __ENV__.context = :match)) == quote(do: :match = :match)
end
# Binding a named variable in a match registers it in the returned env.
test "defines vars" do
{output, env} = expand_env(quote(do: a = 1), __ENV__)
assert output == quote(do: a = 1)
assert Macro.Env.has_var?(env, {:a, __MODULE__})
end
# The wildcard _ matches but never registers a variable in the env.
test "does not define _" do
{output, env} = expand_env(quote(do: _ = 1), __ENV__)
assert output == quote(do: _ = 1)
assert Macro.Env.vars(env) == []
end
end
describe "environment macros" do
# __MODULE__ expands to the module atom of the expansion environment.
test "__MODULE__" do
assert expand(quote(do: __MODULE__)) == __MODULE__
end
# __DIR__ expands to the directory of the current file.
test "__DIR__" do
assert expand(quote(do: __DIR__)) == __DIR__
end
# __ENV__ expands to the escaped (quoted) environment itself; line is
# pinned to 0 so the comparison does not depend on source position.
test "__ENV__" do
env = %{__ENV__ | line: 0}
assert expand_env(quote(do: __ENV__), env) == {Macro.escape(env), env}
end
# Accessing a known field on __ENV__ is resolved at expansion time;
# an unknown field is left as a runtime access on the escaped env.
test "__ENV__.accessor" do
env = %{__ENV__ | line: 0}
assert expand_env(quote(do: __ENV__.file), env) == {__ENV__.file, env}
assert expand_env(quote(do: __ENV__.unknown), env) ==
{quote(do: unquote(Macro.escape(env)).unknown), env}
end
end
describe "vars" do
# A bare identifier with no prior binding expands to a local zero-arity
# call (a -> a()) and introduces no variable into the env.
test "expand to local call" do
{output, env} = expand_env(quote(do: a), __ENV__)
assert output == quote(do: a())
assert Macro.Env.vars(env) == []
end
# var! insists the variable exists: fine after a var! binding, but a
# CompileError when no such variable was ever bound (with or without an
# explicit context module).
test "forces variable to exist" do
code =
quote do
var!(a) = 1
var!(a)
end
assert expand(code)
message = ~r"expected \"a\" to expand to an existing variable or be part of a match"
assert_raise CompileError, message, fn ->
expand(quote(do: var!(a)))
end
message =
~r"expected \"a\" \(context Unknown\) to expand to an existing variable or be part of a match"
assert_raise CompileError, message, fn ->
expand(quote(do: var!(a, Unknown)))
end
end
# _ is only meaningful inside a match; elsewhere it is a compile error.
test "raises for _ used outside of a match" do
assert_raise CompileError, ~r"invalid use of _", fn ->
expand(quote(do: {1, 2, _}))
end
end
end
describe "^" do
# Pinning an existing variable inside a match leaves the code unchanged
# (the variable is not rebound, only compared).
test "expands args" do
before_expansion =
quote do
after_expansion = 1
^after_expansion = 1
end
after_expansion =
quote do
after_expansion = 1
^after_expansion = 1
end
assert expand(before_expansion) == after_expansion
end
# ^ is only valid inside match clauses.
test "raises outside match" do
assert_raise CompileError, ~r"cannot use \^a outside of match clauses", fn ->
expand(quote(do: ^a))
end
end
# ^ must be applied to a variable, not an arbitrary expression/literal.
test "raises without var" do
message =
~r"invalid argument for unary operator \^, expected an existing variable, got: \^1"
assert_raise CompileError, message, fn ->
expand(quote(do: ^1 = 1))
end
end
# Pinning a variable that was never bound is a compile error.
test "raises when the var is undefined" do
assert_raise CompileError, ~r"undefined variable \^foo", fn ->
expand(quote(do: ^foo = :foo))
end
end
end
describe "locals" do
test "expands to remote calls" do
assert {{:., _, [Kernel, :=~]}, _, [{:a, _, []}, {:b, _, []}]} = expand(quote(do: a =~ b))
end
test "in matches" do
message = ~r"cannot invoke local foo/1 inside match, called as: foo\(:bar\)"
assert_raise CompileError, message, fn ->
expand(quote(do: foo(:bar) = :bar))
end
end
test "in guards" do
code = quote(do: fn pid when :erlang.==(pid, self) -> pid end)
expanded_code = quote(do: fn pid when :erlang.==(pid, :erlang.self()) -> pid end)
assert clean_meta(expand(code), [:import, :context]) == expanded_code
message = ~r"cannot invoke local foo/1 inside guard, called as: foo\(arg\)"
assert_raise CompileError, message, fn ->
expand(quote(do: fn arg when foo(arg) -> arg end))
end
end
test "custom imports" do
before_expansion =
quote do
import Kernel.ExpansionTarget
seventeen()
end
after_expansion =
quote do
:"Elixir.Kernel.ExpansionTarget"
17
end
assert expand(before_expansion) == after_expansion
end
end
describe "tuples" do
# Each tuple element is expanded in order: matches stay matches, while
# unbound identifiers become local zero-arity calls (a -> a()).
test "expanded as arguments" do
assert expand(quote(do: {after_expansion = 1, a})) == quote(do: {after_expansion = 1, a()})
assert expand(quote(do: {b, after_expansion = 1, a})) ==
quote(do: {b(), after_expansion = 1, a()})
end
end
describe "maps" do
test "expanded as arguments" do
assert expand(quote(do: %{a: after_expansion = 1, b: a})) ==
quote(do: %{a: after_expansion = 1, b: a()})
end
test "with variables on keys" do
assert expand(quote(do: %{(x = 1) => 1})) == quote(do: %{(x = 1) => 1})
assert_raise CompileError, ~r"illegal use of variable x inside map key match,", fn ->
expand(quote(do: %{x => 1} = %{}))
end
assert_raise CompileError, ~r"undefined variable \^x", fn ->
expand(quote(do: {x, %{^x => 1}} = %{}))
end
end
test "expects key-value pairs" do
assert_raise CompileError, ~r"expected key-value pairs in a map, got: :foo", fn ->
expand(quote(do: unquote({:%{}, [], [:foo]})))
end
end
end
defmodule User do
  # Struct fixture for the struct-expansion tests: two fields, both with
  # defaults, so %User{} expands to a fully-defaulted struct literal.
  defstruct age: 0, name: ""
end
describe "structs" do
test "expanded as arguments" do
assert expand(quote(do: %User{})) ==
quote(do: %:"Elixir.Kernel.ExpansionTest.User"{age: 0, name: ""})
assert expand(quote(do: %User{name: "john doe"})) ==
quote(do: %:"Elixir.Kernel.ExpansionTest.User"{age: 0, name: "john doe"})
end
test "expects atoms" do
expand(quote(do: %unknown{a: 1} = x))
message = ~r"expected struct name to be a compile time atom or alias"
assert_raise CompileError, message, fn ->
expand(quote(do: %unknown{a: 1}))
end
message = ~r"expected struct name to be a compile time atom or alias"
assert_raise CompileError, message, fn ->
expand(quote(do: %unquote(1){a: 1}))
end
message = ~r"expected struct name in a match to be a compile time atom, alias or a variable"
assert_raise CompileError, message, fn ->
expand(quote(do: %unquote(1){a: 1} = x))
end
end
test "update syntax" do
expand(quote(do: %{%{a: 0} | a: 1}))
assert_raise CompileError, ~r"cannot use map/struct update syntax in match", fn ->
expand(quote(do: %{%{a: 0} | a: 1} = %{}))
end
end
test "dynamic syntax expands to itself" do
assert expand(quote(do: %x{} = 1)) == quote(do: %x{} = 1)
end
test "unknown ^keys in structs" do
message = ~r"unknown key \^my_key for struct Kernel\.ExpansionTest\.User"
assert_raise CompileError, message, fn ->
code =
quote do
my_key = :my_key
%User{^my_key => :my_value} = %{}
end
expand(code)
end
end
end
describe "quote" do
test "expanded to raw forms" do
assert expand(quote(do: quote(do: hello))) == {:{}, [], [:hello, [], __MODULE__]}
end
test "raises if the :context option is nil or not a compile-time module" do
assert_raise CompileError, ~r"invalid :context for quote, .*, got: :erlang\.self\(\)", fn ->
expand(quote(do: quote(context: self(), do: :ok)))
end
assert_raise CompileError, ~r"invalid :context for quote, .*, got: nil", fn ->
expand(quote(do: quote(context: nil, do: :ok)))
end
end
test "raises for missing do" do
assert_raise CompileError, ~r"missing :do option in \"quote\"", fn ->
expand(quote(do: quote(context: Foo)))
end
end
test "raises for invalid arguments" do
assert_raise CompileError, ~r"invalid arguments for \"quote\"", fn ->
expand(quote(do: quote(1 + 1)))
end
end
test "raises unless its options are a keyword list" do
assert_raise CompileError, ~r"invalid options for quote, expected a keyword list", fn ->
expand(quote(do: quote(:foo, do: :foo)))
end
end
end
describe "anonymous calls" do
  # Calling an anonymous value (a.(b)) expands both the callee and its
  # arguments: unbound identifiers become local zero-arity calls.
  test "expands base and args" do
    assert expand(quote(do: a.(b))) == quote(do: a().(b()))
  end

  # An atom cannot be used as an anonymous-call target. The regex escapes
  # the `.` and `()` metacharacters so the error message ":foo.()" is
  # matched literally, consistent with the other assertions in this file.
  test "raises on atom base" do
    assert_raise CompileError, ~r"invalid function call :foo\.\(\)", fn ->
      expand(quote(do: :foo.(a)))
    end
  end
end
describe "remotes" do
test "expands to Erlang" do
assert expand(quote(do: Kernel.is_atom(a))) == quote(do: :erlang.is_atom(a()))
end
test "expands macros" do
assert expand(quote(do: Kernel.ExpansionTest.thirteen())) == 13
end
test "expands receiver and args" do
assert expand(quote(do: a.is_atom(b))) == quote(do: a().is_atom(b()))
assert expand(quote(do: (after_expansion = :foo).is_atom(a))) ==
quote(do: (after_expansion = :foo).is_atom(a()))
end
test "modules must be required for macros" do
before_expansion =
quote do
require Kernel.ExpansionTarget
Kernel.ExpansionTarget.seventeen()
end
after_expansion =
quote do
:"Elixir.Kernel.ExpansionTarget"
17
end
assert expand(before_expansion) == after_expansion
end
test "raises when not required" do
msg =
~r"you must require Kernel\.ExpansionTarget before invoking the macro Kernel\.ExpansionTarget\.seventeen/0"
assert_raise CompileError, msg, fn ->
expand(quote(do: Kernel.ExpansionTarget.seventeen()))
end
end
test "in matches" do
message =
~r"cannot invoke remote function Hello.something_that_does_not_exist/0 inside match"
assert_raise CompileError, message, fn ->
expand(quote(do: Hello.something_that_does_not_exist() = :foo))
end
message = ~r"cannot invoke remote function :erlang.make_ref/0 inside match"
assert_raise CompileError, message, fn -> expand(quote(do: make_ref() = :foo)) end
end
test "in guards" do
message =
~r"cannot invoke remote function Hello.something_that_does_not_exist/1 inside guard"
assert_raise CompileError, message, fn ->
expand(quote(do: fn arg when Hello.something_that_does_not_exist(arg) -> arg end))
end
message = ~r"cannot invoke remote function :erlang.make_ref/0 inside guard"
assert_raise CompileError, message, fn ->
expand(quote(do: fn arg when make_ref() -> arg end))
end
end
end
describe "comprehensions" do
test "variables do not leak with enums" do
before_expansion =
quote do
for(a <- b, do: c = 1)
c
end
after_expansion =
quote do
for(a <- b(), do: c = 1)
c()
end
assert expand(before_expansion) == after_expansion
end
test "variables do not leak with binaries" do
before_expansion =
quote do
for(<<a <- b>>, do: c = 1)
c
end
after_expansion =
quote do
for(<<(<<a::integer()>> <- b())>>, do: c = 1)
c()
end
assert expand(before_expansion) |> clean_meta([:alignment]) == after_expansion
end
test "variables inside filters are available in blocks" do
assert expand(quote(do: for(a <- b, c = a, do: c))) ==
quote(do: for(a <- b(), c = a, do: c))
end
test "variables inside options do not leak" do
before_expansion =
quote do
for(a <- c = b, into: [], do: 1)
c
end
after_expansion =
quote do
for(a <- c = b(), do: 1, into: [])
c()
end
assert expand(before_expansion) == after_expansion
before_expansion =
quote do
for(a <- b, into: c = [], do: 1)
c
end
after_expansion =
quote do
for(a <- b(), do: 1, into: c = [])
c()
end
assert expand(before_expansion) == after_expansion
end
test "must start with generators" do
assert_raise CompileError, ~r"for comprehensions must start with a generator", fn ->
expand(quote(do: for(is_atom(:foo), do: :foo)))
end
assert_raise CompileError, ~r"for comprehensions must start with a generator", fn ->
expand(quote(do: for(do: :foo)))
end
end
test "requires size on binary generators" do
message = ~r"a binary field without size is only allowed at the end of a binary pattern"
assert_raise CompileError, message, fn ->
expand(quote(do: for(<<x::binary <- "123">>, do: x)))
end
end
test "require do option" do
assert_raise CompileError, ~r"missing :do option in \"for\"", fn ->
expand(quote(do: for(_ <- 1..2)))
end
end
test "uniq option is boolean" do
message = ~r":uniq option for comprehensions only accepts a boolean, got: x"
assert_raise CompileError, message, fn ->
expand(quote(do: for(x <- 1..2, uniq: x, do: x)))
end
end
test "raise error for unknown options" do
assert_raise CompileError, ~r"unsupported option :else given to for", fn ->
expand(quote(do: for(_ <- 1..2, do: 1, else: 1)))
end
assert_raise CompileError, ~r"unsupported option :other given to for", fn ->
expand(quote(do: for(_ <- 1..2, do: 1, other: 1)))
end
end
end
describe "with" do
test "variables do not leak" do
before_expansion =
quote do
with({foo} <- {bar}, do: baz = :ok)
baz
end
after_expansion =
quote do
with({foo} <- {bar()}, do: baz = :ok)
baz()
end
assert expand(before_expansion) == after_expansion
end
test "variables are available in do option" do
before_expansion =
quote do
with({foo} <- {bar}, do: baz = foo)
baz
end
after_expansion =
quote do
with({foo} <- {bar()}, do: baz = foo)
baz()
end
assert expand(before_expansion) == after_expansion
end
test "variables inside else do not leak" do
before_expansion =
quote do
with({foo} <- {bar}, do: :ok, else: (baz -> baz))
baz
end
after_expansion =
quote do
with({foo} <- {bar()}, do: :ok, else: (baz -> baz))
baz()
end
assert expand(before_expansion) == after_expansion
end
test "fails if \"do\" is missing" do
assert_raise CompileError, ~r"missing :do option in \"with\"", fn ->
expand(quote(do: with(_ <- true, [])))
end
end
test "fails on invalid else option" do
assert_raise CompileError, ~r"expected -> clauses for :else in \"with\"", fn ->
expand(quote(do: with(_ <- true, do: :ok, else: [:error])))
end
assert_raise CompileError, ~r"expected -> clauses for :else in \"with\"", fn ->
expand(quote(do: with(_ <- true, do: :ok, else: :error)))
end
end
test "fails for invalid options" do
# Only the required "do" is present alongside the unexpected option.
assert_raise CompileError, ~r"unexpected option :foo in \"with\"", fn ->
expand(quote(do: with(_ <- true, foo: :bar, do: :ok)))
end
# More options are present alongside the unexpected option.
assert_raise CompileError, ~r"unexpected option :foo in \"with\"", fn ->
expand(quote(do: with(_ <- true, do: :ok, else: (_ -> :ok), foo: :bar)))
end
end
end
describe "&" do
test "keeps locals" do
assert expand(quote(do: &unknown/2)) == {:&, [], [{:/, [], [{:unknown, [], nil}, 2]}]}
assert expand(quote(do: &unknown(&1, &2))) == {:&, [], [{:/, [], [{:unknown, [], nil}, 2]}]}
end
test "expands remotes" do
assert expand(quote(do: &List.flatten/2)) ==
quote(do: &:"Elixir.List".flatten/2) |> clean_meta([:import, :context])
assert expand(quote(do: &Kernel.is_atom/1)) ==
quote(do: &:erlang.is_atom/1) |> clean_meta([:import, :context])
end
test "expands macros" do
before_expansion =
quote do
require Kernel.ExpansionTarget
&Kernel.ExpansionTarget.seventeen/0
end
after_expansion =
quote do
:"Elixir.Kernel.ExpansionTarget"
fn -> 17 end
end
assert expand(before_expansion) == after_expansion
end
test "fails on non-continuous" do
assert_raise CompileError, ~r"capture &0 is not allowed", fn ->
expand(quote(do: &foo(&0)))
end
assert_raise CompileError, ~r"capture &2 cannot be defined without &1", fn ->
expand(quote(do: & &2))
end
assert_raise CompileError, ~r"capture &255 cannot be defined without &1", fn ->
expand(quote(do: & &255))
end
end
test "fails on block" do
message = ~r"invalid args for &, block expressions are not allowed, got: \(\n 1\n 2\n\)"
assert_raise CompileError, message, fn ->
code =
quote do
&(
1
2
)
end
expand(code)
end
end
test "fails on other types" do
message =
~r"invalid args for &, expected an expression in the format of &Mod.fun/arity, &local/arity or a capture containing at least one argument as &1, got: :foo"
assert_raise CompileError, message, fn ->
expand(quote(do: &:foo))
end
end
test "fails on invalid arity" do
message = ~r"invalid arity for &, expected a number between 0 and 255, got: 256"
assert_raise CompileError, message, fn ->
expand(quote(do: &Mod.fun/256))
end
end
test "fails when no captures" do
message =
~r"invalid args for &, expected an expression in the format of &Mod.fun/arity, &local/arity or a capture containing at least one argument as &1, got: foo()"
assert_raise CompileError, message, fn ->
expand(quote(do: &foo()))
end
end
test "fails on nested capture" do
assert_raise CompileError, ~r"nested captures via & are not allowed: &\(&1\)", fn ->
expand(quote(do: &(& &1)))
end
end
test "fails on integers" do
assert_raise CompileError, ~r"unhandled &1 outside of a capture", fn ->
expand(quote(do: &1))
end
end
end
describe "fn" do
test "expands each clause" do
before_expansion =
quote do
fn
x -> x
_ -> x
end
end
after_expansion =
quote do
fn
x -> x
_ -> x()
end
end
assert expand(before_expansion) == after_expansion
end
test "does not share lexical scope between clauses" do
before_expansion =
quote do
fn
1 -> import List
2 -> flatten([1, 2, 3])
end
end
after_expansion =
quote do
fn
1 -> :"Elixir.List"
2 -> flatten([1, 2, 3])
end
end
assert expand(before_expansion) == after_expansion
end
test "expands guards" do
assert expand(quote(do: fn x when x when __ENV__.context -> true end)) ==
quote(do: fn x when x when :guard -> true end)
end
test "does not leak vars" do
before_expansion =
quote do
fn x -> x end
x
end
after_expansion =
quote do
fn x -> x end
x()
end
assert expand(before_expansion) == after_expansion
end
test "raises on mixed arities" do
message = ~r"cannot mix clauses with different arities in anonymous functions"
assert_raise CompileError, message, fn ->
code =
quote do
fn
x -> x
x, y -> x + y
end
end
expand(code)
end
end
end
describe "cond" do
test "expands each clause" do
before_expansion =
quote do
cond do
x = 1 -> x
true -> x
end
end
after_expansion =
quote do
cond do
x = 1 -> x
true -> x()
end
end
assert expand(before_expansion) == after_expansion
end
test "does not share lexical scope between clauses" do
before_expansion =
quote do
cond do
1 -> import List
2 -> flatten([1, 2, 3])
end
end
after_expansion =
quote do
cond do
1 -> :"Elixir.List"
2 -> flatten([1, 2, 3])
end
end
assert expand(before_expansion) == after_expansion
end
test "does not leaks vars on head" do
before_expansion =
quote do
cond do
x = 1 -> x
y = 2 -> y
end
:erlang.+(x, y)
end
after_expansion =
quote do
cond do
x = 1 -> x
y = 2 -> y
end
:erlang.+(x(), y())
end
assert expand(before_expansion) == after_expansion
end
test "does not leak vars" do
before_expansion =
quote do
cond do
1 -> x = 1
2 -> y = 2
end
:erlang.+(x, y)
end
after_expansion =
quote do
cond do
1 -> x = 1
2 -> y = 2
end
:erlang.+(x(), y())
end
assert expand(before_expansion) == after_expansion
end
test "expects exactly one do" do
assert_raise CompileError, ~r"missing :do option in \"cond\"", fn ->
expand(quote(do: cond([])))
end
assert_raise CompileError, ~r"duplicated :do clauses given for \"cond\"", fn ->
expand(quote(do: cond(do: (x -> x), do: (y -> y))))
end
end
test "expects clauses" do
assert_raise CompileError, ~r"expected -> clauses for :do in \"cond\"", fn ->
expand(quote(do: cond(do: :ok)))
end
assert_raise CompileError, ~r"expected -> clauses for :do in \"cond\"", fn ->
expand(quote(do: cond(do: [:not, :clauses])))
end
end
test "expects one argument in clauses" do
assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"cond\"", fn ->
code =
quote do
cond do
_, _ -> :ok
end
end
expand(code)
end
end
test "raises for invalid arguments" do
assert_raise CompileError, ~r"invalid arguments for \"cond\"", fn ->
expand(quote(do: cond(:foo)))
end
end
test "raises with invalid options" do
assert_raise CompileError, ~r"unexpected option :foo in \"cond\"", fn ->
expand(quote(do: cond(do: (1 -> 1), foo: :bar)))
end
end
test "raises for _ in clauses" do
message = ~r"invalid use of _ inside \"cond\"\. If you want the last clause"
assert_raise CompileError, message, fn ->
code =
quote do
cond do
x -> x
_ -> :raise
end
end
expand(code)
end
end
end
describe "case" do
test "expands each clause" do
before_expansion =
quote do
case w do
x -> x
_ -> x
end
end
after_expansion =
quote do
case w() do
x -> x
_ -> x()
end
end
assert expand(before_expansion) == after_expansion
end
test "does not share lexical scope between clauses" do
before_expansion =
quote do
case w do
1 -> import List
2 -> flatten([1, 2, 3])
end
end
after_expansion =
quote do
case w() do
1 -> :"Elixir.List"
2 -> flatten([1, 2, 3])
end
end
assert expand(before_expansion) == after_expansion
end
test "expands guards" do
before_expansion =
quote do
case w do
x when x when __ENV__.context -> true
end
end
after_expansion =
quote do
case w() do
x when x when :guard -> true
end
end
assert expand(before_expansion) == after_expansion
end
test "does not leaks vars on head" do
before_expansion =
quote do
case w do
x -> x
y -> y
end
:erlang.+(x, y)
end
after_expansion =
quote do
case w() do
x -> x
y -> y
end
:erlang.+(x(), y())
end
assert expand(before_expansion) == after_expansion
end
test "does not leak vars" do
before_expansion =
quote do
case w do
x -> x = x
y -> y = y
end
:erlang.+(x, y)
end
after_expansion =
quote do
case w() do
x -> x = x
y -> y = y
end
:erlang.+(x(), y())
end
assert expand(before_expansion) == after_expansion
end
test "expects exactly one do" do
assert_raise CompileError, ~r"missing :do option in \"case\"", fn ->
expand(quote(do: case(e, [])))
end
assert_raise CompileError, ~r"duplicated :do clauses given for \"case\"", fn ->
expand(quote(do: case(e, do: (x -> x), do: (y -> y))))
end
end
test "expects clauses" do
assert_raise CompileError, ~r"expected -> clauses for :do in \"case\"", fn ->
code =
quote do
case e do
x
end
end
expand(code)
end
assert_raise CompileError, ~r"expected -> clauses for :do in \"case\"", fn ->
code =
quote do
case e do
[:not, :clauses]
end
end
expand(code)
end
end
test "expects exactly one argument in clauses" do
assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"case\"", fn ->
code =
quote do
case e do
_, _ -> :ok
end
end
expand(code)
end
end
test "fails with invalid arguments" do
assert_raise CompileError, ~r"invalid arguments for \"case\"", fn ->
expand(quote(do: case(:foo, :bar)))
end
end
test "fails for invalid options" do
assert_raise CompileError, ~r"unexpected option :foo in \"case\"", fn ->
expand(quote(do: case(e, do: (x -> x), foo: :bar)))
end
end
end
describe "receive" do
test "expands each clause" do
before_expansion =
quote do
receive do
x -> x
_ -> x
end
end
after_expansion =
quote do
receive do
x -> x
_ -> x()
end
end
assert expand(before_expansion) == after_expansion
end
test "does not share lexical scope between clauses" do
before_expansion =
quote do
receive do
1 -> import List
2 -> flatten([1, 2, 3])
end
end
after_expansion =
quote do
receive do
1 -> :"Elixir.List"
2 -> flatten([1, 2, 3])
end
end
assert expand(before_expansion) == after_expansion
end
test "expands guards" do
before_expansion =
quote do
receive do
x when x when __ENV__.context -> true
end
end
after_expansion =
quote do
receive do
x when x when :guard -> true
end
end
assert expand(before_expansion) == after_expansion
end
test "does not leaks clause vars" do
before_expansion =
quote do
receive do
x -> x
y -> y
end
:erlang.+(x, y)
end
after_expansion =
quote do
receive do
x -> x
y -> y
end
:erlang.+(x(), y())
end
assert expand(before_expansion) == after_expansion
end
test "does not leak vars" do
before_expansion =
quote do
receive do
x -> x = x
y -> y = y
end
:erlang.+(x, y)
end
after_expansion =
quote do
receive do
x -> x = x
y -> y = y
end
:erlang.+(x(), y())
end
assert expand(before_expansion) == after_expansion
end
test "does not leak vars on after" do
before_expansion =
quote do
receive do
x -> x = x
after
y ->
y
w = y
end
:erlang.+(x, w)
end
after_expansion =
quote do
receive do
x -> x = x
after
y() ->
y()
w = y()
end
:erlang.+(x(), w())
end
assert expand(before_expansion) == after_expansion
end
test "expects exactly one do or after" do
assert_raise CompileError, ~r"missing :do/:after option in \"receive\"", fn ->
expand(quote(do: receive([])))
end
assert_raise CompileError, ~r"duplicated :do clauses given for \"receive\"", fn ->
expand(quote(do: receive(do: (x -> x), do: (y -> y))))
end
assert_raise CompileError, ~r"duplicated :after clauses given for \"receive\"", fn ->
code =
quote do
receive do
x -> x
after
y -> y
after
z -> z
end
end
expand(code)
end
end
test "expects clauses" do
assert_raise CompileError, ~r"expected -> clauses for :do in \"receive\"", fn ->
code =
quote do
receive do
x
end
end
expand(code)
end
assert_raise CompileError, ~r"expected -> clauses for :do in \"receive\"", fn ->
code =
quote do
receive do
[:not, :clauses]
end
end
expand(code)
end
end
test "expects on argument for do/after clauses" do
assert_raise CompileError, ~r"expected one arg for :do clauses \(->\) in \"receive\"", fn ->
code =
quote do
receive do
_, _ -> :ok
end
end
expand(code)
end
message = ~r"expected one arg for :after clauses \(->\) in \"receive\""
assert_raise CompileError, message, fn ->
code =
quote do
receive do
x -> x
after
_, _ -> :ok
end
end
expand(code)
end
end
test "expects a single clause for \"after\"" do
assert_raise CompileError, ~r"expected a single -> clause for :after in \"receive\"", fn ->
code =
quote do
receive do
x -> x
after
1 -> y
2 -> z
end
end
expand(code)
end
end
test "raises for invalid arguments" do
assert_raise CompileError, ~r"invalid arguments for \"receive\"", fn ->
expand(quote(do: receive(:foo)))
end
end
test "raises with invalid options" do
assert_raise CompileError, ~r"unexpected option :foo in \"receive\"", fn ->
expand(quote(do: receive(do: (x -> x), foo: :bar)))
end
end
end
# Expansion of `try/1`: each section (:catch, :after, :else, :rescue) is
# expanded in its own scope, duplicate/missing options are validated, and
# clause shapes (arity of `->` heads) are checked. Expanded ASTs are compared
# structurally against hand-written expectations.
describe "try" do
  test "expands catch" do
    before_expansion =
      quote do
        try do
          x
        catch
          x, y -> z = :erlang.+(x, y)
        end

        z
      end

    # Variables bound inside `try` do not leak, so the trailing bare `z`
    # expands to the call `z()`; same for the unbound `x`.
    after_expansion =
      quote do
        try do
          x()
        catch
          x, y -> z = :erlang.+(x, y)
        end

        z()
      end

    assert expand(before_expansion) == after_expansion
  end

  test "expands after" do
    before_expansion =
      quote do
        try do
          x
        after
          z = y
        end

        z
      end

    after_expansion =
      quote do
        try do
          x()
        after
          z = y()
        end

        z()
      end

    assert expand(before_expansion) == after_expansion
  end

  test "expands else" do
    before_expansion =
      quote do
        try do
          x
        else
          z -> z
        end

        z
      end

    after_expansion =
      quote do
        try do
          x()
        else
          z -> z
        end

        z()
      end

    assert expand(before_expansion) == after_expansion
  end

  test "expands rescue" do
    before_expansion =
      quote do
        try do
          x
        rescue
          x -> x
          Error -> x
        end

        x
      end

    # A bare alias in `rescue` is rewritten into an `in` check against the
    # expanded module name (:"Elixir.Error").
    after_expansion =
      quote do
        try do
          x()
        rescue
          x -> x
          unquote(:in)(_, [:"Elixir.Error"]) -> x()
        end

        x()
      end

    assert expand(before_expansion) == after_expansion
  end

  test "expects more than do" do
    # A `try` with only a :do block is pointless and rejected.
    assert_raise CompileError, ~r"missing :catch/:rescue/:after/:else option in \"try\"", fn ->
      code =
        quote do
          try do
            x = y
          end

          x
        end

      expand(code)
    end
  end

  test "raises if do is missing" do
    assert_raise CompileError, ~r"missing :do option in \"try\"", fn ->
      expand(quote(do: try([])))
    end
  end

  test "expects at most one clause" do
    # Each of :do/:rescue/:after/:else/:catch may appear at most once.
    assert_raise CompileError, ~r"duplicated :do clauses given for \"try\"", fn ->
      expand(quote(do: try(do: e, do: f)))
    end

    assert_raise CompileError, ~r"duplicated :rescue clauses given for \"try\"", fn ->
      code =
        quote do
          try do
            e
          rescue
            x -> x
          rescue
            y -> y
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"duplicated :after clauses given for \"try\"", fn ->
      code =
        quote do
          try do
            e
          after
            x = y
          after
            x = y
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"duplicated :else clauses given for \"try\"", fn ->
      code =
        quote do
          try do
            e
          else
            x -> x
          else
            y -> y
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"duplicated :catch clauses given for \"try\"", fn ->
      code =
        quote do
          try do
            e
          catch
            x -> x
          catch
            y -> y
          end
        end

      expand(code)
    end
  end

  test "raises with invalid arguments" do
    assert_raise CompileError, ~r"invalid arguments for \"try\"", fn ->
      expand(quote(do: try(:foo)))
    end
  end

  test "raises with invalid options" do
    assert_raise CompileError, ~r"unexpected option :foo in \"try\"", fn ->
      expand(quote(do: try(do: x, foo: :bar)))
    end
  end

  test "expects exactly one argument in rescue clauses" do
    assert_raise CompileError, ~r"expected one arg for :rescue clauses \(->\) in \"try\"", fn ->
      code =
        quote do
          try do
            x
          rescue
            _, _ -> :ok
          end
        end

      expand(code)
    end
  end

  test "expects an alias, a variable, or \"var in [alias]\" as the argument of rescue clauses" do
    assert_raise CompileError, ~r"invalid \"rescue\" clause\. The clause should match", fn ->
      code =
        quote do
          try do
            x
          rescue
            function(:call) -> :ok
          end
        end

      expand(code)
    end
  end

  test "expects one or two args for catch clauses" do
    message = ~r"expected one or two args for :catch clauses \(->\) in \"try\""

    assert_raise CompileError, message, fn ->
      code =
        quote do
          try do
            x
          catch
            _, _, _ -> :ok
          end
        end

      expand(code)
    end
  end

  test "expects clauses for rescue, else, catch" do
    # Each section body must be a list of `->` clauses; a bare expression or
    # a plain list is rejected.
    assert_raise CompileError, ~r"expected -> clauses for :rescue in \"try\"", fn ->
      code =
        quote do
          try do
            e
          rescue
            x
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"expected -> clauses for :rescue in \"try\"", fn ->
      code =
        quote do
          try do
            e
          rescue
            [:not, :clauses]
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"expected -> clauses for :catch in \"try\"", fn ->
      code =
        quote do
          try do
            e
          catch
            x
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"expected -> clauses for :catch in \"try\"", fn ->
      code =
        quote do
          try do
            e
          catch
            [:not, :clauses]
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"expected -> clauses for :else in \"try\"", fn ->
      code =
        quote do
          try do
            e
          else
            x
          end
        end

      expand(code)
    end

    assert_raise CompileError, ~r"expected -> clauses for :else in \"try\"", fn ->
      code =
        quote do
          try do
            e
          else
            [:not, :clauses]
          end
        end

      expand(code)
    end
  end
end
# Expansion of bitstring (<<>>) construction and matching: type/size/unit
# specifier inference, merging of nested binaries, and validation errors for
# malformed segments. `clean_meta([:alignment])` strips the alignment
# metadata the expander adds so ASTs can be compared structurally.
describe "bitstrings" do
  test "nested match" do
    assert expand(quote(do: <<foo = bar>>)) |> clean_meta([:alignment]) ==
             quote(do: <<foo = bar()::integer()>>)

    assert expand(quote(do: <<?-, <<_, _::binary>> = rest()::binary>>))
           |> clean_meta([:alignment]) ==
             quote(do: <<45::integer(), <<_::integer(), _::binary()>> = rest()::binary()>>)

    # Nested `=` is only allowed when the outer bitstring is not itself
    # already part of a match.
    message = ~r"cannot pattern match inside a bitstring that is already in match"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<bar = baz>> = foo()))
    end

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<?-, <<_, _::binary>> = rest::binary>> = foo()))
    end
  end

  test "inlines binaries inside interpolation" do
    import Kernel.ExpansionTarget

    # bar() is a macro expanding to the literal "bar", so the interpolation
    # collapses into two binary literal segments at expansion time.
    assert expand(quote(do: "foo#{bar()}" = "foobar")) |> clean_meta([:alignment]) ==
             quote(do: <<"foo"::binary(), "bar"::binary()>> = "foobar")
  end

  test "expands size * unit" do
    # `-` is part of the specifier syntax; un-importing Kernel.-/2 proves the
    # expander does not rely on it.
    import Kernel, except: [-: 2]

    assert expand(quote(do: <<x::13>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer()-size(13)>>)

    assert expand(quote(do: <<x::13*6>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer()-unit(6)-size(13)>>)

    assert expand(quote(do: <<x::_*6-binary>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::binary()-unit(6)>>)

    assert expand(quote(do: <<x::13*6-binary>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::binary()-unit(6)-size(13)>>)

    assert expand(quote(do: <<x::binary-(13 * 6)-binary>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::binary()-unit(6)-size(13)>>)
  end

  test "expands binary/bitstring specifiers" do
    import Kernel, except: [-: 2]

    assert expand(quote(do: <<x::binary>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::binary()>>)

    # :bytes and :bits are aliases for :binary and :bitstring respectively.
    assert expand(quote(do: <<x::bytes>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::binary()>>)

    assert expand(quote(do: <<x::bitstring>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::bitstring()>>)

    assert expand(quote(do: <<x::bits>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::bitstring()>>)

    # Endianness is meaningless for binaries and is dropped.
    assert expand(quote(do: <<x::binary-little>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::binary()>>)

    message = ~r"signed and unsigned specifiers are supported only on integer and float type"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<x()::binary-signed>>))
    end
  end

  test "expands utf* specifiers" do
    import Kernel, except: [-: 2]

    assert expand(quote(do: <<x::utf8>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::utf8()>>)

    assert expand(quote(do: <<x::utf16>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::utf16()>>)

    assert expand(quote(do: <<x::utf32-little>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::utf32()-little()>>)

    message = ~r"signed and unsigned specifiers are supported only on integer and float type"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<x()::utf8-signed>>))
    end

    assert_raise CompileError, ~r"size and unit are not supported on utf types", fn ->
      expand(quote(do: <<x()::utf8-size(32)>>))
    end
  end

  test "expands numbers specifiers" do
    import Kernel, except: [-: 2]

    assert expand(quote(do: <<x::integer>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer()>>)

    # Endianness/signedness without a type defaults the type to integer.
    assert expand(quote(do: <<x::little>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer()-little()>>)

    assert expand(quote(do: <<x::signed>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer()-signed()>>)

    assert expand(quote(do: <<x::signed-native>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer()-native()-signed()>>)

    assert expand(quote(do: <<x::float-signed-native>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::float()-native()-signed()>>)

    message =
      ~r"integer and float types require a size specifier if the unit specifier is given"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<x::unit(8)>>))
    end
  end

  test "expands macro specifiers" do
    import Kernel, except: [-: 2]
    import Kernel.ExpansionTarget

    # seventeen() is a macro expanding to 17, so it can appear as a size.
    assert expand(quote(do: <<x::seventeen>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer()-size(17)>>)

    # In the second segment, `seventeen` is a variable bound by the first
    # segment of the match, not the macro.
    assert expand(quote(do: <<seventeen::seventeen, x::size(seventeen)>> = 1))
           |> clean_meta([:alignment]) ==
             quote(do: <<seventeen::integer()-size(17), x::integer()-size(seventeen)>> = 1)
  end

  test "expands macro in args" do
    import Kernel, except: [-: 2]

    before_expansion =
      quote do
        require Kernel.ExpansionTarget
        <<x::size(Kernel.ExpansionTarget.seventeen())>>
      end

    # `require` expands to the bare module atom.
    after_expansion =
      quote do
        :"Elixir.Kernel.ExpansionTarget"
        <<x()::integer()-size(17)>>
      end

    assert expand(before_expansion) |> clean_meta([:alignment]) == after_expansion
  end

  test "supports dynamic size" do
    import Kernel, except: [-: 2]

    before_expansion =
      quote do
        var = 1
        <<x::size(var)-unit(8)>>
      end

    after_expansion =
      quote do
        var = 1
        <<x()::integer()-unit(8)-size(var)>>
      end

    assert expand(before_expansion) |> clean_meta([:alignment]) == after_expansion
  end

  test "merges bitstrings" do
    import Kernel, except: [-: 2]

    # A nested bitstring literal is flattened into the enclosing one.
    assert expand(quote(do: <<x, <<y::signed-native>>, z>>)) |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer(), y()::integer()-native()-signed(), z()::integer()>>)

    assert expand(quote(do: <<x, <<y::signed-native>>::bitstring, z>>))
           |> clean_meta([:alignment]) ==
             quote(do: <<x()::integer(), y()::integer()-native()-signed(), z()::integer()>>)
  end

  test "merges binaries" do
    import Kernel, except: [-: 2]

    # `<>` concatenation is rewritten into a single bitstring literal.
    assert expand(quote(do: "foo" <> x)) |> clean_meta([:alignment]) ==
             quote(do: <<"foo"::binary(), x()::binary()>>)

    assert expand(quote(do: "foo" <> <<x::size(4), y::size(4)>>)) |> clean_meta([:alignment]) ==
             quote(do: <<"foo"::binary(), x()::integer()-size(4), y()::integer()-size(4)>>)

    assert expand(quote(do: <<"foo", <<x::size(4), y::size(4)>>::binary>>))
           |> clean_meta([:alignment]) ==
             quote(do: <<"foo"::binary(), x()::integer()-size(4), y()::integer()-size(4)>>)
  end

  test "raises on unaligned binaries in match" do
    message = ~r"cannot verify size of binary expression in match"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<rest::bits>> <> _ = "foo"))
    end

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<rest::size(3)>> <> _ = "foo"))
    end
  end

  test "raises on size or unit for literal bitstrings" do
    message = ~r"literal <<>> in bitstring supports only type specifiers"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<(<<"foo">>::32)>>))
    end
  end

  test "raises on size or unit for literal strings" do
    message = ~r"literal string in bitstring supports only endianness and type specifiers"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<"foo"::32>>))
    end
  end

  test "raises for invalid size * unit for floats" do
    message = ~r"float requires size\*unit to be 32 or 64 \(default\), got: 128"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<12.3::32*4>>))
    end

    message = ~r"float requires size\*unit to be 32 or 64 \(default\), got: 256"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<12.3::256>>))
    end
  end

  test "raises for invalid size" do
    message = ~r"size in bitstring expects an integer or a variable as argument, got: :oops"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<"foo"::size(:oops)>>))
    end
  end

  test "raises for invalid unit" do
    message = ~r"unit in bitstring expects an integer as argument, got: :oops"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<"foo"::size(8)-unit(:oops)>>))
    end
  end

  test "raises for unknown specifier" do
    assert_raise CompileError, ~r"unknown bitstring specifier: unknown()", fn ->
      expand(quote(do: <<1::unknown>>))
    end
  end

  test "raises for conflicting specifiers" do
    assert_raise CompileError, ~r"conflicting endianness specification for bit field", fn ->
      expand(quote(do: <<1::little-big>>))
    end

    assert_raise CompileError, ~r"conflicting unit specification for bit field", fn ->
      expand(quote(do: <<x::bitstring-unit(2)>>))
    end
  end

  test "raises for invalid literals" do
    assert_raise CompileError, ~r"invalid literal :foo in <<>>", fn ->
      expand(quote(do: <<:foo>>))
    end

    assert_raise CompileError, ~r"invalid literal \[\] in <<>>", fn ->
      expand(quote(do: <<[]::size(8)>>))
    end
  end

  test "raises on binary fields with size in matches" do
    # Sized binary segments before the tail are fine...
    assert expand(quote(do: <<x::binary-size(3), y::binary>> = "foobar"))

    # ...but an unsized binary segment is only allowed in last position.
    message = ~r"a binary field without size is only allowed at the end of a binary pattern"

    assert_raise CompileError, message, fn ->
      expand(quote(do: <<x::binary, y::binary>> = "foobar"))
    end
  end
end
# `a -1` could be either a call `a(-1)` or the subtraction `a - 1`; when a
# variable `a` is in scope the compiler refuses to guess.
describe "op ambiguity" do
  test "raises when a call is ambiguous" do
    message = ~r["a -1" looks like a function call but there is a variable named "a"]

    assert_raise CompileError, message, fn ->
      # We use string_to_quoted! here to avoid the formatter adding parentheses to "a -1".
      code =
        Code.string_to_quoted!("""
        a = 1
        a -1
        """)

      expand(code)
    end
  end
end
# Grab-bag of malformed quoted expressions: non-AST terms injected via
# unquote, calls used as patterns, invalid guards, chained/dot calls on
# non-callables, and a stray `->` outside of a clause context.
test "handles invalid expressions" do
  assert_raise CompileError, ~r"invalid quoted expression: {1, 2, 3}", fn ->
    expand(quote(do: unquote({1, 2, 3})))
  end

  assert_raise CompileError, ~r"invalid quoted expression: #Function<", fn ->
    expand(quote(do: unquote({:sample, fn -> nil end})))
  end

  # Operators like && are not valid inside patterns.
  assert_raise CompileError, ~r"invalid pattern in match", fn ->
    code =
      quote do
        case true do
          true && true -> true
        end
      end

    expand(code)
  end

  # Anonymous-function invocation is not a valid pattern either.
  assert_raise CompileError, ~r"invalid pattern in match", fn ->
    code =
      quote do
        x = & &1

        case true do
          x.(false) -> true
        end
      end

    expand(code)
  end

  # ...nor is it allowed in guards.
  assert_raise CompileError, ~r"invalid expression in guard", fn ->
    code =
      quote do
        x = & &1

        case true do
          true when x.(true) -> true
        end
      end

    expand(code)
  end

  assert_raise CompileError, ~r"invalid call foo\(1\)\(2\)", fn ->
    expand(quote(do: foo(1)(2)))
  end

  assert_raise CompileError, ~r"invalid call 1\.foo\(\)", fn ->
    expand(quote(do: 1.foo))
  end

  assert_raise CompileError, ~r"unhandled operator ->", fn ->
    expand(quote(do: (foo -> bar)))
  end

  message = ~r"undefined variable \"foo\""

  assert_raise CompileError, message, fn ->
    code =
      quote do
        fn <<_::size(foo)>> -> :ok end
      end

    expand(code)
  end

  # NOTE(review): the parentheses in "foo()" are unescaped, forming an empty
  # regex group — the pattern effectively matches "got: foo", which is still
  # a substring of the real message, so the assertion holds.
  message = ~r"size in bitstring expects an integer or a variable as argument, got: foo()"

  assert_raise CompileError, message, fn ->
    code =
      quote do
        fn <<_::size(foo())>> -> :ok end
      end

    expand(code)
  end
end
## Helpers
# Helper macro for tests that need a literal produced by macro expansion.
defmacro thirteen, do: 13
# Strips the given metadata keys from every node of a quoted expression so
# ASTs can be compared structurally, ignoring compiler-added annotations.
defp clean_meta(expr, vars) do
  Macro.prewalk(expr, fn node ->
    Macro.update_meta(node, fn meta -> Keyword.drop(meta, vars) end)
  end)
end
# Expands `expr` in the test module's environment, returning only the
# expanded AST (the resulting env is discarded).
defp expand(expr) do
  {expanded, _env} = expand_env(expr, __ENV__)
  expanded
end
# Expands `expr` in `env` via the compiler's expansion pass.
#
# The call is wrapped in capture_io(:stderr, ...) purely to swallow any
# warnings printed during expansion. Since capture_io returns the captured
# *output* rather than the fun's return value, the expansion result is
# shipped out via send/receive instead.
# The :version metadata (presumably the compiler's internal variable
# versioning) is stripped so expanded ASTs compare equal to quoted ones.
defp expand_env(expr, env) do
  ExUnit.CaptureIO.capture_io(:stderr, fn ->
    send(self(), {:expand_env, :elixir_expand.expand(expr, env)})
  end)

  receive do
    {:expand_env, {expr, env}} -> {clean_meta(expr, [:version]), env}
  end
end
end
| 25.459193 | 164 | 0.531247 |
f743383c43cf5230c5ca3b67e89e57a5e04fdf7d | 3,234 | ex | Elixir | lib/advent_of_code/day5.ex | h4cc/adventofcode-2016 | 0cc75f6238aa90555c395e4d6c5929789756fb53 | [
"MIT"
] | null | null | null | lib/advent_of_code/day5.ex | h4cc/adventofcode-2016 | 0cc75f6238aa90555c395e4d6c5929789756fb53 | [
"MIT"
] | null | null | null | lib/advent_of_code/day5.ex | h4cc/adventofcode-2016 | 0cc75f6238aa90555c395e4d6c5929789756fb53 | [
"MIT"
] | null | null | null | defmodule AdventOfCode.Day5 do
@input_example "abc" #abc3231929
@input "abbhdwsy"
@password_length 8
@chunk_size 100_000
@cores 4
def solve() do
@input
|> find_password_parallel
|> IO.inspect
end
def solve2() do
@input
|> find_password_parallel2
|> IO.inspect
end
defp find_password_parallel(input) do
# Create a source for a stream of numbers
Stream.iterate(0, &(&1+1))
|> Stream.chunk(@chunk_size)
|> Stream.chunk(@cores)
|> Stream.map(fn(chunks) ->
chunks
|> Enum.map(fn(numbers) ->
Task.async(fn ->
numbers
|> Enum.flat_map(fn(number) ->
"#{input}#{number}"
|> md5
|> hash_prefix(number)
|> case do
{:ok, {number, char}} -> [{number, char}]
:error -> []
end
end)
end)
end)
|> Enum.map(&Task.await/1)
|> List.flatten
end)
|> Stream.flat_map(fn(x) -> x end)
|> Enum.take(@password_length)
|> Enum.sort(fn({number1, _char1}, {number2, _char2}) ->
number1 < number2
end)
|> Enum.map(fn({_number, char}) ->
char
end)
|> Enum.join
end
defp find_password_parallel2(input) do
# Create a source for a stream of numbers
Stream.iterate(0, &(&1+1))
|> Stream.chunk(@chunk_size)
|> Stream.chunk(@cores)
|> Stream.map(fn(chunks) ->
chunks
|> Enum.map(fn(numbers) ->
Task.async(fn ->
numbers
|> Enum.flat_map(fn(number) ->
"#{input}#{number}"
|> md5
|> hash_prefix2(number)
|> case do
{:ok, {number, char, pos}} -> [{number, char, pos}]
:error -> []
end
end)
end)
end)
|> Enum.map(&Task.await/1)
|> List.flatten
end)
|> Stream.flat_map(fn(x) -> x end)
|> Enum.take(@password_length * 3)
|> Enum.sort(fn({number1, _char1, _pos1}, {number2, _char2, _pos2}) ->
number1 < number2
end)
|> Enum.reduce(%{}, fn({_number, char, pos}, acc) ->
acc
|> Map.get(pos)
|> case do
nil -> acc |> Map.put(pos, char)
_ -> acc
end
end)
|> IO.inspect
|> Map.values
|> Enum.join
end
# defp find_password(_input, _i, password_chars) when length(password_chars) == 8, do: password_chars |> Enum.reverse |> Enum.join
# defp find_password(input, i, password_chars) do
# "#{input}#{i}"
# |> md5
# |> hash_prefix(i)
# |> case do
# {:ok, {_number, char}} ->
# find_password(input, i+1, [char|password_chars])
# :error ->
# find_password(input, i+1, password_chars)
# end
# end
defp md5(string) do
:crypto.hash(:md5, string) |> Base.encode16(case: :lower)
end
defp hash_prefix(<<"00000", char::bitstring-8, _::binary>>, number), do: {:ok, {number, char}}
defp hash_prefix(<<_::binary>>, _number), do: :error
defp hash_prefix2(<<"00000", pos::bitstring-8, char::bitstring-8, _::binary>>, number) when pos in ["0", "1", "2", "3", "4", "5", "6", "7"], do: {:ok, {number, char, pos |> String.to_integer}}
defp hash_prefix2(<<_::binary>>, _number), do: :error
end | 26.508197 | 194 | 0.53556 |
f74352a90d99ec467c0adbf6455c3192020425d7 | 20,982 | ex | Elixir | lib/nebulex/caching.ex | boriskohen/nebulex | bd733ceb7c09a46f1a35064ed016acdb4e53aeb3 | [
"MIT"
] | null | null | null | lib/nebulex/caching.ex | boriskohen/nebulex | bd733ceb7c09a46f1a35064ed016acdb4e53aeb3 | [
"MIT"
] | null | null | null | lib/nebulex/caching.ex | boriskohen/nebulex | bd733ceb7c09a46f1a35064ed016acdb4e53aeb3 | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(Decorator.Define) do
defmodule Nebulex.Caching do
@moduledoc """
Declarative annotation-based caching via function
[decorators](https://github.com/arjan/decorator).
For caching declaration, the abstraction provides three Elixir function
    decorators: `cacheable`, `cache_evict`, and `cache_put`, which allow
functions to trigger cache population or cache eviction.
Let us take a closer look at each annotation.
> Inspired by [Spring Cache Abstraction](https://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/cache.html).
## `cacheable` decorator
As the name implies, `cacheable` is used to demarcate functions that are
cacheable - that is, functions for whom the result is stored into the cache
so, on subsequent invocations (with the same arguments), the value in the
cache is returned without having to actually execute the function. In its
simplest form, the decorator/annotation declaration requires the name of
the cache associated with the annotated function:
@decorate cacheable(cache: Cache)
def get_account(id) do
# the logic for retrieving the account ...
end
In the snippet above, the function `get_account/1` is associated with the
cache named `Cache`. Each time the function is called, the cache is checked
to see whether the invocation has been already executed and does not have
to be repeated.
### Default Key Generation
Since caches are essentially key-value stores, each invocation of a cached
function needs to be translated into a suitable key for cache access.
Out of the box, the caching abstraction uses a simple key-generator
based on the following algorithm:
* If no params are given, return `0`.
* If only one param is given, return that param as key.
* If more than one param is given, return a key computed from the hashes
of all parameters (`:erlang.phash2(args)`).
> **IMPORTANT:** Since Nebulex v2.1.0, the default key generation implements
the algorithm described above, breaking backward compatibility with older
versions. Therefore, you may need to change your code in case of using the
default key generation.
The default key generator is provided by the cache via the callback
`c:Nebulex.Cache.__default_key_generator__/0` and it is applied only
if the option `key:` or `keys:` is not configured. By default it is
`Nebulex.Caching.SimpleKeyGenerator`. But you can change the default
key generator at compile time with the option `:default_key_generator`.
For example, one can define a cache with a default key generator like so:
defmodule MyApp.Cache do
use Nebulex.Cache,
otp_app: :my_app,
adapter: Nebulex.Adapters.Local,
default_key_generator: __MODULE__
@behaviour Nebulex.Caching.KeyGenerator
@impl true
def generate(mod, fun, args), do: :erlang.phash2({mod, fun, args})
end
The given key generator must implement the `Nebulex.Caching.KeyGenerator`
behaviour.
Also, you can provide a different key generator at any time
(overriding the default one) when using any caching annotation
through the option `:key_generator`. For example:
@decorate cache_put(cache: Cache, key_generator: MyApp.Cache.AnotherKeyGenerator)
def update_account(account) do
# the logic for updating the account ...
end
> The `:key_generator` option is available for all caching annotations.
### Custom Key Generation Declaration
Since caching is generic, it is quite likely the target functions have
various signatures that cannot be simply mapped on top of the cache
structure. This tends to become obvious when the target function has
multiple arguments out of which only some are suitable for caching
(while the rest are used only by the function logic). For example:
@decorate cacheable(cache: Cache)
def get_account(email, include_users?) do
# the logic for retrieving the account ...
end
At first glance, while the boolean argument influences the way the account
is found, it is no use for the cache.
For such cases, the `cacheable` decorator allows the user to specify the
key explicitly based on the function attributes.
@decorate cacheable(cache: Cache, key: {Account, email})
def get_account(email, include_users?) do
# the logic for retrieving the account ...
end
@decorate cacheable(cache: Cache, key: {Account, user.account_id})
def get_user_account(%User{} = user) do
# the logic for retrieving the account ...
end
It is also possible passing options to the cache, like so:
@decorate cacheable(cache: Cache, key: {Account, email}, opts: [ttl: 300_000])
def get_account(email, include_users?) do
# the logic for retrieving the account ...
end
See the **"Shared Options"** section below.
### Functions with multiple clauses
Since [decorator lib](https://github.com/arjan/decorator#functions-with-multiple-clauses)
is used, it is important to be aware of the recommendations, warns,
limitations, and so on. In this case, for functions with multiple clauses
the general advice is to create an empty function head, and call the
decorator on that head, like so:
@decorate cacheable(cache: Cache, key: email)
def get_account(email \\\\ nil)
def get_account(nil), do: nil
def get_account(email) do
# the logic for retrieving the account ...
end
## `cache_put` decorator
For cases where the cache needs to be updated without interfering with the
function execution, one can use the `cache_put` decorator. That is, the
method will always be executed and its result placed into the cache
(according to the `cache_put` options). It supports the same options as
`cacheable`.
@decorate cache_put(cache: Cache, key: {Account, acct.email})
def update_account(%Account{} = acct, attrs) do
# the logic for updating the account ...
end
Note that using `cache_put` and `cacheable` annotations on the same function
is generally discouraged because they have different behaviors. While the
latter causes the method execution to be skipped by using the cache, the
former forces the execution in order to execute a cache update. This leads
to unexpected behavior and with the exception of specific corner-cases
(such as decorators having conditions that exclude them from each other),
such declarations should be avoided.
## `cache_evict` decorator
The cache abstraction allows not just the population of a cache store but
also eviction. This process is useful for removing stale or unused data from
the cache. Opposed to `cacheable`, the decorator `cache_evict` demarcates
functions that perform cache eviction, which are functions that act as
triggers for removing data from the cache. The `cache_evict` decorator not
only allows a key to be specified, but also a set of keys. Besides, extra
options like`all_entries` which indicates whether a cache-wide eviction
needs to be performed rather than just an entry one (based on the key or
keys):
@decorate cache_evict(cache: Cache, key: {Account, email})
def delete_account_by_email(email) do
# the logic for deleting the account ...
end
        @decorate cache_evict(cache: Cache, keys: [{Account, acct.id}, {Account, acct.email}])
def delete_account(%Account{} = acct) do
# the logic for deleting the account ...
end
        @decorate cache_evict(cache: Cache, all_entries: true)
def delete_all_accounts do
# the logic for deleting all the accounts ...
end
The option `all_entries:` comes in handy when an entire cache region needs
to be cleared out - rather than evicting each entry (which would take a
long time since it is inefficient), all the entries are removed in one
operation as shown above.
## Shared Options
All three cache annotations explained previously accept the following
options:
* `:cache` - Defines what cache to use (required). Raises `ArgumentError`
if the option is not present.
* `:key` - Defines the cache access key (optional). It overrides the
`:key_generator` option. If this option is not present, a default
key is generated by the configured or default key generator.
* `:opts` - Defines the cache options that will be passed as argument
to the invoked cache function (optional).
* `:match` - Match function `(term -> boolean | {true, term})` (optional).
This function is for matching and decide whether the code-block
evaluation result is cached or not. If `true` the code-block evaluation
result is cached as it is (the default). If `{true, value}` is returned,
then the `value` is what is cached (useful to control what is meant to
be cached). Returning `false` will cause that nothing is stored in the
cache.
* `:key_generator` - The custom key generator module implementing the
`Nebulex.Caching.KeyGenerator` behaviour. If present, this option
overrides the default key generator provided by the cache, and it is
applied only if the option `key:` or `keys:` is not configured.
In other words, the option `key:` or `keys:` overrides this option.
## Putting all together
Supposing we are using `Ecto` and we want to define some cacheable functions
within the context `MyApp.Accounts`:
# The config
config :my_app, MyApp.Cache,
gc_interval: 86_400_000, #=> 1 day
backend: :shards
# The Cache
defmodule MyApp.Cache do
use Nebulex.Cache,
otp_app: :my_app,
adapter: Nebulex.Adapters.Local
end
# Some Ecto schema
defmodule MyApp.Accounts.User do
use Ecto.Schema
schema "users" do
field(:username, :string)
field(:password, :string)
field(:role, :string)
end
def changeset(user, attrs) do
user
|> cast(attrs, [:username, :password, :role])
|> validate_required([:username, :password, :role])
end
end
# Accounts context
defmodule MyApp.Accounts do
use Nebulex.Caching
alias MyApp.Accounts.User
alias MyApp.{Cache, Repo}
@ttl :timer.hours(1)
@decorate cacheable(cache: Cache, key: {User, id}, opts: [ttl: @ttl])
def get_user!(id) do
Repo.get!(User, id)
end
@decorate cacheable(cache: Cache, key: {User, username}, opts: [ttl: @ttl])
def get_user_by_username(username) do
Repo.get_by(User, [username: username])
end
@decorate cache_put(
cache: Cache,
keys: [{User, usr.id}, {User, usr.username}],
match: &match_update/1
)
def update_user(%User{} = usr, attrs) do
usr
|> User.changeset(attrs)
|> Repo.update()
end
defp match_update({:ok, usr}), do: {true, usr}
defp match_update({:error, _}), do: false
@decorate cache_evict(cache: Cache, keys: [{User, usr.id}, {User, usr.username}])
def delete_user(%User{} = usr) do
Repo.delete(usr)
end
def create_user(attrs \\\\ %{}) do
%User{}
|> User.changeset(attrs)
|> Repo.insert()
end
end
See [Cache Usage Patters Guide](http://hexdocs.pm/nebulex/cache-usage-patterns.html).
"""
use Decorator.Define, cacheable: 1, cache_evict: 1, cache_put: 1
alias Nebulex.Caching
@doc """
Provides a way of annotating functions to be cached (cacheable aspect).
The returned value by the code block is cached if it doesn't exist already
in cache, otherwise, it is returned directly from cache and the code block
is not executed.
## Options
See the "Shared options" section at the module documentation.
## Examples
defmodule MyApp.Example do
use Nebulex.Caching
alias MyApp.Cache
@ttl :timer.hours(1)
@decorate cacheable(cache: Cache, key: name)
def get_by_name(name, age) do
# your logic (maybe the loader to retrieve the value from the SoR)
end
@decorate cacheable(cache: Cache, key: age, opts: [ttl: @ttl])
def get_by_age(age) do
# your logic (maybe the loader to retrieve the value from the SoR)
end
@decorate cacheable(cache: Cache, key: clauses, match: &match_fun/1)
def all(clauses) do
# your logic (maybe the loader to retrieve the value from the SoR)
end
defp match_fun([]), do: false
defp match_fun(_), do: true
end
The **Read-through** pattern is supported by this decorator. The loader to
retrieve the value from the system-of-record (SoR) is your function's logic
and the rest is provided by the macro under-the-hood.
"""
# Decorator entry point; the three decorators share one pipeline and differ
# only in the action tag passed along.
def cacheable(attrs, block, context), do: caching_action(:cacheable, attrs, block, context)
@doc """
Provides a way of annotating functions to be evicted; but updating the
cached key instead of deleting it.
The content of the cache is updated without interfering with the function
execution. That is, the method would always be executed and the result
cached.
The difference between `cacheable/3` and `cache_put/3` is that `cacheable/3`
will skip running the function if the key exists in the cache, whereas
`cache_put/3` will actually run the function and then put the result in
the cache.
## Options
See the "Shared options" section at the module documentation.
## Examples
defmodule MyApp.Example do
use Nebulex.Caching
alias MyApp.Cache
@ttl :timer.hours(1)
@decorate cache_put(cache: Cache, key: id, opts: [ttl: @ttl])
        def update!(id, attrs \\\\ %{}) do
# your logic (maybe write data to the SoR)
end
@decorate cache_put(cache: Cache, key: id, match: &match_fun/1, opts: [ttl: @ttl])
        def update(id, attrs \\\\ %{}) do
# your logic (maybe write data to the SoR)
end
defp match_fun({:ok, updated}), do: {true, updated}
defp match_fun({:error, _}), do: false
end
The **Write-through** pattern is supported by this decorator. Your function
provides the logic to write data to the system-of-record (SoR) and the rest
is provided by the decorator under-the-hood.
"""
# Decorator entry point; delegates to the shared pipeline tagged :cache_put.
def cache_put(attrs, block, context), do: caching_action(:cache_put, attrs, block, context)
@doc """
Provides a way of annotating functions to be evicted (eviction aspect).
On function's completion, the given key or keys (depends on the `:key` and
`:keys` options) are deleted from the cache.
## Options
* `:keys` - Defines the set of keys to be evicted from cache on function
completion. It overrides the `:key_generator` option.
* `:all_entries` - Defines if all entries must be removed on function
completion. Defaults to `false`.
* `:before_invocation` - Boolean to indicate whether the eviction should
occur after (the default) or before the function executes. The former
provides the same semantics as the rest of the annotations; once the
function completes successfully, an action (in this case eviction)
on the cache is executed. If the function does not execute (as it might
be cached) or an exception is raised, the eviction does not occur.
The latter (`before_invocation: true`) causes the eviction to occur
always, before the function is invoked; this is useful in cases where
the eviction does not need to be tied to the function outcome.
See the "Shared options" section at the module documentation.
## Examples
defmodule MyApp.Example do
use Nebulex.Caching
alias MyApp.Cache
@decorate cache_evict(cache: Cache, key: id)
def delete(id) do
# your logic (maybe write/delete data to the SoR)
end
@decorate cache_evict(cache: Cache, keys: [object.name, object.id])
def delete_object(object) do
# your logic (maybe write/delete data to the SoR)
end
@decorate cache_evict(cache: Cache, all_entries: true)
def delete_all do
# your logic (maybe write/delete data to the SoR)
end
end
The **Write-through** pattern is supported by this decorator. Your function
provides the logic to write data to the system-of-record (SoR) and the rest
is provided by the decorator under-the-hood. But in contrast with `update`
decorator, when the data is written to the SoR, the key for that value is
deleted from cache instead of updated.
"""
# Decorator entry point; delegates to the shared pipeline tagged :cache_evict.
def cache_evict(attrs, block, context), do: caching_action(:cache_evict, attrs, block, context)
## Private Functions
# Shared pipeline for the three decorators: validates the mandatory :cache
# option (raising ArgumentError at compile time when missing), resolves
# defaults for :match and :opts, builds the key-generation AST, and wraps
# the decorated body in the action-specific caching logic.
defp caching_action(action, attrs, block, context) do
  cache = attrs[:cache] || raise ArgumentError, "expected cache: to be given as argument"
  # Default match: everything matches, so results are always cached.
  match_var = attrs[:match] || quote(do: fn _ -> true end)
  opts_var = attrs[:opts] || []

  keygen_block = keygen_block(attrs, context)
  action_block = action_block(action, block, attrs, keygen_block)

  # The cache/opts/match bindings introduced here are referenced by the
  # generated action_block.
  quote do
    cache = unquote(cache)
    opts = unquote(opts_var)
    match = unquote(match_var)
    unquote(action_block)
  end
end
# Builds the AST that computes the cache key at runtime.
#
# Arguments with default values ({:\\, _, [var, default]} nodes) are unwrapped
# so only the variable is forwarded to the key generator. Precedence:
# explicit :key option, then the :key_generator option, then the cache's
# configured default key generator.
# Note: a :key explicitly set to nil/false makes the first cond clause
# falsy and falls through to the generators.
defp keygen_block(attrs, ctx) do
  args =
    for arg <- ctx.args do
      case arg do
        {:\\, _, [var, _]} -> var
        var -> var
      end
    end

  cond do
    key = Keyword.get(attrs, :key) ->
      quote(do: unquote(key))

    keygen = Keyword.get(attrs, :key_generator) ->
      quote do
        unquote(keygen).generate(unquote(ctx.module), unquote(ctx.name), unquote(args))
      end

    true ->
      quote do
        cache.__default_key_generator__().generate(
          unquote(ctx.module),
          unquote(ctx.name),
          unquote(args)
        )
      end
  end
end
# Read-through caching: return the cached value when present, otherwise run
# the decorated body and (subject to `match`) store its result under `key`.
# NOTE(review): a cached nil is indistinguishable from a miss here, so nil
# results are recomputed on every call.
defp action_block(:cacheable, block, _attrs, keygen) do
  quote do
    key = unquote(keygen)

    case cache.get(key, opts) do
      nil -> Caching.eval_match(unquote(block), match, cache, key, opts)
      val -> val
    end
  end
end
# Write-through update: always run the decorated body, then (subject to
# `match`) overwrite the cached entry with the fresh result.
defp action_block(:cache_put, block, _attrs, keygen) do
  quote do
    Caching.eval_match(unquote(block), match, cache, unquote(keygen), opts)
  end
end
# Eviction: run the decorated body and remove the affected cache entries.
# With `before_invocation: true` the eviction happens unconditionally before
# the body runs; otherwise it only happens after the body returns normally,
# so an exception skips it (matching the moduledoc above).
defp action_block(:cache_evict, block, attrs, keygen) do
  before_invocation? = attrs[:before_invocation] || false
  eviction = eviction_block(attrs, keygen)

  # is_boolean/1 guards against truthy non-boolean values in the option.
  if is_boolean(before_invocation?) && before_invocation? do
    quote do
      unquote(eviction)
      unquote(block)
    end
  else
    quote do
      result = unquote(block)
      unquote(eviction)
      result
    end
  end
end
# Chooses the eviction strategy: wipe the whole cache (`all_entries: true`),
# delete an explicit list of keys (`keys:`), or fall back to deleting the
# single generated key.
defp eviction_block(attrs, keygen) do
  keys = Keyword.get(attrs, :keys)
  all_entries? = attrs[:all_entries] || false

  cond do
    is_boolean(all_entries?) && all_entries? ->
      quote(do: cache.delete_all())

    # `keys != []` replaces `length(keys) > 0`: same truth value for any
    # list, but O(1) instead of walking the entire list.
    is_list(keys) and keys != [] ->
      delete_keys_block(keys)

    true ->
      quote(do: cache.delete(unquote(keygen)))
  end
end
# Emits code that deletes every key in the compile-time list, skipping
# nil/false entries (the `if k` check).
defp delete_keys_block(keys) do
  quote do
    Enum.each(unquote(keys), fn k -> if k, do: cache.delete(k) end)
  end
end
@doc """
This function is for internal purposes.

**NOTE:** Workaround to avoid dialyzer warnings when using declarative
annotation-based caching via decorators.
"""
@spec eval_match(term, (term -> boolean | {true, term}), module, term, Keyword.t()) :: term
def eval_match(result, match, cache, key, opts) do
  # Write either the matched replacement value or the raw result into the
  # cache; a `false` match leaves the cache untouched. The decorated
  # function's result is always returned unchanged.
  write! = fn value -> :ok = cache.put(key, value, opts) end

  case match.(result) do
    {true, value} -> write!.(value)
    true -> write!.(result)
    false -> :ok
  end

  result
end
end
end
| 35.62309 | 130 | 0.642884 |
f743d20f3e048e5d81c5a1e4f200cc09f99f3147 | 1,154 | ex | Elixir | lib/excoin/extensions/list/ext.ex | johncosch/excoin | 2444d8ea4619013a5da5bb94f92c42e278bbdef4 | [
"MIT"
] | 1 | 2016-10-18T23:41:33.000Z | 2016-10-18T23:41:33.000Z | lib/excoin/extensions/list/ext.ex | johncosch/excoin | 2444d8ea4619013a5da5bb94f92c42e278bbdef4 | [
"MIT"
] | null | null | null | lib/excoin/extensions/list/ext.ex | johncosch/excoin | 2444d8ea4619013a5da5bb94f92c42e278bbdef4 | [
"MIT"
] | 1 | 2016-06-28T21:28:48.000Z | 2016-06-28T21:28:48.000Z | defmodule List.Ext do
@doc """
Shift takes a list as the first param and the number of records that should be shifted.
Returns a tuple with a list containing the values that were removed from the original list
and the new list after those values were removed.
"""
# The bodiless head carries the default value: Elixir rejects a default
# declared on one clause of a multi-clause function ("definitions with
# multiple clauses and default values require a function head").
def shift(list, num_to_remove \\ 1)

# Asking for more elements than the list holds removes everything.
def shift(list, num_to_remove) when num_to_remove > length(list) do
  {list, []}
end

def shift(list, num_to_remove) when num_to_remove > 0 do
  _shift(list, num_to_remove)
end
# Delegates to the recursive worker with an empty accumulator. The previous
# version destructured the result into two unused variables, which only
# produced compiler warnings without changing the returned value.
defp _shift(list, num_to_remove) do
  _shift_value(list, [], num_to_remove)
end
# Recursive worker: peels `remaining` elements off the front of `list`.
# Removed values are accumulated by prepending and reversed once at the end
# (O(n)) instead of the original `values ++ [value]`, which made the whole
# walk O(n^2). The external result is unchanged because the accumulator
# always starts empty.
defp _shift_value(list, values, 0), do: {Enum.reverse(values), list}

defp _shift_value(list, values, remaining) do
  # List.first/1 on [] yields nil, matching the original's behaviour for
  # over-long requests (callers guard against that case anyway).
  _shift_value(List.delete_at(list, 0), [List.first(list) | values], remaining - 1)
end
@doc """
Pop takes a list and optionally a number of elements to be popped.
Returns a tuple of the values that were popped and a new list with the last n elements removed.
"""
def pop(list, n \\ 1) do
  # Popping from the tail is shifting from the head of the reversed list.
  reversed = Enum.reverse(list)
  {popped, rest} = shift(reversed, n)
  {popped, Enum.reverse(rest)}
end
end | 29.589744 | 97 | 0.697574 |
f743d5e45a410de7eb7985b6ae47d98143f43f2e | 203 | ex | Elixir | lib/phone/gl.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | lib/phone/gl.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | lib/phone/gl.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | defmodule Phone.GL do
@moduledoc false

# Country data module for Greenland; `use Helper.Country` injects the shared
# parsing behaviour that consumes the definitions below (assumption based on
# the DSL shape — confirm against Helper.Country).
use Helper.Country

# Greenlandic numbers: country code 299 followed by a 2-digit and a 4-digit group.
def regex, do: ~r/^(299)(..)(.{4})/
def country, do: "Greenland"
def a2, do: "GL"
def a3, do: "GRL"

# Registers the "299" prefix so the dispatcher can route numbers here.
matcher :regex, ["299"]
end
| 15.615385 | 37 | 0.596059 |
f743de91adcdfbacc7065c31eee60ff78996e1f1 | 907 | ex | Elixir | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/application_gateway_firewall_rule_group.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/application_gateway_firewall_rule_group.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/application_gateway_firewall_rule_group.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Network.Model.ApplicationGatewayFirewallRuleGroup do
  @moduledoc """
  A web application firewall rule group.
  """

  # Field names intentionally mirror the Azure wire format (camelCase), so
  # Poison can encode the struct directly.
  @derive [Poison.Encoder]
  defstruct [
    :"ruleGroupName",
    :"description",
    :"rules"
  ]

  @type t :: %__MODULE__{
    :"ruleGroupName" => String.t,
    :"description" => String.t,
    :"rules" => [ApplicationGatewayFirewallRule]
  }
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Network.Model.ApplicationGatewayFirewallRuleGroup do
  import Microsoft.Azure.Management.Network.Deserializer

  # Post-processes the decoded map: converts the raw `"rules"` list into
  # ApplicationGatewayFirewallRule structs. The remaining fields are plain
  # strings and need no conversion.
  def decode(value, options) do
    value
    |> deserialize(:"rules", :list, Microsoft.Azure.Management.Network.Model.ApplicationGatewayFirewallRule, options)
  end
end
| 28.34375 | 117 | 0.737596 |
f744033e2753f9f6553e47d88b275d7837347dd4 | 1,524 | exs | Elixir | test/features/admin_edits_post_test.exs | dlederle/tilex | 63520eeb013a2a18174de0b6e41e8b99977dbc20 | [
"MIT"
] | null | null | null | test/features/admin_edits_post_test.exs | dlederle/tilex | 63520eeb013a2a18174de0b6e41e8b99977dbc20 | [
"MIT"
] | null | null | null | test/features/admin_edits_post_test.exs | dlederle/tilex | 63520eeb013a2a18174de0b6e41e8b99977dbc20 | [
"MIT"
] | null | null | null | defmodule AdminEditsPostTest do
use Tilex.IntegrationCase, async: Application.get_env(:tilex, :async_feature_test)
alias Tilex.Integration.Pages.{
PostForm,
PostShowPage
}
test "fills out form and updates post from post show", %{session: session} do
  # Posts require an existing channel; the author and the admin are
  # deliberately different users.
  Factory.insert!(:channel, name: "phoenix")
  developer = Factory.insert!(:developer)
  admin = Factory.insert!(:developer, %{admin: true})

  post =
    Factory.insert!(
      :post,
      title: "Awesome Post!",
      developer: developer,
      body: "This is how to be *awesome*!"
    )

  # The admin edits another developer's post through the edit form.
  session
  |> sign_in(admin)
  |> PostForm.navigate(post)
  |> PostForm.ensure_page_loaded()
  |> PostForm.expect_title_preview("Awesome Post!")
  |> PostForm.fill_in_title("Even Awesomer Post!")
  |> PostForm.click_submit()

  # A successful save lands on the post page showing the new title.
  session
  |> PostShowPage.ensure_page_loaded("Even Awesomer Post!")
end
test "enters a title that is too long", %{session: session} do
  Factory.insert!(:channel, name: "phoenix")
  developer = Factory.insert!(:developer)

  post =
    Factory.insert!(
      :post,
      title: "Limited characters",
      developer: developer,
      body: "Bugs revealed"
    )

  # 120 characters ("I can codez " x 10) — well past the 50-character limit.
  session
  |> sign_in(developer)
  |> PostForm.navigate(post)
  |> PostForm.ensure_page_loaded()
  |> PostForm.fill_in_title(String.duplicate("I can codez ", 10))
  |> PostForm.click_submit()

  # The form re-renders with the validation error instead of saving.
  session
  |> PostForm.expect_form_has_error("Title should be at most 50 character(s)")
end
end
| 26.736842 | 84 | 0.64895 |
f744055877efd205b2fa3bf6aa0776740e0b7200 | 1,065 | ex | Elixir | lib/delta_sharing/profile.ex | instadeq/elixir-delta-sharing-client | 9d7b29f842f7310404746747a1299c705d6c0aa3 | [
"MIT"
] | 2 | 2021-12-18T19:29:41.000Z | 2021-12-18T20:27:10.000Z | lib/delta_sharing/profile.ex | instadeq/elixir-delta-sharing-client | 9d7b29f842f7310404746747a1299c705d6c0aa3 | [
"MIT"
] | null | null | null | lib/delta_sharing/profile.ex | instadeq/elixir-delta-sharing-client | 9d7b29f842f7310404746747a1299c705d6c0aa3 | [
"MIT"
] | null | null | null | defmodule DeltaSharing.Profile do
alias __MODULE__
@derive {Inspect, except: [:bearerToken]}
defstruct path: nil, shareCredentialsVersion: 1, endpoint: "", bearerToken: ""
# Builds a profile struct; `path` and the credentials version are optional
# and default to nil / version 1.
def new(endpoint, bearer_token, path \\ nil, share_credentials_version \\ 1) do
  struct!(Profile,
    endpoint: endpoint,
    bearerToken: bearer_token,
    path: path,
    shareCredentialsVersion: share_credentials_version
  )
end
# Loads a profile from a JSON file.
#
# Returns the profile struct on success. Failures pass through untouched,
# exactly as before: `File.read/1` errors as `{:error, posix}`, JSON errors
# as returned by `Jason.decode/1`, and a structurally wrong document as
# `{:error, %{reason: :bad_profile_format, data: data}}`.
def from_file(path) do
  # `with` flattens the original three-level case pyramid; any non-matching
  # (error) value short-circuits and is returned as-is, which is what the
  # previous nested `error -> error` clauses did.
  with {:ok, binary} <- File.read(path),
       {:ok, data} <- Jason.decode(binary) do
    case data do
      %{
        "shareCredentialsVersion" => share_credentials_version,
        "endpoint" => endpoint,
        "bearerToken" => bearer_token
      } ->
        new(endpoint, bearer_token, path, share_credentials_version)

      _other ->
        {:error, %{reason: :bad_profile_format, data: data}}
    end
  end
end
end
| 25.357143 | 81 | 0.56338 |
f7443709126b49a2c49ad11317ea6c0ff84596c3 | 1,393 | ex | Elixir | lib/bank_account_opening_web/endpoint.ex | viniciusd/bank-account-opening | a36e5c3c8f32e48bf42af958119aef85ff1eeeaa | [
"MIT"
] | null | null | null | lib/bank_account_opening_web/endpoint.ex | viniciusd/bank-account-opening | a36e5c3c8f32e48bf42af958119aef85ff1eeeaa | [
"MIT"
] | null | null | null | lib/bank_account_opening_web/endpoint.ex | viniciusd/bank-account-opening | a36e5c3c8f32e48bf42af958119aef85ff1eeeaa | [
"MIT"
] | null | null | null | defmodule BankAccountWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :bank_account_opening

# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
  store: :cookie,
  key: "_bank_account_opening_key",
  signing_salt: "Gd300dIt"
]

socket "/socket", BankAccountWeb.UserSocket,
  websocket: true,
  longpoll: false

# LiveView socket; reuses the cookie session above via connect_info.
socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]

# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
  at: "/",
  from: :bank_account_opening,
  gzip: false,
  only: ~w(css fonts images js favicon.ico robots.txt)

# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
  plug Phoenix.CodeReloader
end

plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

plug Plug.Parsers,
  parsers: [:urlencoded, :multipart, :json],
  pass: ["*/*"],
  json_decoder: Phoenix.json_library()

plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
# Router goes last: every plug above runs for each request before dispatch.
plug BankAccountWeb.Router
end
| 29.020833 | 97 | 0.719311 |
f7443b11f9d56cb43bc624505fffb54d4adedc14 | 446 | exs | Elixir | test/test_helper.exs | livinginthepast/ecto_temp | 32f57e1aa4b736bdac624376267fdcfa20a3efa0 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | livinginthepast/ecto_temp | 32f57e1aa4b736bdac624376267fdcfa20a3efa0 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | livinginthepast/ecto_temp | 32f57e1aa4b736bdac624376267fdcfa20a3efa0 | [
"Apache-2.0"
] | null | null | null | Logger.configure(level: :info)
ExUnit.start()

alias EctoTemp.Test.Repo

# The test repo is configured inline — this library's test suite has no
# config files.
Application.put_env(:ecto, Repo,
  url: "ecto://postgres@localhost/ecto_temp_test",
  pool: Ecto.Adapters.SQL.Sandbox
)

# Load up the repository, start it, and run migrations
# Drop any database left over from a previous run; the result is irrelevant.
_ = Ecto.Adapters.Postgres.storage_down(Repo.config())

# Create the database fresh, tolerating the case where it already exists.
Ecto.Adapters.Postgres.storage_up(Repo.config())
|> case do
  :ok -> :ok
  {:error, :already_up} -> :ok
end

{:ok, _pid} = Repo.start_link()
| 21.238095 | 54 | 0.719731 |
f74441a4c088c7b4304b1c1c85044f932f355e13 | 461 | exs | Elixir | test/places_alloverse_com_web/views/error_view_test.exs | alloverse/places_alloverse_com | 5c15ea08a88429378ce8f41047418e166cbde7e9 | [
"BSD-2-Clause"
] | null | null | null | test/places_alloverse_com_web/views/error_view_test.exs | alloverse/places_alloverse_com | 5c15ea08a88429378ce8f41047418e166cbde7e9 | [
"BSD-2-Clause"
] | 1 | 2021-03-10T11:40:47.000Z | 2021-03-10T11:40:47.000Z | test/places_alloverse_com_web/views/error_view_test.exs | alloverse/places_alloverse_com | 5c15ea08a88429378ce8f41047418e166cbde7e9 | [
"BSD-2-Clause"
] | null | null | null | defmodule PlacesAlloverseComWeb.ErrorViewTest do
use PlacesAlloverseComWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
# The custom error view renders a plain-text body for 404s.
test "renders 404.html" do
  assert render_to_string(PlacesAlloverseComWeb.ErrorView, "404.html", []) == "Not Found"
end
# Same for server errors: a plain-text 500 body.
test "renders 500.html" do
  assert render_to_string(PlacesAlloverseComWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 30.733333 | 103 | 0.759219 |
f7444264521e2e97c769528f914db5177297300e | 574 | exs | Elixir | mix.exs | henrique-marcomini-movile/docimacy | 8571021102408b7526b47170cdbda261bcac6087 | [
"Apache-2.0"
] | null | null | null | mix.exs | henrique-marcomini-movile/docimacy | 8571021102408b7526b47170cdbda261bcac6087 | [
"Apache-2.0"
] | null | null | null | mix.exs | henrique-marcomini-movile/docimacy | 8571021102408b7526b47170cdbda261bcac6087 | [
"Apache-2.0"
] | null | null | null | defmodule Docimacy.MixProject do
use Mix.Project
# Core project definition: app name, version, required Elixir and deps.
def project do
  [
    app: :docimacy,
    version: "0.1.0",
    elixir: "~> 1.10",
    start_permanent: Mix.env() == :prod,
    deps: deps()
  ]
end
# Run "mix help compile.app" to learn about applications.
# OTP application configuration; only :logger beyond the defaults, and no
# application callback module.
def application do
  [
    extra_applications: [:logger]
  ]
end
# Run "mix help deps" to learn about dependencies.
# No dependencies yet; the commented examples show the hex and git formats.
defp deps do
  [
    # {:dep_from_hexpm, "~> 0.3.0"},
    # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
  ]
end
end
| 19.793103 | 87 | 0.578397 |
f74448000cc8f8e94914267cd4068b218dedb73f | 502 | ex | Elixir | data/templates/elixir.ex | jjlorenzo/scriptisto | 0595ddd4281099e7c9bd74898d75ffbf6c860526 | [
"Apache-2.0"
] | 213 | 2019-10-24T23:56:40.000Z | 2022-03-23T13:15:55.000Z | data/templates/elixir.ex | jjlorenzo/scriptisto | 0595ddd4281099e7c9bd74898d75ffbf6c860526 | [
"Apache-2.0"
] | 33 | 2019-10-22T20:08:06.000Z | 2022-02-11T11:06:19.000Z | data/templates/elixir.ex | jjlorenzo/scriptisto | 0595ddd4281099e7c9bd74898d75ffbf6c860526 | [
"Apache-2.0"
] | 10 | 2019-10-25T15:38:56.000Z | 2022-01-28T13:03:44.000Z | #!/usr/bin/env scriptisto
# scriptisto-begin
# script_src: lib/script.ex
# build_cmd: MIX_ENV=prod mix escript.build
# files:
# - path: mix.exs
# content: |
# defmodule Script.MixProject do
# use Mix.Project
# def project do
# [
# app: :script,
# version: "0.1.0",
# elixir: "~> 1.8",
# escript: [main_module: Script.CLI],
# ]
# end
# end
# scriptisto-end
defmodule Script.CLI do
  # Escript entry point (declared as main_module in the embedded mix.exs
  # header of this template); command-line arguments are ignored.
  def main(_argv) do
    IO.puts("Hello, Elixir!")
  end
end
| 18.592593 | 44 | 0.577689 |
f74453c7a4cd122b4fe22f3dbc696f7bf3753a71 | 361 | exs | Elixir | test/hydra/transformer_test.exs | doomspork/hydra | 641b7c588c2b08ed144d05476118bb165a18a132 | [
"Apache-2.0"
] | 71 | 2016-01-18T11:30:59.000Z | 2021-02-15T04:15:31.000Z | test/hydra/transformer_test.exs | doomspork/hydra | 641b7c588c2b08ed144d05476118bb165a18a132 | [
"Apache-2.0"
] | 7 | 2016-01-21T22:31:42.000Z | 2018-04-06T17:03:59.000Z | test/hydra/transformer_test.exs | doomspork/hydra | 641b7c588c2b08ed144d05476118bb165a18a132 | [
"Apache-2.0"
] | 5 | 2016-06-13T15:19:27.000Z | 2019-12-06T23:07:52.000Z | defmodule Hydra.TransformerTest do
use ExUnit.Case, async: true

alias Hydra.Transformer

# Fixture: a two-element JSON array.
@json """
[{
"name": "Sean"
},
{
"name": "Jeff"
}]
"""

# Filter selecting the first array element. The single quotes suggest it is
# passed through a shell (presumably to jq) — confirm against Hydra.Transformer.
@filter "'.[0]'"

@expected_output """
{
"name": "Sean"
}
"""

test "transforms JSON" do
  assert Transformer.transform(@json, @filter) == {:ok, @expected_output}
end
end
| 13.37037 | 75 | 0.562327 |
f7446c22c2d45f9babebf67b35b03813ccae27aa | 2,102 | exs | Elixir | extended_example/mix.exs | PJUllrich/event-sourcing-with-elixir | 7f70e6bc49d9d93f1d86513a1f358e41e07b8304 | [
"MIT"
] | 19 | 2020-10-08T14:05:30.000Z | 2022-03-18T08:43:11.000Z | extended_example/mix.exs | PJUllrich/event-sourcing-with-elixir | 7f70e6bc49d9d93f1d86513a1f358e41e07b8304 | [
"MIT"
] | null | null | null | extended_example/mix.exs | PJUllrich/event-sourcing-with-elixir | 7f70e6bc49d9d93f1d86513a1f358e41e07b8304 | [
"MIT"
] | 3 | 2021-02-19T08:31:58.000Z | 2021-12-09T05:28:55.000Z | defmodule Demo.MixProject do
use Mix.Project
# Project definition. Build/config/deps paths are pinned to this app's own
# directory, and the :phoenix compiler is prepended for template tracking.
def project do
  [
    app: :demo,
    version: "0.1.0",
    build_path: "./_build",
    config_path: "./config/config.exs",
    deps_path: "./deps",
    lockfile: "mix.lock",
    elixir: "~> 1.7",
    elixirc_paths: elixirc_paths(Mix.env()),
    compilers: [:phoenix] ++ Mix.compilers(),
    start_permanent: Mix.env() == :prod,
    aliases: aliases(),
    deps: deps()
  ]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
  [
    # Demo.Application starts the supervision tree.
    mod: {Demo.Application, []},
    extra_applications: [:logger, :runtime_tools]
  ]
end
# Specifies which paths to compile per environment.
# Tests additionally compile the helpers under test/support.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
# Phoenix/Ecto web stack plus :eventstore for event sourcing, Slime
# templates and Faker for seed/test data.
defp deps do
  [
    {:phoenix, "~> 1.5.4"},
    {:phoenix_ecto, "~> 4.1"},
    {:ecto_sql, "~> 3.4"},
    {:postgrex, ">= 0.0.0"},
    {:tzdata, "~> 1.0.1"},
    {:phoenix_live_view, "~> 0.14.4"},
    {:floki, ">= 0.0.0", only: :test},
    {:phoenix_html, "~> 2.11"},
    {:phoenix_live_reload, "~> 1.2", only: :dev},
    {:telemetry_metrics, "~> 0.4"},
    {:telemetry_poller, "~> 0.4"},
    {:jason, "~> 1.0"},
    {:plug_cowboy, "~> 2.0"},
    {:eventstore, "~> 1.1"},
    {:phoenix_slime, "~> 0.13.1"},
    {:faker, "~> 0.15"}
  ]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
  [
    setup: [
      "deps.get",
      "ecto.setup",
      "cmd npm install --prefix assets"
    ],
    # event_store.init prepares the EventStore schema (note: no ecto.migrate
    # in these aliases).
    "ecto.setup": ["ecto.create", "event_store.init", "run priv/repo/seeds.exs"],
    "ecto.reset": ["ecto.drop", "ecto.setup"],
    test: ["ecto.create --quiet", "event_store.init", "test"]
  ]
end
end
| 26.948718 | 84 | 0.55471 |
f7446df72f5c03782a98ceb2410eea7eaef4deb4 | 2,493 | exs | Elixir | test/liblink/socket/recvmsg_test.exs | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | 3 | 2018-10-26T12:55:15.000Z | 2019-05-03T22:41:34.000Z | test/liblink/socket/recvmsg_test.exs | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | 4 | 2018-08-26T14:43:57.000Z | 2020-09-23T21:14:56.000Z | test/liblink/socket/recvmsg_test.exs | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 (c) Xerpa
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Liblink.Socket.RecvmsgTest do
use ExUnit.Case, async: true
alias Liblink.Socket.Device
alias Liblink.Socket.Recvmsg
@moduletag capture_log: true
# Every test gets a fresh Recvmsg process with a blank device attached; the
# pid reaches each test through the context map.
setup do
  {:ok, pid} = Recvmsg.start()
  device = %Device{}
  :ok = Recvmsg.attach(pid, device)
  {:ok, [pid: pid]}
end
describe "recvmsg/poll" do
  test "recvmsg when queue is empty", %{pid: pid} do
    assert {:error, :empty} == Recvmsg.recvmsg(pid, :infinity)
  end

  test "recvmsg after receiving message", %{pid: pid} do
    # Incoming traffic is delivered as plain `{:liblink_message, payload}` sends.
    send(pid, {:liblink_message, ["foobar"]})
    assert {:ok, ["foobar"]} = Recvmsg.recvmsg(pid, :infinity)
  end

  test "recvmsg consumes message from the queue", %{pid: pid} do
    send(pid, {:liblink_message, ["foobar"]})
    assert {:ok, _} = Recvmsg.recvmsg(pid, :infinity)
    # The second read finds the queue drained.
    assert {:error, :empty} = Recvmsg.recvmsg(pid, :infinity)
  end

  test "poll notifies when a message arrives", %{pid: pid} do
    assert {:ok, tag} = Recvmsg.poll(pid, :infinity)
    send(pid, {:liblink_message, ["foobar"]})
    assert_receive {^tag, :data}
  end

  test "poll notifies when timeout expires", %{pid: pid} do
    assert {:ok, tag} = Recvmsg.poll(pid, 100)
    assert_receive {^tag, :timeout}, :infinity
  end
end
describe "consume" do
  test "consume proxy all messages", %{pid: pid} do
    assert :ok = Recvmsg.consume(pid, self(), :infinity)
    send(pid, {:liblink_message, ["foobar"]})
    assert_receive {Liblink.Socket, :data, ["foobar"]}
  end

  test "cancel subscription if proc dies", %{pid: pid} do
    # The consumer lives ~100ms; its death should cancel the subscription.
    {consumer, ref} = spawn_monitor(fn -> :timer.sleep(100) end)
    assert :ok == Recvmsg.consume(pid, consumer, :infinity)
    assert_receive {:DOWN, ^ref, :process, _, _}, :infinity
    # XXX: recv only works if the recvmsg halts the consumer
    assert {:error, :empty} == Recvmsg.recvmsg(pid, :infinity)
  end
end
end
| 32.802632 | 74 | 0.660249 |
f74474fa757712d7dd932c5e78986de2e1dd0ff3 | 5,739 | exs | Elixir | test/teslamate/vehicles/vehicle/updating_test.exs | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 1 | 2021-11-03T02:08:43.000Z | 2021-11-03T02:08:43.000Z | test/teslamate/vehicles/vehicle/updating_test.exs | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 171 | 2020-07-08T18:42:57.000Z | 2022-03-23T00:55:30.000Z | test/teslamate/vehicles/vehicle/updating_test.exs | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 1 | 2021-03-26T15:46:37.000Z | 2021-03-26T15:46:37.000Z | defmodule TeslaMate.Vehicles.Vehicle.UpdatingTest do
use TeslaMate.VehicleCase, async: true
@tag :capture_log
# Full happy-path update: available -> installing (with API hiccups while the
# car is updating) -> installed, asserting the logged state transitions.
test "logs an update cycle", %{test: name} do
  now = DateTime.utc_now()
  now_ts = DateTime.to_unix(now, :millisecond)

  # Scripted API responses, consumed in order by the vehicle process.
  events = [
    {:ok, online_event()},
    {:ok, update_event(now_ts - 1, "available", nil, update_version: "2019.8.5 3aaa23d")},
    {:ok, update_event(now_ts, "installing", "2019.8.4 530d1d3")},
    {:ok, update_event(now_ts + 1, "installing", "2019.8.4 530d1d3")},
    {:ok, update_event(now_ts + 2, "installing", "2019.8.4 530d1d3")},
    # Mid-update the API becomes flaky: no vehicle_state, an error, offline.
    {:ok, %TeslaApi.Vehicle{state: "online", vehicle_state: nil}},
    {:error, :vehicle_unavailable},
    {:ok, %TeslaApi.Vehicle{state: "offline"}},
    {:ok, update_event(now_ts + 5, "installing", "2019.8.4 530d1d3")},
    # Empty status + new version means the update finished.
    {:ok, update_event(now_ts + 6, "", "2019.8.5 3aaa23d")},
    fn -> Process.sleep(10_000) end
  ]

  start_date = DateTime.from_unix!(now_ts, :millisecond)
  end_date = DateTime.from_unix!(now_ts + 6, :millisecond)

  :ok = start_vehicle(name, events, settings: %{use_streaming_api: false})

  d0 = DateTime.from_unix!(now_ts - 1, :millisecond)
  assert_receive {:start_state, car_id, :online, date: ^d0}, 600
  assert_receive {:insert_position, ^car_id, %{}}

  assert_receive {:pubsub,
                  {:broadcast, _server, _topic,
                   %Summary{
                     state: :online,
                     since: s0,
                     update_available: true,
                     update_version: "2019.8.5"
                   }}}

  assert_receive {:start_update, ^car_id, [date: ^start_date]}

  assert_receive {:pubsub,
                  {:broadcast, _server, _topic,
                   %Summary{state: :updating, since: s1, version: "2019.8.4"}}}

  # `since` must advance monotonically across state changes.
  assert DateTime.diff(s0, s1, :nanosecond) < 0

  assert_receive {:finish_update, _upate_id, "2019.8.5 3aaa23d", date: ^end_date}, 200

  d1 = DateTime.from_unix!(now_ts + 6, :millisecond)
  assert_receive {:start_state, ^car_id, :online, date: ^d1}
  assert_receive {:insert_position, ^car_id, %{}}

  assert_receive {:pubsub,
                  {:broadcast, _server, _topic,
                   %Summary{state: :online, since: s2, version: "2019.8.5"}}}

  assert DateTime.diff(s1, s2, :nanosecond) < 0

  refute_receive _
end
@tag :capture_log
# Any non-empty, unknown update status ("foo") combined with a new car
# version is still treated as a finished update.
test "logs an update if the status is not empty", %{test: name} do
  now = DateTime.utc_now()
  now_ts = DateTime.to_unix(now, :millisecond)

  events = [
    {:ok, online_event()},
    {:ok, update_event(now_ts - 1, "available", nil, update_version: "2019.8.5 3aaa23d")},
    {:ok, update_event(now_ts, "installing", "2019.8.4 530d1d3")},
    {:ok, update_event(now_ts + 1, "foo", "2019.8.5 3aaa23d")},
    fn -> Process.sleep(10_000) end
  ]

  start_date = DateTime.from_unix!(now_ts, :millisecond)
  end_date = DateTime.from_unix!(now_ts + 1, :millisecond)

  :ok = start_vehicle(name, events, settings: %{use_streaming_api: false})

  d0 = DateTime.from_unix!(now_ts - 1, :millisecond)
  assert_receive {:start_state, car_id, :online, date: ^d0}, 600
  assert_receive {:insert_position, ^car_id, %{}}

  assert_receive {:pubsub,
                  {:broadcast, _server, _topic,
                   %Summary{
                     state: :online,
                     since: s0,
                     update_available: true,
                     update_version: "2019.8.5"
                   }}}

  assert_receive {:start_update, ^car_id, date: ^start_date}
  assert_receive {:pubsub, {:broadcast, _server, _topic, %Summary{state: :updating, since: s1}}}
  assert DateTime.diff(s0, s1, :nanosecond) < 0

  assert_receive {:finish_update, _upate_id, "2019.8.5 3aaa23d", date: ^end_date}, 200

  d1 = DateTime.from_unix!(now_ts + 1, :millisecond)
  assert_receive {:start_state, ^car_id, :online, date: ^d1}
  assert_receive {:insert_position, ^car_id, %{}}
  assert_receive {:pubsub, {:broadcast, _server, _topic, %Summary{state: :online, since: s2}}}
  assert DateTime.diff(s1, s2, :nanosecond) < 0

  refute_receive _
end
@tag :capture_log
# An update that flips back from "installing" to "available" (same version)
# is treated as cancelled rather than finished.
test "cancels an update", %{test: name} do
  now = DateTime.utc_now()
  now_ts = DateTime.to_unix(now, :millisecond)

  events = [
    {:ok, online_event()},
    {:ok,
     update_event(now_ts, "installing", "2019.8.4 530d1d3", update_version: "2019.8.5 3aaa23d")},
    {:ok,
     update_event(now_ts + 10, "available", "2019.8.4 530d1d3",
       update_version: "2019.8.5 3aaa23d"
     )},
    fn -> Process.sleep(10_000) end
  ]

  :ok = start_vehicle(name, events, settings: %{use_streaming_api: false})

  d0 = DateTime.from_unix!(now_ts, :millisecond)
  assert_receive {:start_state, car_id, :online, date: ^d0}
  assert_receive {:insert_position, ^car_id, %{}}
  assert_receive {:pubsub, {:broadcast, _server, _topic, %Summary{state: :online}}}

  date = DateTime.from_unix!(now_ts, :millisecond)
  assert_receive {:start_update, ^car_id, date: ^date}

  assert_receive {:pubsub,
                  {:broadcast, _server, _topic,
                   %Summary{state: :updating, version: "2019.8.4", update_version: "2019.8.5"}}}

  assert_receive {:cancel_update, _upate_id}, 200

  d1 = DateTime.from_unix!(now_ts + 10, :millisecond)
  assert_receive {:start_state, ^car_id, :online, date: ^d1}, 600
  assert_receive {:insert_position, ^car_id, %{}}

  # Version is unchanged and the update remains advertised as available.
  assert_receive {:pubsub,
                  {:broadcast, _server, _topic,
                   %Summary{state: :online, version: "2019.8.4", update_version: "2019.8.5"}}}

  refute_receive _
end
end
| 38.006623 | 99 | 0.607597 |
f7448ac96b10fd90155da8ae64926d20ab15557e | 2,121 | exs | Elixir | config/dev.exs | gildesmarais/website-1 | 7a19bd98d06a064e52fa279e226002e9c3b986f0 | [
"MIT"
] | null | null | null | config/dev.exs | gildesmarais/website-1 | 7a19bd98d06a064e52fa279e226002e9c3b986f0 | [
"MIT"
] | null | null | null | config/dev.exs | gildesmarais/website-1 | 7a19bd98d06a064e52fa279e226002e9c3b986f0 | [
"MIT"
] | null | null | null | import Config
# Configure your database
# NOTE(review): localhost-only dev credentials; never reuse these in prod.
config :hierbautberlin, Hierbautberlin.Repo,
  username: "postgres",
  password: "postgres",
  database: "hierbautberlin_dev",
  hostname: "localhost",
  show_sensitive_data_on_connection_error: true,
  pool_size: 10

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :hierbautberlin, HierbautberlinWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [
    yarn: [
      "run",
      "watch",
      cd: Path.expand("../assets", __DIR__)
    ]
  ]

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
#     mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
#     https: [
#       port: 4001,
#       cipher_suite: :strong,
#       keyfile: "priv/cert/selfsigned_key.pem",
#       certfile: "priv/cert/selfsigned.pem"
#     ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static and templates for browser reloading.
# (Mix merges this with the endpoint config declared above.)
config :hierbautberlin, HierbautberlinWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
      ~r"priv/gettext/.*(po)$",
      ~r"lib/hierbautberlin_web/(live|views)/.*(ex)$",
      ~r"lib/hierbautberlin_web/templates/.*(eex)$"
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 28.28 | 68 | 0.702499 |
f7448d4913ad9d0494c9656be7916ef7360c597c | 5,902 | ex | Elixir | clients/memcache/lib/google_api/memcache/v1beta2/model/instance.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/memcache/lib/google_api/memcache/v1beta2/model/instance.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/memcache/lib/google_api/memcache/v1beta2/model/instance.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Memcache.V1beta2.Model.Instance do
  @moduledoc """
  ## Attributes

  *   `authorizedNetwork` (*type:* `String.t`, *default:* `nil`) - The full name of the Google Compute Engine [network](https://cloud.google.com/vpc/docs/vpc) to which the instance is connected. If left unspecified, the `default` network will be used.
  *   `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time the instance was created.
  *   `discoveryEndpoint` (*type:* `String.t`, *default:* `nil`) - Output only. Endpoint for Discovery API
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - User provided name for the instance only used for display purposes. Cannot be more than 80 characters.
  *   `instanceMessages` (*type:* `list(GoogleApi.Memcache.V1beta2.Model.InstanceMessage.t)`, *default:* `nil`) - List of messages that describe current statuses of memcached instance.
  *   `labels` (*type:* `map()`, *default:* `nil`) - Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. https://cloud.google.com/compute/docs/labeling-resources
  *   `memcacheFullVersion` (*type:* `String.t`, *default:* `nil`) - Output only. The full version of memcached server running on this instance. System automatically determines the full memcached version for an instance based on the input MemcacheVersion. The full version format will be "memcached-1.5.16".
  *   `memcacheNodes` (*type:* `list(GoogleApi.Memcache.V1beta2.Model.Node.t)`, *default:* `nil`) - Output only. List of Memcached nodes. Refer to [Node] message for more details.
  *   `memcacheVersion` (*type:* `String.t`, *default:* `nil`) - The major version of Memcached software. If not provided, latest supported version will be used. Currently the latest supported major version is MEMCACHE_1_5. The minor version will be automatically determined by our system based on the latest supported minor version.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Required. Unique name of the resource in this scope including project and location using the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}` Note: Memcached instances are managed and addressed at regional level so location_id here refers to a GCP region; however, users may choose which zones Memcached nodes within an instances should be provisioned in. Refer to [zones] field for more details.
  *   `nodeConfig` (*type:* `GoogleApi.Memcache.V1beta2.Model.NodeConfig.t`, *default:* `nil`) - Required. Configuration for Memcached nodes.
  *   `nodeCount` (*type:* `integer()`, *default:* `nil`) - Required. Number of nodes in the Memcached instance.
  *   `parameters` (*type:* `GoogleApi.Memcache.V1beta2.Model.MemcacheParameters.t`, *default:* `nil`) - Optional: User defined parameters to apply to the memcached process on each node.
  *   `state` (*type:* `String.t`, *default:* `nil`) - Output only. The state of this Memcached instance.
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time the instance was updated.
  *   `zones` (*type:* `list(String.t)`, *default:* `nil`) - Zones where Memcached nodes should be provisioned in. Memcached nodes will be equally distributed across these zones. If not provided, the service will by default create nodes in all zones in the region for the instance.
  """

  # ModelBase supplies the struct, JSON field registry and decode/2.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
    :authorizedNetwork => String.t(),
    :createTime => DateTime.t(),
    :discoveryEndpoint => String.t(),
    :displayName => String.t(),
    :instanceMessages => list(GoogleApi.Memcache.V1beta2.Model.InstanceMessage.t()),
    :labels => map(),
    :memcacheFullVersion => String.t(),
    :memcacheNodes => list(GoogleApi.Memcache.V1beta2.Model.Node.t()),
    :memcacheVersion => String.t(),
    :name => String.t(),
    :nodeConfig => GoogleApi.Memcache.V1beta2.Model.NodeConfig.t(),
    :nodeCount => integer(),
    :parameters => GoogleApi.Memcache.V1beta2.Model.MemcacheParameters.t(),
    :state => String.t(),
    :updateTime => DateTime.t(),
    :zones => list(String.t())
  }

  # field/1,2,3 is the ModelBase macro; `as:`/`type:` drive nested decoding.
  field(:authorizedNetwork)
  field(:createTime, as: DateTime)
  field(:discoveryEndpoint)
  field(:displayName)
  field(:instanceMessages, as: GoogleApi.Memcache.V1beta2.Model.InstanceMessage, type: :list)
  field(:labels, type: :map)
  field(:memcacheFullVersion)
  field(:memcacheNodes, as: GoogleApi.Memcache.V1beta2.Model.Node, type: :list)
  field(:memcacheVersion)
  field(:name)
  field(:nodeConfig, as: GoogleApi.Memcache.V1beta2.Model.NodeConfig)
  field(:nodeCount)
  field(:parameters, as: GoogleApi.Memcache.V1beta2.Model.MemcacheParameters)
  field(:state)
  field(:updateTime, as: DateTime)
  field(:zones, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Memcache.V1beta2.Model.Instance do
  # Delegate JSON decoding to the model module's own decode/2.
  def decode(value, options),
    do: GoogleApi.Memcache.V1beta2.Model.Instance.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Memcache.V1beta2.Model.Instance do
  # Delegate JSON encoding to the shared model-base encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 64.152174 | 479 | 0.71569 |
f74495ef8dd4dcd4b8b5a260eb661646c98c3c08 | 910 | ex | Elixir | lib/mock_watcher.ex | mpalmer/elixir_mock | ee939960217f89a2387a294cdac6cd755c8a731c | [
"MIT"
] | 25 | 2017-07-17T03:21:58.000Z | 2022-02-12T06:30:33.000Z | lib/mock_watcher.ex | mpalmer/elixir_mock | ee939960217f89a2387a294cdac6cd755c8a731c | [
"MIT"
] | 7 | 2017-04-04T02:10:42.000Z | 2017-06-18T00:21:56.000Z | lib/mock_watcher.ex | wanderanimrod/mockex | ee939960217f89a2387a294cdac6cd755c8a731c | [
"MIT"
] | 2 | 2017-10-16T16:00:37.000Z | 2021-10-09T05:45:04.000Z | defmodule MockWatcher do
@moduledoc false
use GenServer
def start_link(mock_name) do
watcher_name = get_watcher_name_for(mock_name)
GenServer.start_link(__MODULE__, %{calls: []}, name: watcher_name)
end
def init(init_arg) do
{:ok, init_arg}
end
def handle_call({:record_call, fn_name, args}, _from, state) do
calls = state.calls ++ [{fn_name, args}]
{:reply, :ok, %{state | calls: calls}}
end
def handle_call({:call_exists, fn_name, args}, _from, state) do
call_exists = ElixirMock.Matchers.find_call({fn_name, args}, state.calls)
{:reply, {call_exists, state.calls}, state}
end
def handle_call(:clear_calls, _from, state) do
{:reply, :ok, %{state | calls: []}}
end
def handle_call(:list_calls, _from, state) do
{:reply, state.calls, state}
end
def get_watcher_name_for(mock_name) do
:"__elixir_mock__watcher_#{mock_name}"
end
end | 25.277778 | 77 | 0.685714 |
f744a6b890d3e3b361ae8eece5bb07dad8115d67 | 2,994 | ex | Elixir | lib/glimesh_web/emails/email.ex | chriscuris/glimesh.tv | e1a78aa763ea5599d045fdfa65237b94fbb28a23 | [
"MIT"
] | null | null | null | lib/glimesh_web/emails/email.ex | chriscuris/glimesh.tv | e1a78aa763ea5599d045fdfa65237b94fbb28a23 | [
"MIT"
] | null | null | null | lib/glimesh_web/emails/email.ex | chriscuris/glimesh.tv | e1a78aa763ea5599d045fdfa65237b94fbb28a23 | [
"MIT"
defmodule GlimeshWeb.Emails.Email do
  @moduledoc """
  Builders for the transactional emails Glimesh sends.

  Each function returns a Bamboo email struct (built on `user_base_email/0`);
  delivery is not performed here.
  """

  use Bamboo.Phoenix, view: GlimeshWeb.EmailView

  alias Glimesh.Accounts.User
  alias Glimesh.Streams.Channel
  alias Glimesh.Streams.Stream
  alias GlimeshWeb.Router.Helpers, as: Routes

  import Bamboo.Email

  # Common base: HTML/text layouts plus the shared "from" address.
  def user_base_email do
    new_email()
    |> put_html_layout({GlimeshWeb.LayoutView, "email.html"})
    |> put_text_layout({GlimeshWeb.LayoutView, "email.text"})
    |> from("Glimesh <[email protected]>")
  end

  # Announcement that the user's sub/donation buttons were enabled.
  def user_sub_button_enabled(user, url) do
    user_base_email()
    |> to(user.email)
    |> subject("Your Glimesh Sub & Donation buttons are LIVE!")
    |> assign(:user, user)
    |> assign(:url, url)
    |> render(:sub_button)
  end

  # One-off launch announcement email.
  def user_launch_info(user) do
    user_base_email()
    |> to(user.email)
    |> subject("Glimesh Alpha Launch News & Account Updates!")
    |> assign(:user, user)
    |> render(:launch)
  end

  # Email-confirmation message; `url` is the confirmation link.
  def user_confirmation_instructions(user, url) do
    user_base_email()
    |> to(user.email)
    |> subject("Confirm your email with Glimesh!")
    |> assign(:user, user)
    |> assign(:url, url)
    |> render(:user_confirmation)
  end

  # Password-reset message; `url` is the reset link.
  def user_reset_password_instructions(user, url) do
    user_base_email()
    |> to(user.email)
    |> subject("Reset your password on Glimesh!")
    |> assign(:user, user)
    |> assign(:url, url)
    |> render(:user_reset_password)
  end

  # Email-change confirmation; `url` is the update link.
  def user_update_email_instructions(user, url) do
    user_base_email()
    |> to(user.email)
    |> subject("Change your email on Glimesh!")
    |> assign(:user, user)
    |> assign(:url, url)
    |> render(:user_update_email)
  end

  # Plain-text alert sent to an admin when a user is reported.
  # Reply-To is set to the reporter so the admin can follow up directly.
  def user_report_alert(admin, reporting_user, reported_user, reason, location, notes) do
    user_base_email()
    |> to(admin.email)
    |> put_header("Reply-To", reporting_user.email)
    |> subject("User Alert Report for #{reported_user.displayname}!")
    |> text_body("""
    ==============================
    Hi #{admin.displayname},
    A new user alert has come in!
    Reported User:
    Username: #{reported_user.username}
    Reason: #{reason}
    Location: #{location}
    Notes: #{notes}
    Reported By:
    Username: #{reporting_user.username}
    ==============================
    """)
  end

  # "Streamer went live" notification, with UTM-tagged stream link and an
  # unsubscribe link pointing at the user's notification settings.
  def channel_live(%User{} = user, %User{} = streamer, %Channel{} = channel, %Stream{} = stream) do
    user_base_email()
    |> to(user.email)
    |> subject("#{streamer.displayname} is live on Glimesh!")
    |> assign(:user, user)
    |> assign(:stream_thumbnail, Glimesh.StreamThumbnail.url({stream.thumbnail, stream}))
    |> assign(:stream_title, channel.title)
    |> assign(
      :stream_link,
      Routes.user_stream_url(GlimeshWeb.Endpoint, :index, streamer.username,
        utm_source: "follow_alert",
        utm_medium: "email"
      )
    )
    |> assign(:unsubscribe_link, Routes.user_settings_url(GlimeshWeb.Endpoint, :notifications))
    |> assign(:streamer_name, streamer.displayname)
    |> render(:channel_live)
  end
end
| 28.245283 | 99 | 0.644623 |
f744bab4106f89d95bfe0c770df1918b858390ac | 1,105 | ex | Elixir | lib/mix/tasks/h3_geojson_to_db.ex | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 32 | 2021-04-22T01:55:31.000Z | 2022-02-25T13:17:21.000Z | lib/mix/tasks/h3_geojson_to_db.ex | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 58 | 2021-06-04T18:42:59.000Z | 2022-03-31T07:17:01.000Z | lib/mix/tasks/h3_geojson_to_db.ex | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
defmodule Mix.Tasks.H3GeojsonToDb do
  @moduledoc """
  Mix task that loads H3 resolution-9 coverage polygons from a bundled
  GeoJSON file and inserts each one into the database as a
  `Mappers.H3.Res9` record.
  """
  use Mix.Task

  @shortdoc "Sync Mappers Opt In State on Console with Database"
  def run(_) do
    # Boot the application so the Ecto repo is started and configured.
    Mix.Task.run("app.start")

    features = getFeatures()

    Enum.each(features.geometries, fn feature ->
      # Attributes for one hexagon row. SRID 4326 = WGS84 lon/lat.
      geom_v = %{
        id: UUID.uuid4(),
        state: "unmapped",
        avg_rssi: Kernel.round(feature.properties["rssi"]),
        geom: %Geo.Polygon{coordinates: feature.coordinates, srid: 4326}
      }

      %Mappers.H3.Res9{}
      |> Mappers.H3.Res9.changeset(geom_v)
      |> Mappers.Repo.insert()
      |> case do
        {:ok, _} ->
          IO.puts("Insert Successful")

        {:error, changeset} ->
          # Bug fix: Ecto.Changeset does not implement String.Chars, so bare
          # interpolation ("#{changeset}") raised Protocol.UndefinedError.
          IO.puts("Insert Error #{inspect(changeset)}")
      end
    end)
  end

  # Reads and decodes the bundled GeoJSON fixture into Geo structs.
  # Kept with its original (camelCase) name for backward compatibility.
  def getFeatures() do
    "../../../coverage_uplinks_h3_9.geojson"
    |> Path.expand(__DIR__)
    |> File.read!()
    |> Jason.decode!()
    |> Geo.JSON.decode!()
  end
end
| 27.625 | 85 | 0.602715 |
f744bd20c1fc411c431ff6c91d09d734b6f8e8a1 | 155 | exs | Elixir | .formatter.exs | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
] | 30 | 2021-02-05T18:50:38.000Z | 2022-03-12T22:42:29.000Z | .formatter.exs | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
] | 19 | 2021-01-15T19:14:24.000Z | 2022-02-05T14:57:18.000Z | .formatter.exs | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
# Options consumed by `mix format`.
[
  # Import formatter rules exported by these dependencies.
  import_deps: [:phoenix, :surface, :ecto],
  # File patterns the formatter should process.
  inputs: [
    "{mix,.formatter}.exs",
    "{config,lib,test}/**/*.{ex,exs}",
    "priv/catalogue/**"
  ]
]
| 17.222222 | 43 | 0.522581 |
f744fbc332b054bef6807924c44a90b9eec82abf | 210 | exs | Elixir | test/phoenix_hello_web/controllers/page_controller_test.exs | haojiwu/phoenix_hello | aee3d40a018e73b24767de1be5692a9cd9cf2017 | [
"MIT"
] | 6 | 2019-06-11T22:16:47.000Z | 2021-05-19T10:32:58.000Z | test/phoenix_hello_web/controllers/page_controller_test.exs | haojiwu/phoenix_hello | aee3d40a018e73b24767de1be5692a9cd9cf2017 | [
"MIT"
] | 2 | 2021-08-22T19:41:42.000Z | 2021-12-16T15:26:14.000Z | test/phoenix_hello_web/controllers/page_controller_test.exs | haojiwu/phoenix_hello | aee3d40a018e73b24767de1be5692a9cd9cf2017 | [
"MIT"
defmodule PhoenixHelloWeb.PageControllerTest do
  use PhoenixHelloWeb.ConnCase

  test "GET /", %{conn: conn} do
    # The root page should render with HTTP 200 and contain the
    # default Phoenix greeting.
    conn = get(conn, "/")
    assert html_response(conn, 200) =~ "Welcome to Phoenix!"
  end
end
| 23.333333 | 60 | 0.695238 |
f7450363899dc478873d73830d407553a255a02d | 8,719 | ex | Elixir | apps/ewallet_db/lib/ewallet_db/token.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_db/lib/ewallet_db/token.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_db/lib/ewallet_db/token.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.Token do
  @moduledoc """
  Ecto Schema representing tokens.
  """
  use Ecto.Schema
  use Arc.Ecto.Schema
  use Utils.Types.ExternalID
  use ActivityLogger.ActivityLogging
  import Ecto.{Changeset, Query}
  import EWalletDB.Helpers.Preloader
  import EWalletDB.Validator
  alias Ecto.UUID
  alias EWalletDB.{Account, Repo, Token}
  alias ExULID.ULID

  @primary_key {:uuid, UUID, autogenerate: true}
  @timestamps_opts [type: :naive_datetime_usec]

  schema "token" do
    # tok_eur_01cbebcdjprhpbzp1pt7h0nzvt
    field(:id, :string)
    # "eur"
    field(:symbol, :string)
    # "EUR"
    field(:iso_code, :string)
    # "Euro"
    field(:name, :string)
    # Official currency of the European Union
    field(:description, :string)
    # "€"
    field(:short_symbol, :string)
    # "Cent"
    field(:subunit, :string)
    # 100
    field(:subunit_to_unit, Utils.Types.Integer)
    # true
    field(:symbol_first, :boolean)
    # "€"
    field(:html_entity, :string)
    # "978"
    field(:iso_numeric, :string)
    # 1
    field(:smallest_denomination, :integer)
    # false
    field(:locked, :boolean)
    field(:avatar, EWalletDB.Uploaders.Avatar.Type)
    field(:metadata, :map, default: %{})
    field(:encrypted_metadata, EWalletDB.Encrypted.Map, default: %{})
    field(:enabled, :boolean)
    field(:blockchain_address, :string)

    belongs_to(
      :account,
      Account,
      foreign_key: :account_uuid,
      references: :uuid,
      type: UUID
    )

    timestamps()
    activity_logging()
  end

  # Changeset used on creation: casts all writable fields, enforces
  # uniqueness/length constraints and generates the external "tok_..." id.
  defp changeset(%Token{} = token, attrs) do
    token
    |> cast_and_validate_required_for_activity_log(
      attrs,
      cast: [
        :symbol,
        :iso_code,
        :name,
        :description,
        :short_symbol,
        :subunit,
        :subunit_to_unit,
        :symbol_first,
        :html_entity,
        :iso_numeric,
        :smallest_denomination,
        :locked,
        :account_uuid,
        :blockchain_address,
        :metadata,
        :encrypted_metadata
      ],
      required: [
        :symbol,
        :name,
        :subunit_to_unit,
        :account_uuid
      ],
      encrypted: [:encrypted_metadata]
    )
    |> validate_number(
      :subunit_to_unit,
      greater_than: 0,
      less_than_or_equal_to: 1_000_000_000_000_000_000
    )
    |> validate_immutable(:symbol)
    |> unique_constraint(:symbol)
    |> unique_constraint(:iso_code)
    |> unique_constraint(:name)
    |> unique_constraint(:short_symbol)
    |> unique_constraint(:iso_numeric)
    |> unique_constraint(:blockchain_address)
    |> validate_length(:symbol, count: :bytes, max: 255)
    |> validate_length(:iso_code, count: :bytes, max: 255)
    |> validate_length(:name, count: :bytes, max: 255)
    |> validate_length(:description, count: :bytes, max: 255)
    |> validate_length(:short_symbol, count: :bytes, max: 255)
    |> validate_length(:subunit, count: :bytes, max: 255)
    |> validate_length(:html_entity, count: :bytes, max: 255)
    |> validate_length(:iso_numeric, count: :bytes, max: 255)
    |> validate_length(:blockchain_address, count: :bytes, max: 255)
    |> foreign_key_constraint(:account_uuid)
    |> assoc_constraint(:account)
    |> set_id(prefix: "tok_")
  end

  # Changeset for updates: a narrower cast list (symbol/subunit are immutable).
  defp update_changeset(%Token{} = token, attrs) do
    token
    |> cast_and_validate_required_for_activity_log(
      attrs,
      cast: [
        :iso_code,
        :name,
        :description,
        :short_symbol,
        :symbol_first,
        :html_entity,
        :iso_numeric,
        :blockchain_address,
        :metadata,
        :encrypted_metadata
      ],
      required: [
        :name
      ],
      encrypted: [:encrypted_metadata]
    )
    |> validate_length(:iso_code, count: :bytes, max: 255)
    |> validate_length(:name, count: :bytes, max: 255)
    |> validate_length(:description, count: :bytes, max: 255)
    |> validate_length(:short_symbol, count: :bytes, max: 255)
    |> validate_length(:html_entity, count: :bytes, max: 255)
    |> validate_length(:iso_numeric, count: :bytes, max: 255)
    |> validate_length(:blockchain_address, count: :bytes, max: 255)
    |> unique_constraint(:blockchain_address)
    |> unique_constraint(:iso_code)
    |> unique_constraint(:name)
    |> unique_constraint(:short_symbol)
    |> unique_constraint(:iso_numeric)
  end

  # Changeset toggling only the `enabled` flag.
  defp enable_changeset(%Token{} = token, attrs) do
    token
    |> cast_and_validate_required_for_activity_log(attrs, cast: [:enabled], required: [:enabled])
  end

  # Generates the external id ("tok_<symbol>_<ulid>") once, on first insert.
  defp set_id(changeset, opts) do
    case get_field(changeset, :id) do
      nil ->
        symbol = get_field(changeset, :symbol)
        ulid = ULID.generate() |> String.downcase()
        put_change(changeset, :id, build_id(symbol, ulid, opts))

      _ ->
        changeset
    end
  end

  defp build_id(symbol, ulid, opts) do
    case opts[:prefix] do
      nil ->
        "#{symbol}_#{ulid}"

      prefix ->
        "#{prefix}#{symbol}_#{ulid}"
    end
  end

  @doc """
  Returns all tokens in the system
  """
  def all do
    Repo.all(Token)
  end

  @doc """
  Returns a query of Tokens that have a blockchain address
  """
  # Spec fix: these query_all_* functions return composable queries,
  # not lists of tokens.
  @spec query_all_blockchain(Ecto.Queryable.t()) :: Ecto.Queryable.t()
  def query_all_blockchain(query \\ Token) do
    where(query, [t], not is_nil(t.blockchain_address))
  end

  @doc """
  Returns a query of Tokens that have an address matching in the provided list
  """
  @spec query_all_by_blockchain_addresses([String.t()], Ecto.Queryable.t()) ::
          Ecto.Queryable.t()
  def query_all_by_blockchain_addresses(addresses, query \\ Token) do
    where(query, [t], t.blockchain_address in ^addresses)
  end

  @doc """
  Returns a query of Tokens that have an id matching in the provided list
  """
  @spec query_all_by_ids([String.t()], Ecto.Queryable.t()) :: Ecto.Queryable.t()
  def query_all_by_ids(ids, query \\ Token) do
    where(query, [t], t.id in ^ids)
  end

  @spec avatar_changeset(Ecto.Changeset.t() | %Token{}, map()) ::
          Ecto.Changeset.t() | %Token{} | no_return()
  defp avatar_changeset(changeset, attrs) do
    changeset
    |> cast_and_validate_required_for_activity_log(attrs)
    |> cast_attachments(attrs, [:avatar])
  end

  @doc """
  Stores an avatar for the given token.
  """
  @spec store_avatar(%Token{}, map()) :: %Token{} | nil | no_return()
  def store_avatar(%Token{} = token, %{"originator" => originator} = attrs) do
    # "" and "null" both mean "remove the avatar".
    attrs =
      attrs["avatar"]
      |> case do
        "" -> %{avatar: nil}
        "null" -> %{avatar: nil}
        avatar -> %{avatar: avatar}
      end
      |> Map.put(:originator, originator)

    changeset = avatar_changeset(token, attrs)

    case Repo.update_record_with_activity_log(changeset) do
      {:ok, token} -> get(token.id)
      result -> result
    end
  end

  @doc """
  Create a new token with the passed attributes.
  """
  def insert(attrs) do
    changeset = changeset(%Token{}, attrs)

    case Repo.insert_record_with_activity_log(changeset) do
      {:ok, token} ->
        {:ok, get(token.id)}

      {:error, changeset} ->
        {:error, changeset}
    end
  end

  @doc """
  Update an existing token with the passed attributes.
  """
  def update(token, attrs) do
    token
    |> update_changeset(attrs)
    |> Repo.update_record_with_activity_log()
  end

  @doc """
  Retrieve a token by id.
  """
  # Spec fix: this spec was previously (and incorrectly) named `get_by`.
  @spec get(String.t() | nil, keyword()) :: %Token{} | nil
  def get(id, opts \\ [])
  def get(nil, _), do: nil

  def get(id, opts) do
    get_by([id: id], opts)
  end

  @doc """
  Retrieves a token using one or more fields.
  """
  @spec get_by(fields :: map() | keyword(), opts :: keyword()) :: %Token{} | nil
  def get_by(fields, opts \\ []) do
    Token
    |> Repo.get_by(fields)
    |> preload_option(opts)
  end

  @doc """
  Retrieve a list of tokens by supplying a list of IDs.
  """
  def get_all(ids) do
    Repo.all(from(m in Token, where: m.id in ^ids))
  end

  @doc """
  Enables or disables a token.
  """
  def enable_or_disable(token, attrs) do
    token
    |> enable_changeset(attrs)
    |> Repo.update_record_with_activity_log()
  end
end
| 27.07764 | 97 | 0.635279 |
f74517fc0cabf4d35aa35f934abf2e78d148fadd | 724 | ex | Elixir | lib/crawly/pipeline.ex | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
] | 1 | 2020-10-02T18:33:23.000Z | 2020-10-02T18:33:23.000Z | lib/crawly/pipeline.ex | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
] | null | null | null | lib/crawly/pipeline.ex | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
defmodule Crawly.Pipeline do
  @moduledoc """
  A behaviour module for implementing item pipelines.

  A pipeline is a module which takes a given item and executes a
  `run` callback on it.

  A state argument is used to share common information across multiple
  items.

  An `opts` argument is used to pass configuration to the pipeline through tuple-based declarations.
  """
  @callback run(item :: map, state :: map()) ::
              {new_item :: map, new_state :: map}
              | {false, new_state :: map}
  @callback run(item :: map, state :: map(), args :: list(any())) ::
              {new_item :: map, new_state :: map}
              | {false, new_state :: map}

  @optional_callbacks run: 3
end
| 32.909091 | 100 | 0.640884 |
f74525371b5b8ab536023ea47410bcfb7b32dd94 | 984 | ex | Elixir | 2017/elixir/day20/lib/viz.ex | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day20/lib/viz.ex | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day20/lib/viz.ex | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
defmodule Viz do
  @moduledoc false

  # Matches one particle line: position, velocity and acceleration triples.
  @regex ~r/p=<(-?\d+),(-?\d+),(-?\d+)>, v=<(-?\d+),(-?\d+),(-?\d+)>, a=<(-?\d+),(-?\d+),(-?\d+)>/

  # Parses `filename` and prints the particle positions for `n` steps.
  def run(filename, n) do
    filename
    |> File.stream!([:utf8], :line)
    |> Enum.map(&parse_line/1)
    |> out(n)
  end

  # Extracts the nine integers [px, py, pz, vx, vy, vz, ax, ay, az].
  def parse_line(line) do
    captures = Regex.run(@regex, line, capture: :all_but_first)
    Enum.map(captures, &String.to_integer/1)
  end

  # Prints the current state, then advances every particle one tick
  # (velocity += acceleration, position += new velocity) until done.
  def out(_state, 0), do: :ok

  def out(state, steps) do
    print(state)

    next_state =
      for [px, py, pz, vx, vy, vz, ax, ay, az] <- state do
        [px + vx + ax, py + vy + ay, pz + vz + az, vx + ax, vy + ay, vz + az, ax, ay, az]
      end

    out(next_state, steps - 1)
  end

  # Writes all x/y coordinates on a single line, space-separated.
  def print(state) do
    state
    |> Enum.map_join("", fn [x, y | _] -> " #{x} #{y}" end)
    |> IO.puts()
  end
end
| 21.866667 | 98 | 0.474593 |
f7452a89c116b4e945e41511cfff41fd3f4cf544 | 470 | ex | Elixir | lib/salty/stream.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
] | 23 | 2017-07-04T19:29:43.000Z | 2021-02-16T19:44:38.000Z | lib/salty/stream.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
] | 16 | 2017-08-13T15:31:25.000Z | 2019-06-19T14:44:13.000Z | lib/salty/stream.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
] | 19 | 2017-08-10T19:01:49.000Z | 2021-06-20T01:34:59.000Z | defmodule Salty.Stream do
defmacro __using__(_opts) do
quote do
@behaviour Salty.Stream
alias Salty.Nif, as: C
end
end
def primitive do
Salty.Stream.Xsalsa20
end
@callback noncebytes() :: non_neg_integer()
@callback keybytes() :: non_neg_integer()
@callback stream(pos_integer(), binary(), binary()) :: {:ok, binary()} | {:error, atom()}
@callback xor(binary(), binary(), binary()) :: {:ok, binary()} | {:error, atom()}
end
| 21.363636 | 91 | 0.634043 |
f74532a457306a4cd8e5a91dbddcf25a602091ac | 421 | ex | Elixir | lib/hl7/2.5/segments/aff.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/segments/aff.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/segments/aff.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
defmodule HL7.V2_5.Segments.AFF do
  # HL7 v2.5 AFF segment: professional affiliation fields, declared via
  # the HL7.Segment macro. Composite fields reference v2.5 data types
  # (XON organization, XAD address, DR date range); nil fields are plain.
  @moduledoc false
  require Logger
  alias HL7.V2_5.{DataTypes}

  use HL7.Segment,
    fields: [
      segment: nil,
      set_id_aff: nil,
      professional_organization: DataTypes.Xon,
      professional_organization_address: DataTypes.Xad,
      professional_organization_affiliation_date_range: DataTypes.Dr,
      professional_affiliation_additional_information: nil
    ]
end
| 24.764706 | 69 | 0.743468 |
f7458d73fff756632c1987a1a9a0fb926ea825ec | 2,654 | ex | Elixir | elixir/epi_book/lib/chapter_9/binary_search_tree.ex | wtfleming/data-structures-and-algorithms | f3d55b6642ee0219606c65ac6f1f8c5b402bdf70 | [
"MIT"
] | null | null | null | elixir/epi_book/lib/chapter_9/binary_search_tree.ex | wtfleming/data-structures-and-algorithms | f3d55b6642ee0219606c65ac6f1f8c5b402bdf70 | [
"MIT"
] | null | null | null | elixir/epi_book/lib/chapter_9/binary_search_tree.ex | wtfleming/data-structures-and-algorithms | f3d55b6642ee0219606c65ac6f1f8c5b402bdf70 | [
"MIT"
defmodule Chapter9.BinarySearchTree do
  @moduledoc """
  A simple immutable binary search tree.

  An empty tree is the atom `:empty_node`; every other node is a
  `%Chapter9.BinarySearchTree{}` struct with a value and two subtrees.
  """

  defstruct [:value, :left, :right]

  alias Chapter9.BinarySearchTree, as: BST

  @type t :: %BST{value: number, left: BST.t, right: BST.t} | :empty_node

  @doc "Returns a new, empty tree."
  @spec new() :: :empty_node
  def new(), do: :empty_node

  @doc "Inserts `value`; inserting an existing value leaves the tree unchanged."
  @spec insert(BST.t, number) :: BST.t
  def insert(:empty_node, value), do: %BST{value: value, left: :empty_node, right: :empty_node}

  def insert(%BST{value: v, left: l, right: r}, node) do
    cond do
      v == node -> %BST{value: v, left: l, right: r}
      v < node -> %BST{value: v, left: l, right: insert(r, node)}
      v > node -> %BST{value: v, left: insert(l, node), right: r}
    end
  end

  @doc "Returns the left subtree (`:empty_node` for an empty tree)."
  @spec left(BST.t) :: BST.t
  # Robustness: previously raised FunctionClauseError on an empty tree.
  def left(:empty_node), do: :empty_node
  def left(%BST{left: left}), do: left

  @doc "Returns the right subtree (`:empty_node` for an empty tree)."
  @spec right(BST.t) :: BST.t
  def right(:empty_node), do: :empty_node
  def right(%BST{right: right}), do: right

  @doc "Checks whether `number` is present in the tree."
  @spec contains?(BST.t, number) :: boolean
  def contains?(:empty_node, _), do: false

  def contains?(%BST{value: value, left: left, right: right}, number) do
    cond do
      value == number -> true
      number < value -> contains?(left, number)
      number > value -> contains?(right, number)
    end
  end

  @doc "Returns the tree's values in ascending (in-order) order."
  @spec in_order_traversal(BST.t) :: [number]
  def in_order_traversal(tree), do: in_order(tree, [])

  # Accumulator-based walk: prepends onto `acc`, so the traversal is O(n)
  # instead of rebuilding intermediate lists with `++` at every node.
  @spec in_order(BST.t, [number]) :: [number]
  defp in_order(:empty_node, acc), do: acc

  defp in_order(%BST{value: value, left: left, right: right}, acc) do
    in_order(left, [value | in_order(right, acc)])
  end

  @doc "Returns the smallest value, or `nil` if the tree is empty."
  @spec min(BST.t) :: number | nil
  def min(:empty_node), do: nil
  def min(%BST{value: value, left: :empty_node}), do: value
  def min(%BST{left: left}), do: min(left)

  @doc "Returns the largest value, or `nil` if the tree is empty."
  @spec max(BST.t) :: number | nil
  def max(:empty_node), do: nil
  def max(%BST{value: value, right: :empty_node}), do: value
  def max(%BST{right: right}), do: max(right)

  @doc """
  Returns the lowest common ancestor of `a` and `b`, both assumed present.
  When `a == b`, the value itself is returned.
  """
  @spec lowest_common_ancestor(BST.t, number, number) :: number
  def lowest_common_ancestor(tree, a, b) when a < b, do: lca(tree, a, b)
  def lowest_common_ancestor(tree, a, b) when a > b, do: lca(tree, b, a)
  # Bug fix: the original had no clause for a == b and raised
  # FunctionClauseError; lca/3 already handles smaller == larger correctly.
  def lowest_common_ancestor(tree, a, b) when a == b, do: lca(tree, a, b)

  @spec lca(BST.t, number, number) :: number
  defp lca(%BST{value: value}, smaller, larger) when smaller < value and larger > value do
    value
  end

  defp lca(%BST{value: value, left: left}, smaller, larger)
       when smaller < value and larger < value do
    lca(left, smaller, larger)
  end

  defp lca(%BST{value: value, right: right}, smaller, larger)
       when smaller > value and larger > value do
    lca(right, smaller, larger)
  end

  defp lca(%BST{value: value}, smaller, larger) when smaller == value or larger == value do
    value
  end
end
| 30.505747 | 104 | 0.64318 |
f7458fad81fd8d89db589d6f282393c1fb057e37 | 282 | exs | Elixir | test/price_register_web/views/layout_view_test.exs | civictech-ie/price-register | 9112c183722b76c09e98063ec2829dc2d0b55113 | [
"Apache-2.0"
] | 4 | 2021-03-09T19:35:37.000Z | 2022-03-20T23:31:19.000Z | test/price_register_web/views/layout_view_test.exs | civictech-ie/price-register | 9112c183722b76c09e98063ec2829dc2d0b55113 | [
"Apache-2.0"
] | null | null | null | test/price_register_web/views/layout_view_test.exs | civictech-ie/price-register | 9112c183722b76c09e98063ec2829dc2d0b55113 | [
"Apache-2.0"
] | null | null | null | defmodule PriceRegisterWeb.LayoutViewTest do
use PriceRegisterWeb.ConnCase, async: true
# When testing helpers, you may want to import Phoenix.HTML and
# use functions such as safe_to_string() to convert the helper
# result into an HTML string.
# import Phoenix.HTML
end
| 31.333333 | 65 | 0.776596 |
f745b55cab6cf36f57864cbcd9df387320415b10 | 1,685 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_return_line_item_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/content/lib/google_api/content/v2/model/orders_return_line_item_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_return_line_item_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.OrdersReturnLineItemResponse do
  @moduledoc """
  ## Attributes

  *   `executionStatus` (*type:* `String.t`, *default:* `nil`) - The status of the execution.
  *   `kind` (*type:* `String.t`, *default:* `content#ordersReturnLineItemResponse`) - Identifies what kind of resource this is. Value: the fixed string "content#ordersReturnLineItemResponse".
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :executionStatus => String.t(),
          :kind => String.t()
        }

  # JSON field declarations; field/1 comes from GoogleApi.Gax.ModelBase.
  field(:executionStatus)
  field(:kind)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersReturnLineItemResponse do
  # Delegate JSON decoding to the model module's own decode/2.
  def decode(value, options),
    do: GoogleApi.Content.V2.Model.OrdersReturnLineItemResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersReturnLineItemResponse do
  # Delegate JSON encoding to the shared model-base encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 33.7 | 192 | 0.738872 |
f745bf3a0a8fd6872712388c2af382d87bac5d89 | 2,985 | ex | Elixir | test/support/mocker_server.ex | kintu-games/elidactyl | 2d95694ef4a85c72e962379d8d12fc08bd8352ac | [
"MIT"
] | null | null | null | test/support/mocker_server.ex | kintu-games/elidactyl | 2d95694ef4a85c72e962379d8d12fc08bd8352ac | [
"MIT"
] | null | null | null | test/support/mocker_server.ex | kintu-games/elidactyl | 2d95694ef4a85c72e962379d8d12fc08bd8352ac | [
"MIT"
defmodule Elidactyl.MockedServer do
  @moduledoc false

  alias Elidactyl.MockedServer.Router
  alias Elidactyl.MockedServer.Factory
  alias Elidactyl.MockedServer.ExternalSchema.List, as: ExternalList
  alias Elidactyl.MockedServer.ExternalSchema.NullResource

  # Object kinds the mocked server knows how to store.
  @objs ~w[server database nest egg egg_variable user server_subuser node node_created_response allocation stats]a

  @type record :: %{
          required(:object) => binary,
          required(:attributes) => map
        }

  # Starts the mock HTTP API on a fixed local port.
  @spec start :: {:ok, pid} | {:error, any}
  def start do
    Plug.Cowboy.http(Router, [], port: 8081)
  end

  @spec stop :: :ok | {:error, any}
  def stop do
    Plug.Cowboy.shutdown(Router.HTTP)
  end

  # Fetches a stored record, or a NullResource placeholder when absent.
  @spec get(Factory.obj(), any) :: struct | NullResource
  def get(obj, id) when obj in @objs do
    storage_get({obj, id}, %NullResource{})
  end

  # Lists all stored records of a kind, wrapped in the external list schema.
  @spec list(Factory.obj()) :: ExternalList.t()
  def list(obj) when obj in @objs do
    data = {obj, :ids} |> storage_get([]) |> Enum.map(&storage_get({obj, &1}))
    %ExternalList{data: data}
  end

  # Builds a record via the factory, stores it and indexes its id.
  @spec put(Factory.obj()) :: struct
  @spec put(Factory.obj(), Factory.attributes()) :: struct
  def put(obj, attributes \\ %{}) when obj in @objs do
    %{attributes: %{id: id}} = record = Factory.build(obj, attributes)
    storage_put({obj, id}, record)
    storage_update({obj, :ids}, [], &Enum.uniq([id | &1]))
    record
  end

  # Adds (or replaces) an entry under the record's :relationships map.
  @spec add_relationship(record, atom, any) :: record
  def add_relationship(%{attributes: attributes} = record, key, value) do
    rels = attributes |> Map.get(:relationships, %{}) |> Map.put(key, value)
    %{record | attributes: Map.put(attributes, :relationships, rels)}
  end

  # Removes a record and drops its id from the kind's index.
  @spec delete(Factory.obj(), any) :: :ok
  def delete(obj, id) when obj in @objs do
    storage_delete({obj, id})
    storage_update({obj, :ids}, [], &List.delete(&1, id))
    :ok
  end

  # Erases every stored record and index for all object kinds.
  @spec cleanup() :: :ok
  def cleanup do
    Enum.each(@objs, fn obj ->
      {obj, :ids} |> storage_get([]) |> Enum.each(&storage_delete({obj, &1}))
      storage_delete({obj, :ids})
    end)

    :ok
  end

  ## Thin wrappers over :persistent_term, namespaced via key/1.

  @spec storage_get(:persistent_term.key(), :persistent_term.value()) :: :persistent_term.value()
  def storage_get(key, default \\ nil) do
    :persistent_term.get(key(key), default)
  end

  @spec storage_put(:persistent_term.key(), :persistent_term.value()) :: :ok
  def storage_put(key, value) do
    :persistent_term.put(key(key), value)
  end

  @spec storage_delete(:persistent_term.key()) :: :ok
  def storage_delete(key) do
    :persistent_term.erase(key(key))
    :ok
  end

  @spec storage_update(:persistent_term.key(), any, (any -> :persistent_term.value())) :: :ok
  def storage_update(key, default, fun) do
    key = key(key)
    :persistent_term.put(key, fun.(:persistent_term.get(key, default)))
  end

  # Namespaces a storage key under this module so unrelated :persistent_term
  # entries are never touched: tuple keys get __MODULE__ prepended, any other
  # key is wrapped in a 2-tuple.
  # Spec fix: the previous spec `{module, pid, any}` described an abandoned
  # per-pid scheme (see the old commented-out `self()` fragments) and never
  # matched what the function returns.
  @spec key(any) :: tuple
  def key(key) when is_tuple(key), do: Tuple.insert_at(key, 0, __MODULE__)
  def key(key), do: {__MODULE__, key}
end
| 31.09375 | 114 | 0.654941 |
f745cfaa664da945b0963308401eb98037b0deea | 298 | exs | Elixir | config/config.exs | maennchen/cldr_messages | eef6d643e705ba3ecd8b6bb6ce56377c0e036f7f | [
"Apache-2.0"
] | 6 | 2019-06-09T11:55:49.000Z | 2021-08-24T07:58:32.000Z | config/config.exs | maennchen/cldr_messages | eef6d643e705ba3ecd8b6bb6ce56377c0e036f7f | [
"Apache-2.0"
] | 10 | 2020-11-14T00:29:20.000Z | 2022-01-18T21:19:59.000Z | config/config.exs | maennchen/cldr_messages | eef6d643e705ba3ecd8b6bb6ce56377c0e036f7f | [
"Apache-2.0"
] | 6 | 2020-02-12T11:37:47.000Z | 2022-01-03T14:36:52.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the `Config` module.
import Config

# Default CLDR backend and locale for ex_cldr.
config :ex_cldr,
  default_backend: MyApp.Cldr,
  default_locale: "en"

# ex_money reuses the same CLDR backend.
config :ex_money,
  default_cldr_backend: MyApp.Cldr

# ExDoc option.
config :ex_doc,
  pure_links: true
| 21.285714 | 61 | 0.778523 |
f745e6ffb8c78cbd48d85cd663a91b3afab85b2d | 38,579 | ex | Elixir | lib/elixir/lib/code.ex | ludios/elixir | 1a4018adb9863c1265b306028a6b5d1146479266 | [
"Apache-2.0"
] | 1 | 2019-05-07T12:01:44.000Z | 2019-05-07T12:01:44.000Z | lib/elixir/lib/code.ex | ludios/elixir | 1a4018adb9863c1265b306028a6b5d1146479266 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/code.ex | ludios/elixir | 1a4018adb9863c1265b306028a6b5d1146479266 | [
"Apache-2.0"
] | null | null | null | defmodule Code do
@moduledoc """
Utilities for managing code compilation, code evaluation, and code loading.
This module complements Erlang's [`:code` module](http://www.erlang.org/doc/man/code.html)
to add behaviour which is specific to Elixir. Almost all of the functions in this module
have global side effects on the behaviour of Elixir.
## Working with files
This module contains three functions for compiling and evaluating files.
Here is a summary of them and their behaviour:
* `require_file/2` - compiles a file and tracks its name. It does not
compile the file again if it has been previously required.
* `compile_file/2` - compiles a file without tracking its name. Compiles the
file multiple times when invoked multiple times.
* `eval_file/2` - evaluates the file contents without tracking its name. It
returns the result of the last expression in the file, instead of the modules
defined in it.
In a nutshell, the first must be used when you want to keep track of the files
handled by the system, to avoid the same file from being compiled multiple
times. This is common in scripts.
`compile_file/2` must be used when you are interested in the modules defined in a
file, without tracking. `eval_file/2` should be used when you are interested in
the result of evaluating the file rather than the modules it defines.
"""
@doc """
Lists all required files.
## Examples
Code.require_file("../eex/test/eex_test.exs")
List.first(Code.required_files()) =~ "eex_test.exs"
#=> true
"""
@doc since: "1.7.0"
@spec required_files() :: [binary]
def required_files do
:elixir_code_server.call(:required)
end
# TODO: Deprecate on v1.9
@doc false
def loaded_files do
required_files()
end
@doc """
Removes files from the required files list.
The modules defined in the file are not removed;
calling this function only removes them from the list,
allowing them to be required again.
## Examples
# Require EEx test code
Code.require_file("../eex/test/eex_test.exs")
# Now unrequire all files
Code.unrequire_files(Code.required_files())
# Notice modules are still available
function_exported?(EExTest.Compiled, :before_compile, 0)
#=> true
"""
@doc since: "1.7.0"
@spec unrequire_files([binary]) :: :ok
def unrequire_files(files) do
:elixir_code_server.cast({:unrequire_files, files})
end
# TODO: Deprecate on v1.9
@doc false
def unload_files(files) do
unrequire_files(files)
end
@doc """
Appends a path to the end of the Erlang VM code path list.
This is the list of directories the Erlang VM uses for
finding module code.
The path is expanded with `Path.expand/1` before being appended.
If this path does not exist, an error is returned.
## Examples
Code.append_path(".")
#=> true
Code.append_path("/does_not_exist")
#=> {:error, :bad_directory}
"""
@spec append_path(Path.t()) :: true | {:error, :bad_directory}
def append_path(path) do
:code.add_pathz(to_charlist(Path.expand(path)))
end
@doc """
Prepends a path to the beginning of the Erlang VM code path list.
This is the list of directories the Erlang VM uses for finding
module code.
The path is expanded with `Path.expand/1` before being prepended.
If this path does not exist, an error is returned.
## Examples
Code.prepend_path(".")
#=> true
Code.prepend_path("/does_not_exist")
#=> {:error, :bad_directory}
"""
@spec prepend_path(Path.t()) :: true | {:error, :bad_directory}
def prepend_path(path) do
:code.add_patha(to_charlist(Path.expand(path)))
end
@doc """
Deletes a path from the Erlang VM code path list. This is the list of
directories the Erlang VM uses for finding module code.
The path is expanded with `Path.expand/1` before being deleted. If the
path does not exist, this function returns `false`.
## Examples
Code.prepend_path(".")
Code.delete_path(".")
#=> true
Code.delete_path("/does_not_exist")
#=> false
"""
@spec delete_path(Path.t()) :: boolean
def delete_path(path) do
:code.del_path(to_charlist(Path.expand(path)))
end
@doc """
Evaluates the contents given by `string`.
The `binding` argument is a keyword list of variable bindings.
The `opts` argument is a keyword list of environment options.
**Warning**: `string` can be any Elixir code and will be executed with
the same privileges as the Erlang VM: this means that such code could
compromise the machine (for example by executing system commands).
Don't use `eval_string/3` with untrusted input (such as strings coming
from the network).
## Options
Options can be:
* `:file` - the file to be considered in the evaluation
* `:line` - the line on which the script starts
Additionally, the following scope values can be configured:
* `:aliases` - a list of tuples with the alias and its target
* `:requires` - a list of modules required
* `:functions` - a list of tuples where the first element is a module
and the second a list of imported function names and arity; the list
of function names and arity must be sorted
* `:macros` - a list of tuples where the first element is a module
and the second a list of imported macro names and arity; the list
of function names and arity must be sorted
Notice that setting any of the values above overrides Elixir's default
values. For example, setting `:requires` to `[]` will no longer
automatically require the `Kernel` module. In the same way setting
`:macros` will no longer auto-import `Kernel` macros like `Kernel.if/2`,
`Kernel.SpecialForms.case/2`, and so on.
Returns a tuple of the form `{value, binding}`,
where `value` is the value returned from evaluating `string`.
If an error occurs while evaluating `string` an exception will be raised.
`binding` is a keyword list with the value of all variable bindings
after evaluating `string`. The binding key is usually an atom, but it
may be a tuple for variables defined in a different context.
## Examples
iex> Code.eval_string("a + b", [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line)
{3, [a: 1, b: 2]}
iex> Code.eval_string("c = a + b", [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2, c: 3]}
iex> Code.eval_string("a = a + b", [a: 1, b: 2])
{3, [a: 3, b: 2]}
For convenience, you can pass `__ENV__/0` as the `opts` argument and
all imports, requires and aliases defined in the current environment
will be automatically carried over:
iex> Code.eval_string("a + b", [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2]}
"""
@spec eval_string(List.Chars.t(), list, Macro.Env.t() | keyword) :: {term, binding :: list}
def eval_string(string, binding \\ [], opts \\ [])
def eval_string(string, binding, %Macro.Env{} = env) do
{value, binding, _env, _scope} = :elixir.eval(to_charlist(string), binding, Map.to_list(env))
{value, binding}
end
def eval_string(string, binding, opts) when is_list(opts) do
validate_eval_opts(opts)
{value, binding, _env, _scope} = :elixir.eval(to_charlist(string), binding, opts)
{value, binding}
end
@doc ~S"""
Formats the given code `string`.
The formatter receives a string representing Elixir code and
returns iodata representing the formatted code according to
pre-defined rules.
## Options
* `:file` - the file which contains the string, used for error
reporting
* `:line` - the line the string starts, used for error reporting
* `:line_length` - the line length to aim for when formatting
the document. Defaults to 98. Note this value is used as
reference but it is not enforced by the formatter as sometimes
user intervention is required. See "Running the formatter"
section
* `:locals_without_parens` - a keyword list of name and arity
pairs that should be kept without parens whenever possible.
The arity may be the atom `:*`, which implies all arities of
that name. The formatter already includes a list of functions
and this option augments this list.
* `:rename_deprecated_at` - rename all known deprecated functions
at the given version to their non-deprecated equivalent. It
expects a valid `Version` which is usually the minimum Elixir
version supported by the project.
## Design principles
The formatter was designed under three principles.
First, the formatter never changes the semantics of the code by
default. This means the input AST and the output AST are equivalent.
Optional behaviour, such as `:rename_deprecated_at`, is allowed to
break this guarantee.
The second principle is to provide as little configuration as possible.
This eases the formatter adoption by removing contention points while
making sure a single style is followed consistently by the community as
a whole.
The formatter does not hard code names. The formatter will not behave
specially because a function is named `defmodule`, `def`, etc. This
principle mirrors Elixir's goal of being an extensible language where
developers can extend the language with new constructs as if they were
part of the language. When it is absolutely necessary to change behaviour
based on the name, this behaviour should be configurable, such as the
`:locals_without_parens` option.
## Running the formatter
The formatter attempts to fit the most it can on a single line and
introduces line breaks wherever possible when it cannot.
In some cases, this may lead to undesired formatting. Therefore, **some
code generated by the formatter may not be aesthetically pleasing and
may require explicit intervention from the developer**. That's why we
do not recommend to run the formatter blindly in an existing codebase.
Instead you should format and sanity check each formatted file.
Let's see some examples. The code below:
"this is a very long string ... #{inspect(some_value)}"
may be formatted as:
"this is a very long string ... #{
inspect(some_value)
}"
This happens because the only place the formatter can introduce a
new line without changing the code semantics is in the interpolation.
In those scenarios, we recommend developers to directly adjust the
code. Here we can use the binary concatenation operator `<>/2`:
"this is a very long string " <>
"... #{inspect(some_value)}"
The string concatenation makes the code fit on a single line and also
gives more options to the formatter.
A similar example is when the formatter breaks a function definition
over multiple clauses:
def my_function(
%User{name: name, age: age, ...},
arg1,
arg2
) do
...
end
While the code above is completely valid, you may prefer to match on
the struct variables inside the function body in order to keep the
definition on a single line:
def my_function(%User{} = user, arg1, arg2) do
%{name: name, age: age, ...} = user
...
end
In some situations, you can use the fact the formatter does not generate
elegant code as a hint for refactoring. Take this code:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
required_permissions == Enum.to_list(MapSet.intersection(MapSet.new(required_permissions), MapSet.new(available_permissions)))
end
The code above has very long lines and running the formatter is not going
to address this issue. In fact, the formatter may make it more obvious that
you have complex expressions:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
required_permissions ==
Enum.to_list(
MapSet.intersection(
MapSet.new(required_permissions),
MapSet.new(available_permissions)
)
)
end
Take such cases as a suggestion that your code should be refactored:
def board?(board_id, %User{} = user, available_permissions, required_permissions) do
Tracker.OrganizationMembers.user_in_organization?(user.id, board.organization_id) and
matching_permissions?(required_permissions, available_permissions)
end
defp matching_permissions?(required_permissions, available_permissions) do
intersection =
required_permissions
|> MapSet.new()
|> MapSet.intersection(MapSet.new(available_permissions))
|> Enum.to_list()
required_permissions == intersection
end
To sum it up: since the formatter cannot change the semantics of your
code, sometimes it is necessary to tweak or refactor the code to get
optimal formatting. To help better understand how to control the formatter,
we describe in the next sections the cases where the formatter keeps the
user encoding and how to control multiline expressions.
## Keeping user's formatting
The formatter respects the input format in some cases. Those are
listed below:
* Insignificant digits in numbers are kept as is. The formatter
however always inserts underscores for decimal numbers with more
than 5 digits and converts hexadecimal digits to uppercase
* Strings, charlists, atoms and sigils are kept as is. No character
is automatically escaped or unescaped. The choice of delimiter is
also respected from the input
* Newlines inside blocks are kept as in the input except for:
1) expressions that take multiple lines will always have an empty
line before and after and 2) empty lines are always squeezed
together into a single empty line
* The choice between `:do` keyword and `do/end` blocks is left
to the user
* Lists, tuples, bitstrings, maps, structs and function calls will be
broken into multiple lines if they are followed by a newline in the
opening bracket and preceded by a new line in the closing bracket
* Pipeline operators, like `|>` and others with the same precedence,
will span multiple lines if they spanned multiple lines in the input
The behaviours above are not guaranteed. We may remove or add new
rules in the future. The goal of documenting them is to provide better
understanding on what to expect from the formatter.
### Multi-line lists, maps, tuples, etc.
You can force lists, tuples, bitstrings, maps, structs and function
calls to have one entry per line by adding a newline after the opening
bracket and a new line before the closing bracket lines. For example:
[
foo,
bar
]
If there are no newlines around the brackets, then the formatter will
try to fit everything on a single line, such that the snippet below
[foo,
bar]
will be formatted as
[foo, bar]
You can also force function calls and keywords to be rendered on multiple
lines by having each entry on its own line:
defstruct name: nil,
age: 0
The code above will be kept with one keyword entry per line by the
formatter. To avoid that, just squash everything into a single line.
### Parens and no parens in function calls
Elixir has two syntaxes for function calls. With parens and no parens.
By default, Elixir will add parens to all calls except for:
1. calls that have do/end blocks
2. local calls without parens where the name and arity of the local
call is also listed under `:locals_without_parens` (except for
calls with arity 0, where the compiler always require parens)
The choice of parens and no parens also affects indentation. When a
function call with parens doesn't fit on the same line, the formatter
introduces a newline around parens and indents the arguments with two
spaces:
some_call(
arg1,
arg2,
arg3
)
On the other hand, function calls without parens are always indented
by the function call length itself, like this:
some_call arg1,
arg2,
arg3
If the last argument is a data structure, such as maps and lists, and
the beginning of the data structure fits on the same line as the function
call, then no indentation happens, this allows code like this:
Enum.reduce(some_collection, initial_value, fn element, acc ->
# code
end)
some_function_without_parens %{
foo: :bar,
baz: :bat
}
## Code comments
The formatter also handles code comments in a way to guarantee a space
is always added between the beginning of the comment (#) and the next
character.
The formatter also extracts all trailing comments to their previous line.
For example, the code below
hello #world
will be rewritten to
# world
hello
Because code comments are handled apart from the code representation (AST),
there are some situations where code comments are seen as ambiguous by the
code formatter. For example, the comment in the anonymous function below
fn
arg1 ->
body1
# comment
arg2 ->
body2
end
and in this one
fn
arg1 ->
body1
# comment
arg2 ->
body2
end
are considered equivalent (the nesting is discarded alongside most of
user formatting). In such cases, the code formatter will always format to
the latter.
"""
@doc since: "1.6.0"
@spec format_string!(binary, keyword) :: iodata
def format_string!(string, opts \\ []) when is_binary(string) and is_list(opts) do
line_length = Keyword.get(opts, :line_length, 98)
algebra = Code.Formatter.to_algebra!(string, opts)
Inspect.Algebra.format(algebra, line_length)
end
@doc """
Formats a file.
See `format_string!/2` for more information on code formatting and
available options.
"""
@doc since: "1.6.0"
@spec format_file!(binary, keyword) :: iodata
def format_file!(file, opts \\ []) when is_binary(file) and is_list(opts) do
string = File.read!(file)
formatted = format_string!(string, [file: file, line: 1] ++ opts)
[formatted, ?\n]
end
@doc """
Evaluates the quoted contents.
**Warning**: Calling this function inside a macro is considered bad
practice as it will attempt to evaluate runtime values at compile time.
Macro arguments are typically transformed by unquoting them into the
returned quoted expressions (instead of evaluated).
See `eval_string/3` for a description of bindings and options.
## Examples
iex> contents = quote(do: var!(a) + var!(b))
iex> Code.eval_quoted(contents, [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line)
{3, [a: 1, b: 2]}
For convenience, you can pass `__ENV__/0` as the `opts` argument and
all options will be automatically extracted from the current environment:
iex> contents = quote(do: var!(a) + var!(b))
iex> Code.eval_quoted(contents, [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2]}
"""
@spec eval_quoted(Macro.t(), list, Macro.Env.t() | keyword) :: {term, binding :: list}
def eval_quoted(quoted, binding \\ [], opts \\ [])
def eval_quoted(quoted, binding, %Macro.Env{} = env) do
{value, binding, _env, _scope} = :elixir.eval_quoted(quoted, binding, Map.to_list(env))
{value, binding}
end
def eval_quoted(quoted, binding, opts) when is_list(opts) do
validate_eval_opts(opts)
{value, binding, _env, _scope} = :elixir.eval_quoted(quoted, binding, opts)
{value, binding}
end
defp validate_eval_opts(opts) do
if f = opts[:functions], do: validate_imports(:functions, f)
if m = opts[:macros], do: validate_imports(:macros, m)
if a = opts[:aliases], do: validate_aliases(:aliases, a)
if r = opts[:requires], do: validate_requires(:requires, r)
end
defp validate_requires(kind, requires) do
valid = is_list(requires) and Enum.all?(requires, &is_atom(&1))
unless valid do
raise ArgumentError, "expected :#{kind} option given to eval in the format: [module]"
end
end
defp validate_aliases(kind, aliases) do
valid = is_list(aliases) and Enum.all?(aliases, fn {k, v} -> is_atom(k) and is_atom(v) end)
unless valid do
raise ArgumentError,
"expected :#{kind} option given to eval in the format: [{module, module}]"
end
end
defp validate_imports(kind, imports) do
valid =
is_list(imports) and
Enum.all?(imports, fn {k, v} ->
is_atom(k) and is_list(v) and
Enum.all?(v, fn {name, arity} -> is_atom(name) and is_integer(arity) end)
end)
unless valid do
raise ArgumentError,
"expected :#{kind} option given to eval in the format: [{module, [{name, arity}]}]"
end
end
@doc """
Converts the given string to its quoted form.
Returns `{:ok, quoted_form}` if it succeeds,
`{:error, {line, error, token}}` otherwise.
## Options
* `:file` - the filename to be reported in case of parsing errors.
Defaults to "nofile".
* `:line` - the starting line of the string being parsed.
Defaults to 1.
* `:columns` - when `true`, attach a `:column` key to the quoted
metadata. Defaults to `false`.
* `:existing_atoms_only` - when `true`, raises an error
when non-existing atoms are found by the tokenizer.
Defaults to `false`.
* `:warn_on_unnecessary_quotes` - when `false`, does not warn
when atoms, keywords or calls have unnecessary quotes on
them. Defaults to `true`.
## `Macro.to_string/2`
The opposite of converting a string to its quoted form is
`Macro.to_string/2`, which converts a quoted form to a string/binary
representation.
"""
@spec string_to_quoted(List.Chars.t(), keyword) ::
{:ok, Macro.t()} | {:error, {line :: pos_integer, term, term}}
def string_to_quoted(string, opts \\ []) when is_list(opts) do
file = Keyword.get(opts, :file, "nofile")
line = Keyword.get(opts, :line, 1)
case :elixir.string_to_tokens(to_charlist(string), line, file, opts) do
{:ok, tokens} ->
:elixir.tokens_to_quoted(tokens, file, opts)
{:error, _error_msg} = error ->
error
end
end
@doc """
Converts the given string to its quoted form.
It returns the ast if it succeeds,
raises an exception otherwise. The exception is a `TokenMissingError`
in case a token is missing (usually because the expression is incomplete),
`SyntaxError` otherwise.
Check `string_to_quoted/2` for options information.
"""
@spec string_to_quoted!(List.Chars.t(), keyword) :: Macro.t()
def string_to_quoted!(string, opts \\ []) when is_list(opts) do
file = Keyword.get(opts, :file, "nofile")
line = Keyword.get(opts, :line, 1)
:elixir.string_to_quoted!(to_charlist(string), line, file, opts)
end
@doc """
Evals the given file.
Accepts `relative_to` as an argument to tell where the file is located.
While `require_file/2` and `compile_file/2` return the loaded modules and their
bytecode, `eval_file/2` simply evaluates the file contents and returns the
evaluation result and its bindings (exactly the same return value as `eval_string/3`).
"""
@spec eval_file(binary, nil | binary) :: {term, binding :: list}
def eval_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
eval_string(File.read!(file), [], file: file, line: 1)
end
# TODO: Deprecate on v1.9
@doc false
def load_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
:elixir_code_server.call({:acquire, file})
loaded = :elixir_compiler.file(file)
:elixir_code_server.cast({:required, file})
loaded
end
@doc """
Requires the given `file`.
Accepts `relative_to` as an argument to tell where the file is located.
If the file was already required, `require_file/2` doesn't do anything and
returns `nil`.
Notice that if `require_file/2` is invoked by different processes concurrently,
the first process to invoke `require_file/2` acquires a lock and the remaining
ones will block until the file is available. This means that if `require_file/2`
is called more than once with a given file, that file will be compiled only once.
The first process to call `require_file/2` will get the list of loaded modules,
others will get `nil`.
See `compile_file/2` if you would like to compile a file without tracking its
filenames. Finally, if you would like to get the result of evaluating a file rather
than the modules defined in it, see `eval_file/2`.
## Examples
If the file has not been required, it returns the list of modules:
modules = Code.require_file("eex_test.exs", "../eex/test")
List.first(modules)
#=> {EExTest.Compiled, <<70, 79, 82, 49, ...>>}
If the file has been required, it returns `nil`:
Code.require_file("eex_test.exs", "../eex/test")
#=> nil
"""
@spec require_file(binary, nil | binary) :: [{module, binary}] | nil
def require_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
# TODO: Simply block until :required or :proceed once load_file is removed on v2.0
case :elixir_code_server.call({:acquire, file}) do
:required ->
nil
{:queued, ref} ->
receive do
{:elixir_code_server, ^ref, :required} -> nil
end
:proceed ->
loaded = :elixir_compiler.file(file)
:elixir_code_server.cast({:required, file})
loaded
end
end
@doc """
Gets the compilation options from the code server.
Check `compiler_options/1` for more information.
## Examples
Code.compiler_options()
#=> %{debug_info: true, docs: true,
#=> warnings_as_errors: false, ignore_module_conflict: false}
"""
@spec compiler_options() :: %{optional(atom) => boolean}
def compiler_options do
:elixir_config.get(:compiler_options)
end
@doc """
Returns a list with the available compiler options.
See `compiler_options/1` for more information.
## Examples
iex> Code.available_compiler_options()
[:docs, :debug_info, :ignore_module_conflict, :relative_paths, :warnings_as_errors]
"""
@spec available_compiler_options() :: [atom]
def available_compiler_options do
[:docs, :debug_info, :ignore_module_conflict, :relative_paths, :warnings_as_errors]
end
@doc """
Purge compiler modules.
The compiler utilizes temporary modules to compile code. For example,
`elixir_compiler_1`, `elixir_compiler_2`, etc. In case the compiled code
stores references to anonymous functions or similar, the Elixir compiler
may be unable to reclaim those modules, keeping an unnecessary amount of
code in memory and eventually leading to modules such as `elixir_compiler_12345`.
This function purges all modules currently kept by the compiler, allowing
old compiler module names to be reused. If there are any processes running
any code from such modules, they will be terminated too.
It returns `{:ok, number_of_modules_purged}`.
"""
@doc since: "1.7.0"
@spec purge_compiler_modules() :: {:ok, non_neg_integer()}
def purge_compiler_modules() do
:elixir_code_server.call(:purge_compiler_modules)
end
@doc """
Sets compilation options.
These options are global since they are stored by Elixir's Code Server.
Available options are:
* `:docs` - when `true`, retain documentation in the compiled module.
Defaults to `true`.
* `:debug_info` - when `true`, retain debug information in the compiled
module. This allows a developer to reconstruct the original source
code. Defaults to `false`.
* `:ignore_module_conflict` - when `true`, override modules that were
already defined without raising errors. Defaults to `false`.
* `:relative_paths` - when `true`, use relative paths in quoted nodes,
warnings and errors generated by the compiler. Note disabling this option
won't affect runtime warnings and errors. Defaults to `true`.
* `:warnings_as_errors` - causes compilation to fail when warnings are
generated. Defaults to `false`.
It returns the new map of compiler options.
## Examples
Code.compiler_options(debug_info: true)
#=> %{debug_info: true, docs: true,
#=> warnings_as_errors: false, ignore_module_conflict: false}
"""
@spec compiler_options(Enumerable.t()) :: %{optional(atom) => boolean}
def compiler_options(opts) do
available = available_compiler_options()
Enum.each(opts, fn {key, value} ->
cond do
key not in available ->
raise "unknown compiler option: #{inspect(key)}"
not is_boolean(value) ->
raise "compiler option #{inspect(key)} should be a boolean, got: #{inspect(value)}"
true ->
:ok
end
end)
:elixir_config.update(:compiler_options, &Enum.into(opts, &1))
end
@doc """
Compiles the given string.
Returns a list of tuples where the first element is the module name
and the second one is its bytecode (as a binary). A `file` can be
given as second argument which will be used for reporting warnings
and errors.
**Warning**: `string` can be any Elixir code and code can be executed with
the same privileges as the Erlang VM: this means that such code could
compromise the machine (for example by executing system commands).
Don't use `compile_string/2` with untrusted input (such as strings coming
from the network).
"""
@spec compile_string(List.Chars.t(), binary) :: [{module, binary}]
def compile_string(string, file \\ "nofile") when is_binary(file) do
:elixir_compiler.string(to_charlist(string), file)
end
@doc """
Compiles the quoted expression.
Returns a list of tuples where the first element is the module name and
the second one is its bytecode (as a binary). A `file` can be
given as second argument which will be used for reporting warnings
and errors.
"""
@spec compile_quoted(Macro.t(), binary) :: [{module, binary}]
def compile_quoted(quoted, file \\ "nofile") when is_binary(file) do
:elixir_compiler.quoted(quoted, file)
end
@doc """
Compiles the given file.
Accepts `relative_to` as an argument to tell where the file is located.
Returns a list of tuples where the first element is the module name and
the second one is its bytecode (as a binary). Opposite to `require_file/2`,
it does not track the filename of the compiled file.
If you would like to get the result of evaluating file rather than the
modules defined in it, see `eval_file/2`.
For compiling many files concurrently, see `Kernel.ParallelCompiler.compile/2`.
"""
@spec compile_file(binary, nil | binary) :: [{module, binary}]
def compile_file(file, relative_to \\ nil) when is_binary(file) do
:elixir_compiler.file(find_file(file, relative_to))
end
@doc """
Ensures the given module is loaded.
If the module is already loaded, this works as no-op. If the module
was not yet loaded, it tries to load it.
If it succeeds in loading the module, it returns `{:module, module}`.
If not, returns `{:error, reason}` with the error reason.
## Code loading on the Erlang VM
Erlang has two modes to load code: interactive and embedded.
By default, the Erlang VM runs in interactive mode, where modules
are loaded as needed. In embedded mode the opposite happens, as all
modules need to be loaded upfront or explicitly.
Therefore, this function is used to check if a module is loaded
before using it and allows one to react accordingly. For example, the `URI`
module uses this function to check if a specific parser exists for a given
URI scheme.
## `ensure_compiled/1`
Elixir also contains an `ensure_compiled/1` function that is a
superset of `ensure_loaded/1`.
Since Elixir's compilation happens in parallel, in some situations
you may need to use a module that was not yet compiled, therefore
it can't even be loaded.
When invoked, `ensure_compiled/1` halts the compilation of the caller
until the module given to `ensure_compiled/1` becomes available or
all files for the current project have been compiled. If compilation
finishes and the module is not available, an error tuple is returned.
`ensure_compiled/1` does not apply to dependencies, as dependencies
must be compiled upfront.
In most cases, `ensure_loaded/1` is enough. `ensure_compiled/1`
must be used in rare cases, usually involving macros that need to
invoke a module for callback information.
## Examples
iex> Code.ensure_loaded(Atom)
{:module, Atom}
iex> Code.ensure_loaded(DoesNotExist)
{:error, :nofile}
"""
@spec ensure_loaded(module) ::
{:module, module} | {:error, :embedded | :badfile | :nofile | :on_load_failure}
def ensure_loaded(module) when is_atom(module) do
:code.ensure_loaded(module)
end
@doc """
Ensures the given module is loaded.
Similar to `ensure_loaded/1`, but returns `true` if the module
is already loaded or was successfully loaded. Returns `false`
otherwise.
## Examples
iex> Code.ensure_loaded?(Atom)
true
"""
@spec ensure_loaded?(module) :: boolean
def ensure_loaded?(module) when is_atom(module) do
match?({:module, ^module}, ensure_loaded(module))
end
@doc """
Ensures the given module is compiled and loaded.
If the module is already loaded, it works as no-op. If the module was
not loaded yet, it checks if it needs to be compiled first and then
tries to load it.
If it succeeds in loading the module, it returns `{:module, module}`.
If not, returns `{:error, reason}` with the error reason.
Check `ensure_loaded/1` for more information on module loading
and when to use `ensure_loaded/1` or `ensure_compiled/1`.
"""
@spec ensure_compiled(module) ::
        {:module, module} | {:error, :embedded | :badfile | :nofile | :on_load_failure}
def ensure_compiled(module) when is_atom(module) do
  # Anything other than {:error, :nofile} (already loaded, or a load failure
  # unrelated to missing compilation) falls straight through the `with`.
  with {:error, :nofile} = error <- :code.ensure_loaded(module) do
    # Only ask the parallel compiler to produce the module when we are running
    # inside a compilation (an :elixir_compiler_pid is set in the process dict).
    if is_pid(:erlang.get(:elixir_compiler_pid)) and
         Kernel.ErrorHandler.ensure_compiled(module, :module) do
      {:module, module}
    else
      error
    end
  end
end
@doc """
Ensures the given module is compiled and loaded.
Similar to `ensure_compiled/1`, but returns `true` if the module
is already loaded or was successfully loaded and compiled.
Returns `false` otherwise.
"""
@spec ensure_compiled?(module) :: boolean
def ensure_compiled?(module) when is_atom(module) do
  # True only when the module ends up compiled and loaded.
  case ensure_compiled(module) do
    {:module, ^module} -> true
    _other -> false
  end
end
@doc ~S"""
Returns the docs for the given module or path to `.beam` file.
When given a module name, it finds its BEAM code and reads the docs from it.
When given a path to a `.beam` file, it will load the docs directly from that
file.
It returns the term stored in the documentation chunk in the format defined by
[EEP 48](http://erlang.org/eep/eeps/eep-0048.html) or `{:error, reason}` if
the chunk is not available.
## Examples
# Module documentation of an existing module
iex> {:docs_v1, _, :elixir, _, %{"en" => module_doc}, _, _} = Code.fetch_docs(Atom)
iex> module_doc |> String.split("\n") |> Enum.at(0)
"Convenience functions for working with atoms."
# A module that doesn't exist
iex> Code.fetch_docs(ModuleNotGood)
{:error, :module_not_found}
"""
@doc since: "1.7.0"
@spec fetch_docs(module | String.t()) ::
{:docs_v1, annotation, beam_language, format, module_doc :: doc_content, metadata,
docs :: [doc_element]}
| {:error, :module_not_found | :chunk_not_found | {:invalid_chunk, binary}}
when annotation: :erl_anno.anno(),
beam_language: :elixir | :erlang | :lfe | :alpaca | atom(),
doc_content: %{required(binary) => binary} | :none | :hidden,
doc_element:
{{kind :: atom, function_name :: atom, arity}, annotation, signature, doc_content,
metadata},
format: binary,
signature: [binary],
metadata: map
def fetch_docs(module_or_path)

def fetch_docs(module) when is_atom(module) do
  # :code.get_object_code/1 yields the BEAM bytecode directly; :error means
  # the module's object code cannot be located.
  with {_module, beam, _beam_path} <- :code.get_object_code(module) do
    do_fetch_docs(beam)
  else
    :error -> {:error, :module_not_found}
  end
end

# :beam_lib expects a charlist when handed a path instead of bytecode.
def fetch_docs(path) when is_binary(path) do
  path |> String.to_charlist() |> do_fetch_docs()
end
# Name of the BEAM chunk that stores EEP 48 documentation.
@docs_chunk 'Docs'

defp do_fetch_docs(bin_or_path) do
  case :beam_lib.chunks(bin_or_path, [@docs_chunk]) do
    {:ok, {_module, [{@docs_chunk, bin}]}} ->
      # The chunk payload is an ETF-encoded term; a decode failure means the
      # chunk was written by an incompatible producer or is corrupted.
      # NOTE(review): binary_to_term/1 without :safe — acceptable here since
      # BEAM files are trusted compiler output, but worth confirming.
      try do
        :erlang.binary_to_term(bin)
      rescue
        _ -> {:error, {:invalid_chunk, bin}}
      end

    {:error, :beam_lib, {:missing_chunk, _, @docs_chunk}} ->
      {:error, :chunk_not_found}
  end
end
@doc ~S"""
Deprecated function to retrieve old documentation format.
Elixir v1.7 adopts [EEP 48](http://erlang.org/eep/eeps/eep-0048.html)
which is a new documentation format meant to be shared across all
BEAM languages. The old format, used by `Code.get_docs/2`, is no
longer available, and therefore this function always returns `nil`.
Use `Code.fetch_docs/1` instead.
"""
@deprecated "Code.get_docs/2 always returns nil as its outdated documentation is no longer stored on BEAM files. Use Code.fetch_docs/1 instead"
@spec get_docs(module, :moduledoc | :docs | :callback_docs | :type_docs | :all) :: nil
# Kept only for backwards compatibility; the legacy chunk no longer exists.
def get_docs(_module, _kind), do: nil
## Helpers
# Finds the file given the relative_to path.
#
# If the file is found, returns its path in binary, fails otherwise.
# Expands `file` (optionally relative to `relative_to`) and returns the
# absolute path, raising Code.LoadError when it is not a regular file.
defp find_file(file, relative_to) do
  path = if relative_to, do: Path.expand(file, relative_to), else: Path.expand(file)

  File.regular?(path) || raise Code.LoadError, file: path
  path
end
end
| 33.517811 | 145 | 0.689131 |
f745e99fb8d5dccb3701cef03d72a017784a9bb6 | 316 | exs | Elixir | apps/commuter_rail_boarding/test/uploader_test.exs | paulswartz/commuter_rail_boarding | 6be34c192d6a1ee980307d9f3d027bf4cdafa53f | [
"MIT"
] | 1 | 2022-01-30T20:53:07.000Z | 2022-01-30T20:53:07.000Z | apps/commuter_rail_boarding/test/uploader_test.exs | paulswartz/commuter_rail_boarding | 6be34c192d6a1ee980307d9f3d027bf4cdafa53f | [
"MIT"
] | 47 | 2021-05-05T10:31:05.000Z | 2022-03-30T22:18:14.000Z | apps/commuter_rail_boarding/test/uploader_test.exs | paulswartz/commuter_rail_boarding | 6be34c192d6a1ee980307d9f3d027bf4cdafa53f | [
"MIT"
] | 1 | 2021-05-14T00:35:08.000Z | 2021-05-14T00:35:08.000Z | defmodule UploaderTest do
@moduledoc false
use ExUnit.Case, async: true

import Uploader

describe "upload/1" do
  test "uploads a binary" do
    assert upload("filename", "binary") == :ok

    # Uploader.Mock echoes every upload back to the calling test process.
    assert_received {:upload, "filename", "binary"}
  end
end
end
| 21.066667 | 53 | 0.674051 |
f7460df4c84c939db2126e9f389a2babab941ad3 | 729 | ex | Elixir | lib/hologram/template/parser.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | lib/hologram/template/parser.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | lib/hologram/template/parser.ex | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | defmodule Hologram.Template.Parser do
alias Hologram.Template.{DOMTreeBuilder, TagAssembler, Tokenizer}

# Parses template markup into a DOM tree, raising on malformed input.
def parse!(markup) do
  # Fresh assembler state threaded through TagAssembler.assemble/4.
  initial_state = %{
    attrs: [],
    attr_key: nil,
    double_quote_opened?: 0,
    num_open_braces: 0,
    prev_tokens: [],
    tag_name: nil,
    token_buffer: []
  }

  markup
  |> remove_doctype()
  |> remove_comments()
  |> String.trim()
  |> Tokenizer.tokenize()
  |> TagAssembler.assemble(:text_tag, initial_state, [])
  |> DOMTreeBuilder.build()
end

# Strips HTML comments; `.*?` with /s matches lazily across newlines,
# exactly like the original ungreedy /U variant.
defp remove_comments(markup) do
  Regex.replace(~r/<!--.*?-->/s, markup, "")
end

# Drops a leading DOCTYPE declaration together with surrounding whitespace.
defp remove_doctype(markup) do
  Regex.replace(~r/^\s*<!DOCTYPE[^>]*>\s*/i, markup, "")
end
end
| 22.090909 | 67 | 0.603567 |
f7465d7b549440c0b65b966dabaff3860d54bac9 | 350 | ex | Elixir | lib/data_providers/ecto_provider/context_role.ex | Simon-Initiative/lti_1p3_ecto_provider | 15b84b35aa2de266a5199bfab73cf339220e2b87 | [
"MIT"
] | null | null | null | lib/data_providers/ecto_provider/context_role.ex | Simon-Initiative/lti_1p3_ecto_provider | 15b84b35aa2de266a5199bfab73cf339220e2b87 | [
"MIT"
] | null | null | null | lib/data_providers/ecto_provider/context_role.ex | Simon-Initiative/lti_1p3_ecto_provider | 15b84b35aa2de266a5199bfab73cf339220e2b87 | [
"MIT"
] | null | null | null | defmodule Lti_1p3.DataProviders.EctoProvider.ContextRole do
use Ecto.Schema
import Ecto.Changeset
schema "lti_1p3_context_roles" do
field :uri, :string
end
@doc false
def changeset(context_role, attrs \\ %{}) do
context_role
|> cast(attrs, [:uri])
|> validate_required([:uri])
|> unique_constraint(:uri)
end
end
| 18.421053 | 59 | 0.688571 |
f74662f9ab50a733909a397ee51f3e83348414fe | 2,046 | ex | Elixir | clients/jobs/lib/google_api/jobs/v2/model/delete_jobs_by_filter_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/jobs/lib/google_api/jobs/v2/model/delete_jobs_by_filter_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/jobs/lib/google_api/jobs/v2/model/delete_jobs_by_filter_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# NOTE(review): auto-generated model — regenerate rather than hand-edit.
defmodule GoogleApi.Jobs.V2.Model.DeleteJobsByFilterRequest do
  @moduledoc """
  Deprecated. Use BatchDeleteJobsRequest instead. Input only. Delete job by filter request. The job typically becomes unsearchable within 10 seconds, but it may take up to 5 minutes.

  ## Attributes

  - disableFastProcess (boolean()): Optional. If set to true, this call waits for all processing steps to complete before the job is cleaned up. Otherwise, the call returns while some steps are still taking place asynchronously, hence faster. Defaults to: `null`.
  - filter (Filter): Required. Restrictions on the scope of the delete request. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :disableFastProcess => any(),
          :filter => GoogleApi.Jobs.V2.Model.Filter.t()
        }

  field(:disableFastProcess)
  field(:filter, as: GoogleApi.Jobs.V2.Model.Filter)
end

# Delegates JSON decoding to the generated model module.
defimpl Poison.Decoder, for: GoogleApi.Jobs.V2.Model.DeleteJobsByFilterRequest do
  def decode(value, options) do
    GoogleApi.Jobs.V2.Model.DeleteJobsByFilterRequest.decode(value, options)
  end
end

# Encodes the struct to JSON via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Jobs.V2.Model.DeleteJobsByFilterRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.117647 | 264 | 0.753177 |
f746925c64d2c83839209f8116095715805531cb | 628 | ex | Elixir | lib/tentacat/repositories/contents.ex | hi-rustin/tentacat | be0b4a671f90faab2598b6d58a691d506f46cfb5 | [
"MIT"
] | 432 | 2015-01-19T20:38:35.000Z | 2022-01-11T14:32:28.000Z | lib/tentacat/repositories/contents.ex | hi-rustin/tentacat | be0b4a671f90faab2598b6d58a691d506f46cfb5 | [
"MIT"
] | 183 | 2015-01-19T08:55:29.000Z | 2022-03-01T20:26:03.000Z | lib/tentacat/repositories/contents.ex | hi-rustin/tentacat | be0b4a671f90faab2598b6d58a691d506f46cfb5 | [
"MIT"
] | 189 | 2015-01-04T14:56:59.000Z | 2021-12-14T20:48:18.000Z | defmodule Tentacat.Repositories.Contents do
import Tentacat

alias Tentacat.Client

@doc """
Gets the contents of a file or directory in a repository.

## Example

    Tentacat.Repositories.Contents.content "elixir-lang", "elixir", "CHANGELOG.md"
    Tentacat.Repositories.Contents.content client, "elixir-lang", "elixir", "CHANGELOG.md"

More info at: https://developer.github.com/v3/repos/contents/#get-contents
"""
# Fixed: the spec previously declared only three arguments, but the function
# takes four (client \\ default, owner, repo, path).
@spec content(Client.t(), binary, binary, binary) :: Tentacat.response()
def content(client \\ %Client{}, owner, repo, path) do
  get("repos/#{owner}/#{repo}/contents/#{path}", client)
end
end
| 31.4 | 92 | 0.702229 |
f746b209856c66ea8314abdd82f99e7e173255e9 | 2,425 | exs | Elixir | deps/phoenix/priv/templates/phoenix.gen.json/controller_test.exs | Hajto/hangmanelixir | a05cc0b73e0a5d464405f63d274ae622565728ca | [
"MIT"
] | null | null | null | deps/phoenix/priv/templates/phoenix.gen.json/controller_test.exs | Hajto/hangmanelixir | a05cc0b73e0a5d464405f63d274ae622565728ca | [
"MIT"
] | null | null | null | deps/phoenix/priv/templates/phoenix.gen.json/controller_test.exs | Hajto/hangmanelixir | a05cc0b73e0a5d464405f63d274ae622565728ca | [
"MIT"
] | null | null | null | defmodule <%= module %>ControllerTest do
use <%= base %>.ConnCase
alias <%= module %>
@valid_attrs <%= inspect params %>
@invalid_attrs %{}
setup do
conn = conn() |> put_req_header("accept", "application/json")
{:ok, conn: conn}
end
test "lists all entries on index", %{conn: conn} do
conn = get conn, <%= singular %>_path(conn, :index)
assert json_response(conn, 200)["data"] == []
end
test "shows chosen resource", %{conn: conn} do
<%= singular %> = Repo.insert! %<%= alias %>{}
conn = get conn, <%= singular %>_path(conn, :show, <%= singular %>)
assert json_response(conn, 200)["data"] == %{"id" => <%= singular %>.id<%= for {k, _} <- attrs do %>,
"<%= k %>" => <%= singular %>.<%= k %><% end %>}
end
test "does not show resource and instead throw error when id is nonexistent", %{conn: conn} do
assert_raise Ecto.NoResultsError, fn ->
get conn, <%= singular %>_path(conn, :show, -1)
end
end
test "creates and renders resource when data is valid", %{conn: conn} do
conn = post conn, <%= singular %>_path(conn, :create), <%= singular %>: @valid_attrs
assert json_response(conn, 201)["data"]["id"]
assert Repo.get_by(<%= alias %>, @valid_attrs)
end
test "does not create resource and renders errors when data is invalid", %{conn: conn} do
conn = post conn, <%= singular %>_path(conn, :create), <%= singular %>: @invalid_attrs
assert json_response(conn, 422)["errors"] != %{}
end
test "updates and renders chosen resource when data is valid", %{conn: conn} do
<%= singular %> = Repo.insert! %<%= alias %>{}
conn = put conn, <%= singular %>_path(conn, :update, <%= singular %>), <%= singular %>: @valid_attrs
assert json_response(conn, 200)["data"]["id"]
assert Repo.get_by(<%= alias %>, @valid_attrs)
end
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
<%= singular %> = Repo.insert! %<%= alias %>{}
conn = put conn, <%= singular %>_path(conn, :update, <%= singular %>), <%= singular %>: @invalid_attrs
assert json_response(conn, 422)["errors"] != %{}
end
test "deletes chosen resource", %{conn: conn} do
<%= singular %> = Repo.insert! %<%= alias %>{}
conn = delete conn, <%= singular %>_path(conn, :delete, <%= singular %>)
assert response(conn, 204)
refute Repo.get(<%= alias %>, <%= singular %>.id)
end
end
| 39.112903 | 106 | 0.602887 |
f746c685e1603c5d327427a12031373e7a1af9f1 | 1,526 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_row.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_row.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_row.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE(review): auto-generated model — regenerate rather than hand-edit.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Row do
  @moduledoc """
  Values of the row.

  ## Attributes

  *   `values` (*type:* `list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Value.t)`, *default:* `nil`) - Individual cells.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :values => list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Value.t()) | nil
        }

  field(:values, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Value, type: :list)
end

# Delegates JSON decoding to the generated model module.
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Row do
  def decode(value, options) do
    GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Row.decode(value, options)
  end
end

# Encodes the struct to JSON via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Row do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.468085 | 119 | 0.742464 |
f746fbd7391170731dad72574e45b7a907341eba | 1,430 | exs | Elixir | bench/sfsobject_decode_bench.exs | splattael/sfsobject | 220c696ede7dbc66bd81530861122a83427e7a9d | [
"MIT"
] | 2 | 2020-05-04T16:08:39.000Z | 2020-07-19T18:46:02.000Z | bench/sfsobject_decode_bench.exs | neopoly/sfsobject | 220c696ede7dbc66bd81530861122a83427e7a9d | [
"MIT"
] | null | null | null | bench/sfsobject_decode_bench.exs | neopoly/sfsobject | 220c696ede7dbc66bd81530861122a83427e7a9d | [
"MIT"
] | null | null | null | defmodule SFSObject.DecodeBench do
use Benchfella
@empty SFSObject.new
@sfsobject @empty
@data SFSObject.encode(@sfsobject)
bench "decode empty" do
SFSObject.decode(@data)
end
@sfsobject @empty |> SFSObject.put_null("a")
@data SFSObject.encode(@sfsobject)
bench "decode null" do
SFSObject.decode(@data)
end
@sfsobject @empty |> SFSObject.put_int("a", 1)
@data SFSObject.encode(@sfsobject)
bench "decode int" do
SFSObject.decode(@data)
end
@sfsobject @empty |> SFSObject.put_string("a", "hello world")
@data SFSObject.encode(@sfsobject)
bench "decode string" do
SFSObject.decode(@data)
end
@sfsobject @empty |> SFSObject.put_bool_array("a", [true, false])
@data SFSObject.encode(@sfsobject)
bench "decode bool array" do
SFSObject.decode(@data)
end
@sfsobject @empty |> SFSObject.put_int_array("a", [1,2,3])
@data SFSObject.encode(@sfsobject)
bench "decode int array" do
SFSObject.decode(@data)
end
@sfsobject @empty
|> SFSObject.put_byte("c", 1)
|> SFSObject.put_short("a", 13)
|> SFSObject.put_object("p", SFSObject.new
|> SFSObject.put_string("c", "User.Me")
|> SFSObject.put_object("p", SFSObject.new
|> SFSObject.put_string("username", "foobar")
|> SFSObject.put_int("rank", 1)
)
)
@data SFSObject.encode(@sfsobject)
bench "decode nested object" do
SFSObject.decode(@data)
end
end
| 25.087719 | 67 | 0.670629 |
f7470542130280f3f326fa6b64a9bbc11bae8307 | 93 | ex | Elixir | test/examples/basic/string.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | 75 | 2021-11-17T11:55:13.000Z | 2022-03-28T04:35:04.000Z | test/examples/basic/string.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | 48 | 2021-11-15T13:56:14.000Z | 2022-03-31T15:55:47.000Z | test/examples/basic/string.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | 6 | 2021-12-22T20:41:27.000Z | 2022-03-09T09:07:38.000Z | defmodule Basic.String do
def string, do: "abc"
def string_block do
"abc"
end
end
| 11.625 | 25 | 0.666667 |
f747075797085f6dcff5d9b518eb578e74bae4dc | 1,670 | exs | Elixir | lib/perspective/event_chain/page_buffer/tests/page_buffer_state_test.exs | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | 2 | 2020-04-24T19:43:06.000Z | 2020-04-24T19:52:27.000Z | lib/perspective/event_chain/page_buffer/tests/page_buffer_state_test.exs | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | null | null | null | lib/perspective/event_chain/page_buffer/tests/page_buffer_state_test.exs | backmath/perspective | a0a577d0ffb06805b64e4dcb171a093e051884b0 | [
"MIT"
] | null | null | null | defmodule Perspective.EventChain.PageBuffer.State.Test do
use ExUnit.Case, async: true
alias Perspective.EventChain.PageBuffer.State
test "new" do
assert %State{events: []} == State.new()
end
test "add" do
result =
State.new()
|> State.add(%{some: :event})
assert %State{events: [%{some: :event}]} == result
end
test "has_events returns false for empty state" do
assert false == State.new() |> State.has_events?()
result =
State.new()
|> State.add(%{some: :event})
assert %State{events: [%{some: :event}]} == result
end
test "has_events returns true for non-empty state" do
state =
State.new()
|> State.add(%{some: :event})
assert true == State.has_events?(state)
end
test "take_out returns empty results when called against empty state" do
{state, events} =
State.new()
|> State.take_out(10)
assert [] == events
assert %State{events: []} == state
end
test "take_out returns the requested amount and leaves the remainder" do
state =
State.new()
|> State.add(%{some: :event})
|> State.add(%{some: :other_event})
{state, events} = State.take_out(state, 1)
assert [%{some: :event}] == events
assert %State{events: [%{some: :other_event}]} == state
end
test "take_out returns everything and leaves nothing when requesting in excess" do
state =
State.new()
|> State.add(%{some: :event})
|> State.add(%{some: :other_event})
{state, events} = State.take_out(state, 3)
assert [%{some: :event}, %{some: :other_event}] == events
assert %State{events: []} == state
end
end
| 24.558824 | 84 | 0.613174 |
f7471d3757bd3a045a3399dbec9747e111885222 | 1,623 | ex | Elixir | samples/client/petstore/elixir/lib/openapi_petstore/model/additional_properties_class.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 4 | 2020-07-24T07:02:57.000Z | 2022-01-08T17:37:38.000Z | samples/client/petstore/elixir/lib/openapi_petstore/model/additional_properties_class.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 10 | 2019-06-28T09:01:45.000Z | 2022-02-26T12:19:16.000Z | samples/client/petstore/elixir/lib/openapi_petstore/model/additional_properties_class.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 2 | 2020-04-24T15:18:41.000Z | 2021-12-07T09:39:40.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule OpenapiPetstore.Model.AdditionalPropertiesClass do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"map_string",
:"map_number",
:"map_integer",
:"map_boolean",
:"map_array_integer",
:"map_array_anytype",
:"map_map_string",
:"map_map_anytype",
:"anytype_1",
:"anytype_2",
:"anytype_3"
]
@type t :: %__MODULE__{
:"map_string" => %{optional(String.t) => String.t} | nil,
:"map_number" => %{optional(String.t) => float()} | nil,
:"map_integer" => %{optional(String.t) => integer()} | nil,
:"map_boolean" => %{optional(String.t) => boolean()} | nil,
:"map_array_integer" => %{optional(String.t) => [integer()]} | nil,
:"map_array_anytype" => %{optional(String.t) => [Map]} | nil,
:"map_map_string" => %{optional(String.t) => %{optional(String.t) => String.t}} | nil,
:"map_map_anytype" => %{optional(String.t) => %{optional(String.t) => Map}} | nil,
:"anytype_1" => Map | nil,
:"anytype_2" => Map | nil,
:"anytype_3" => Map | nil
}
end
defimpl Poison.Decoder, for: OpenapiPetstore.Model.AdditionalPropertiesClass do
import OpenapiPetstore.Deserializer
def decode(value, options) do
value
|> deserialize(:"anytype_1", :struct, OpenapiPetstore.Model.Map, options)
|> deserialize(:"anytype_2", :struct, OpenapiPetstore.Model.Map, options)
|> deserialize(:"anytype_3", :struct, OpenapiPetstore.Model.Map, options)
end
end
| 32.46 | 91 | 0.640173 |
f74736cf099a47c2b33d06758fc92086f5de30da | 496 | ex | Elixir | lib/leetcode/defanging_ip_address.ex | ornj/leetcode_ex | a2245c6d7eece967fd095c296d7b5b10120632fd | [
"Unlicense"
] | null | null | null | lib/leetcode/defanging_ip_address.ex | ornj/leetcode_ex | a2245c6d7eece967fd095c296d7b5b10120632fd | [
"Unlicense"
] | null | null | null | lib/leetcode/defanging_ip_address.ex | ornj/leetcode_ex | a2245c6d7eece967fd095c296d7b5b10120632fd | [
"Unlicense"
] | null | null | null | defmodule Leetcode.DefangingIpAddress do
@moduledoc """
Given a valid (IPv4) IP address, return a defanged version of that IP address.
A defanged IP address replaces every period "." with "[.]".
Example 1:
Input: address = "1.1.1.1"
Output: "1[.]1[.]1[.]1"
Example 2:
Input: address = "255.100.50.0"
Output: "255[.]100[.]50[.]0"
Constraints:
The given address is a valid IPv4 address.
"""
def defange(ip_address), do: String.replace(ip_address, ".", "[.]")
end
| 21.565217 | 80 | 0.643145 |
f74738b39f690ae3cab1aa631f467fa0d0a15a5c | 176 | exs | Elixir | apps/frontend/config/dev.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/frontend/config/dev.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/frontend/config/dev.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | use Mix.Config
# Endpoint settings for local development: verbose error pages and live code
# reloading, with origin checking relaxed and no asset watchers.
config :frontend, Frontend.Endpoint,
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: []

# Deeper stacktraces to aid development debugging.
config :phoenix, :stacktrace_depth, 20
| 17.6 | 38 | 0.744318 |
f74749a729efcf0570de953ba53c935fa8e27fd3 | 40,270 | ex | Elixir | lib/ecto/adapters/myxql/connection.ex | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/myxql/connection.ex | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/myxql/connection.ex | hauleth/ecto_sql | 1d7f4b73bfa04e02a26bba8b3ea79a457850af0f | [
"Apache-2.0"
] | null | null | null | if Code.ensure_loaded?(MyXQL) do
defmodule Ecto.Adapters.MyXQL.Connection do
@moduledoc false
@behaviour Ecto.Adapters.SQL.Connection
## Connection
@impl true
def child_spec(opts) do
  # Delegates process supervision straight to the MyXQL driver.
  MyXQL.child_spec(opts)
end
## Query

@impl true
def prepare_execute(conn, name, sql, params, opts) do
  MyXQL.prepare_execute(conn, name, sql, params, opts)
end

@impl true
def query(conn, sql, params, opts) do
  # :binary_then_text presumably tries the binary protocol first and falls
  # back to the text protocol — TODO confirm against MyXQL's documentation.
  opts = Keyword.put_new(opts, :query_type, :binary_then_text)
  MyXQL.query(conn, sql, params, opts)
end

@impl true
def execute(conn, query, params, opts) do
  # Normalize MyXQL's 3-tuple success into the {:ok, result} shape Ecto expects.
  case MyXQL.execute(conn, query, params, opts) do
    {:ok, _, result} -> {:ok, result}
    {:error, _} = error -> error
  end
end

@impl true
def stream(conn, sql, params, opts) do
  MyXQL.stream(conn, sql, params, opts)
end
@impl true
def to_constraints(%MyXQL.Error{mysql: %{name: :ER_DUP_ENTRY}, message: message}, opts) do
  # Duplicate-entry messages carry the violated index name, quoted, after
  # " for key "; translate it into a :unique constraint for Ecto.
  case :binary.split(message, " for key ") do
    [_, quoted] -> [unique: normalize_index_name(quoted, opts[:source])]
    _ -> []
  end
end

def to_constraints(%MyXQL.Error{mysql: %{name: name}, message: message}, _opts)
    when name in [:ER_ROW_IS_REFERENCED_2, :ER_NO_REFERENCED_ROW_2] do
  # Foreign-key error messages embed the constraint name between
  # " CONSTRAINT " and " FOREIGN KEY ".
  case :binary.split(message, [" CONSTRAINT ", " FOREIGN KEY "], [:global]) do
    [_, quoted, _] -> [foreign_key: strip_quotes(quoted)]
    _ -> []
  end
end

# Any other error maps to no constraint violations.
def to_constraints(_, _),
  do: []

# Drops the first and last byte — the surrounding quote characters.
defp strip_quotes(quoted) do
  size = byte_size(quoted) - 2
  <<_, unquoted::binary-size(size), _>> = quoted
  unquoted
end

# Some server versions report the key as "<table>.<index>" — presumably why
# the source prefix is stripped here when known (TODO confirm which versions).
defp normalize_index_name(quoted, source) do
  name = strip_quotes(quoted)

  if source do
    String.trim_leading(name, "#{source}.")
  else
    name
  end
end
## Query
alias Ecto.Query.{BooleanExpr, JoinExpr, QueryExpr, WithExpr}
@parent_as 0
@impl true
def all(query, as_prefix \\ []) do
  sources = create_names(query, as_prefix)

  cte = cte(query, sources)
  from = from(query, sources)
  select = select(query, sources)
  join = join(query, sources)
  where = where(query, sources)
  group_by = group_by(query, sources)
  having = having(query, sources)
  window = window(query, sources)
  combinations = combinations(query)
  order_by = order_by(query, sources)
  limit = limit(query, sources)
  offset = offset(query, sources)
  lock = lock(query, sources)

  # Each helper returns iodata; the concatenation order mirrors the clause
  # order of a SELECT statement.
  [cte, select, from, join, where, group_by, having, window, combinations, order_by, limit, offset | lock]
end
@impl true
def update_all(query, prefix \\ nil) do
  %{from: %{source: source}, select: select} = query

  if select do
    error!(nil, ":select is not supported in update_all by MySQL")
  end

  sources = create_names(query, [])
  cte = cte(query, sources)
  {from, name} = get_source(query, sources, 0, source)

  # A non-nil prefix means this is being reused to render the UPDATE part of
  # an ON DUPLICATE KEY clause (see on_conflict/2), which uses the
  # :on_conflict update semantics.
  fields = if prefix do
    update_fields(:on_conflict, query, sources)
  else
    update_fields(:update, query, sources)
  end

  {join, wheres} = using_join(query, :update_all, sources)
  prefix = prefix || ["UPDATE ", from, " AS ", name, join, " SET "]
  where = where(%{query | wheres: wheres ++ query.wheres}, sources)

  [cte, prefix, fields | where]
end

@impl true
def delete_all(query) do
  if query.select do
    error!(nil, ":select is not supported in delete_all by MySQL")
  end

  sources = create_names(query, [])
  cte = cte(query, sources)
  {_, name, _} = elem(sources, 0)

  from = from(query, sources)
  join = join(query, sources)
  where = where(query, sources)

  # "DELETE <alias>.*" targets only the primary table even with joins present.
  [cte, "DELETE ", name, ".*", from, join | where]
end
@impl true
def insert(prefix, table, header, rows, on_conflict, []) do
  # Restored `&quote_name/1` — the original line was HTML-entity-mangled
  # (`&quot` had been decoded to `"`), which is not valid Elixir.
  fields = intersperse_map(header, ?,, &quote_name/1)

  ["INSERT INTO ", quote_table(prefix, table), " (", fields, ") VALUES ",
   insert_all(rows) | on_conflict(on_conflict, header)]
end

def insert(_prefix, _table, _header, _rows, _on_conflict, _returning) do
  error!(nil, ":returning is not supported in insert/insert_all by MySQL")
end

defp on_conflict({_, _, [_ | _]}, _header) do
  error!(nil, "The :conflict_target option is not supported in insert/insert_all by MySQL")
end

defp on_conflict({:raise, _, []}, _header) do
  []
end

defp on_conflict({:nothing, _, []}, [field | _]) do
  # MySQL has no DO NOTHING; assigning a column to itself makes the
  # duplicate-key update a no-op.
  quoted = quote_name(field)
  [" ON DUPLICATE KEY UPDATE ", quoted, " = " | quoted]
end

defp on_conflict({fields, _, []}, _header) when is_list(fields) do
  [" ON DUPLICATE KEY UPDATE " |
   intersperse_map(fields, ?,, fn field ->
     quoted = quote_name(field)
     [quoted, " = VALUES(", quoted, ?)]
   end)]
end

defp on_conflict({%{wheres: []} = query, _, []}, _header) do
  # Reuse update_all/2 to render the UPDATE portion of the clause.
  [" ON DUPLICATE KEY " | update_all(query, "UPDATE ")]
end

defp on_conflict({_query, _, []}, _header) do
  error!(nil, "Using a query with :where in combination with the :on_conflict option is not supported by MySQL")
end

defp insert_all(rows) do
  intersperse_map(rows, ?,, fn row ->
    [?(, intersperse_map(row, ?,, &insert_all_value/1), ?)]
  end)
end

# nil columns fall back to the column DEFAULT; embedded queries render inline;
# everything else becomes a positional placeholder.
defp insert_all_value(nil), do: "DEFAULT"
defp insert_all_value({%Ecto.Query{} = query, _params_counter}), do: [?(, all(query), ?)]
defp insert_all_value(_), do: '?'
@impl true
def update(prefix, table, fields, filters, _returning) do
  fields = intersperse_map(fields, ", ", &[quote_name(&1), " = ?"])

  # nil filters must compare with IS NULL; `= ?` never matches NULL in SQL.
  filters = intersperse_map(filters, " AND ", fn
    {field, nil} ->
      [quote_name(field), " IS NULL"]

    {field, _value} ->
      [quote_name(field), " = ?"]
  end)

  ["UPDATE ", quote_table(prefix, table), " SET ", fields, " WHERE " | filters]
end

@impl true
def delete(prefix, table, filters, _returning) do
  # Same NULL-safe filter rendering as update/5 above.
  filters = intersperse_map(filters, " AND ", fn
    {field, nil} ->
      [quote_name(field), " IS NULL"]

    {field, _value} ->
      [quote_name(field), " = ?"]
  end)

  ["DELETE FROM ", quote_table(prefix, table), " WHERE " | filters]
end
@impl true
# DB explain opts are deprecated, so they aren't used to build the explain query.
# See Notes at https://dev.mysql.com/doc/refman/5.7/en/explain.html
def explain_query(conn, query, params, opts) do
  case query(conn, build_explain_query(query), params, opts) do
    {:ok, %MyXQL.Result{columns: columns, rows: rows}} ->
      # Render the EXPLAIN result set as an ASCII table string.
      {:ok, format_result_as_table(columns, rows)}

    error ->
      error
  end
end
# Prefixes the rendered query with EXPLAIN and flattens the iodata to a
# binary. (Direct call replaces the original single-step pipe.)
def build_explain_query(query) do
  IO.iodata_to_binary(["EXPLAIN ", query])
end
# Renders EXPLAIN columns/rows as a bordered ASCII table string.
defp format_result_as_table(columns, rows) do
  # Transpose [header | rows] to compute the max printed width per column.
  # Enum.zip/1 replaces the deprecated List.zip/1 with identical results.
  column_widths =
    [columns | rows]
    |> Enum.zip()
    |> Enum.map(&Tuple.to_list/1)
    |> Enum.map(fn column_with_rows ->
      column_with_rows |> Enum.map(&binary_length/1) |> Enum.max()
    end)

  [
    separator(column_widths),
    "\n",
    cells(columns, column_widths),
    "\n",
    separator(column_widths),
    "\n",
    Enum.map(rows, &cells(&1, column_widths) ++ ["\n"]),
    separator(column_widths)
  ]
  |> IO.iodata_to_binary()
end
# Printed width of a cell: nil renders as "NULL" (4 chars); other values are
# measured via inspect/1.
defp binary_length(nil), do: 4 # NULL
defp binary_length(binary) when is_binary(binary), do: String.length(binary)
defp binary_length(other), do: other |> inspect() |> String.length()

# Horizontal rule sized to each column width plus one padding dash per side.
defp separator(widths) do
  Enum.map(widths, & [?+, ?-, String.duplicate("-", &1), ?-]) ++ [?+]
end

# One table row: each item padded to its column width, pipe-delimited.
# Enum.zip/2 replaces the deprecated List.zip/1 with identical results.
defp cells(items, widths) do
  cell =
    items
    |> Enum.zip(widths)
    |> Enum.map(fn {item, width} -> [?|, " ", format_item(item, width) , " "] end)

  [cell | [?|]]
end

# NULL and strings are left-aligned; numbers are right-aligned.
defp format_item(nil, width), do: String.pad_trailing("NULL", width)
defp format_item(item, width) when is_binary(item), do: String.pad_trailing(item, width)
defp format_item(item, width) when is_number(item), do: item |> inspect() |> String.pad_leading(width)
defp format_item(item, width), do: item |> inspect() |> String.pad_trailing(width)
## Query generation

# Operators rendered infix with fixed spacing; anything else falls back to
# function-call syntax via the catch-all clause below.
binary_ops =
  [==: " = ", !=: " != ", <=: " <= ", >=: " >= ", <: " < ", >: " > ",
   +: " + ", -: " - ", *: " * ", /: " / ",
   and: " AND ", or: " OR ", like: " LIKE "]

@binary_ops Keyword.keys(binary_ops)

# Compile-time generation: one handle_call/2 clause per binary operator.
Enum.map(binary_ops, fn {op, str} ->
  defp handle_call(unquote(op), 2), do: {:binary_op, unquote(str)}
end)

defp handle_call(fun, _arity), do: {:fun, Atom.to_string(fun)}
defp select(%{select: %{fields: fields}, distinct: distinct} = query, sources) do
["SELECT ", distinct(distinct, sources, query) | select(fields, sources, query)]
end
defp distinct(nil, _sources, _query), do: []
defp distinct(%QueryExpr{expr: true}, _sources, _query), do: "DISTINCT "
defp distinct(%QueryExpr{expr: false}, _sources, _query), do: []
defp distinct(%QueryExpr{expr: exprs}, _sources, query) when is_list(exprs) do
error!(query, "DISTINCT with multiple columns is not supported by MySQL")
end
defp select([], _sources, _query),
do: "TRUE"
defp select(fields, sources, query) do
intersperse_map(fields, ", ", fn
{:&, _, [idx]} ->
case elem(sources, idx) do
{source, _, nil} ->
error!(query, "MySQL does not support selecting all fields from #{source} without a schema. " <>
"Please specify a schema or specify exactly which fields you want to select")
{_, source, _} ->
source
end
{key, value} ->
[expr(value, sources, query), " AS ", quote_name(key)]
value ->
expr(value, sources, query)
end)
end
  # FROM clause: resolves source 0 and appends any table hints.
  defp from(%{from: %{source: source, hints: hints}} = query, sources) do
    {from, name} = get_source(query, sources, 0, source)
    [" FROM ", from, " AS ", name | Enum.map(hints, &[?\s | &1])]
  end
  # WITH clause for common table expressions; empty when the query has none.
  defp cte(%{with_ctes: %WithExpr{recursive: recursive, queries: [_ | _] = queries}} = query, sources) do
    recursive_opt = if recursive, do: "RECURSIVE ", else: ""
    ctes = intersperse_map(queries, ", ", &cte_expr(&1, sources, query))
    ["WITH ", recursive_opt, ctes, " "]
  end
  defp cte(%{with_ctes: _}, _), do: []
  defp cte_expr({name, cte}, sources, query) do
    [quote_name(name), " AS ", cte_query(cte, sources, query)]
  end
  # A CTE body is either a full Ecto query (parenthesized SELECT) or a raw
  # query expression.
  defp cte_query(%Ecto.Query{} = query, _, _), do: ["(", all(query), ")"]
  defp cte_query(%QueryExpr{expr: expr}, sources, query), do: expr(expr, sources, query)
  # SET clause of UPDATE: flattens each update expression into
  # "<key> = <value>" chunks joined by commas.
  defp update_fields(type, %{updates: updates} = query, sources) do
    fields = for(%{expr: expr} <- updates,
                 {op, kw} <- expr,
                 {key, value} <- kw,
                 do: update_op(op, update_key(type, key, query, sources), value, sources, query))
    Enum.intersperse(fields, ", ")
  end
  # Plain UPDATE qualifies the column with the source alias; ON DUPLICATE
  # KEY UPDATE (:on_conflict) must use the bare column name.
  defp update_key(:update, key, %{from: from} = query, sources) do
    {_from, name} = get_source(query, sources, 0, from)
    [name, ?. | quote_name(key)]
  end
  defp update_key(:on_conflict, key, _query, _sources) do
    quote_name(key)
  end
  defp update_op(:set, quoted_key, value, sources, query) do
    [quoted_key, " = " | expr(value, sources, query)]
  end
  defp update_op(:inc, quoted_key, value, sources, query) do
    [quoted_key, " = ", quoted_key, " + " | expr(value, sources, query)]
  end
  defp update_op(command, _quoted_key, _value, _sources, query) do
    error!(query, "Unknown update operation #{inspect command} for MySQL")
  end
  # Joins for update_all/delete_all: MySQL expresses them as extra FROM
  # sources, so only inner joins are representable; the ON conditions are
  # returned separately so the caller can fold them into WHERE.
  defp using_join(%{joins: []}, _kind, _sources), do: {[], []}
  defp using_join(%{joins: joins} = query, kind, sources) do
    froms =
      intersperse_map(joins, ", ", fn
        %JoinExpr{qual: :inner, ix: ix, source: source} ->
          {join, name} = get_source(query, sources, ix, source)
          [join, " AS " | name]
        %JoinExpr{qual: qual} ->
          error!(query, "MySQL adapter supports only inner joins on #{kind}, got: `#{qual}`")
      end)
    # Trivially-true ON conditions are dropped; the rest are retagged as
    # AND-ed boolean expressions for the WHERE clause.
    wheres =
      for %JoinExpr{on: %QueryExpr{expr: value} = expr} <- joins,
          value != true,
          do: expr |> Map.put(:__struct__, BooleanExpr) |> Map.put(:op, :and)
    {[?,, ?\s | froms], wheres}
  end
  # Regular SELECT joins.
  defp join(%{joins: []}, _sources), do: []
  defp join(%{joins: joins} = query, sources) do
    Enum.map(joins, fn
      %JoinExpr{on: %QueryExpr{expr: expr}, qual: qual, ix: ix, source: source, hints: hints} ->
        {join, name} = get_source(query, sources, ix, source)
        [join_qual(qual, query), join, " AS ", name, Enum.map(hints, &[?\s | &1]) | join_on(qual, expr, sources, query)]
    end)
  end
  # A CROSS JOIN with a trivially-true condition needs no ON clause.
  defp join_on(:cross, true, _sources, _query), do: []
  defp join_on(_qual, expr, sources, query), do: [" ON " | expr(expr, sources, query)]
  defp join_qual(:inner, _), do: " INNER JOIN "
  defp join_qual(:left, _), do: " LEFT OUTER JOIN "
  defp join_qual(:right, _), do: " RIGHT OUTER JOIN "
  defp join_qual(:full, _), do: " FULL OUTER JOIN "
  defp join_qual(:cross, _), do: " CROSS JOIN "
  defp join_qual(mode, q), do: error!(q, "join `#{inspect mode}` not supported by MySQL")
  defp where(%{wheres: wheres} = query, sources) do
    boolean(" WHERE ", wheres, sources, query)
  end
  defp having(%{havings: havings} = query, sources) do
    boolean(" HAVING ", havings, sources, query)
  end
  defp group_by(%{group_bys: []}, _sources), do: []
  defp group_by(%{group_bys: group_bys} = query, sources) do
    [" GROUP BY " |
     intersperse_map(group_bys, ", ", fn %QueryExpr{expr: expr} ->
       intersperse_map(expr, ", ", &expr(&1, sources, query))
     end)]
  end
  # Named windows: WINDOW w AS (PARTITION BY ... ORDER BY ... frame).
  defp window(%{windows: []}, _sources), do: []
  defp window(%{windows: windows} = query, sources) do
    [" WINDOW " |
     intersperse_map(windows, ", ", fn {name, %{expr: kw}} ->
       [quote_name(name), " AS " | window_exprs(kw, sources, query)]
     end)]
  end
  defp window_exprs(kw, sources, query) do
    [?(, intersperse_map(kw, ?\s, &window_expr(&1, sources, query)), ?)]
  end
  defp window_expr({:partition_by, fields}, sources, query) do
    ["PARTITION BY " | intersperse_map(fields, ", ", &expr(&1, sources, query))]
  end
  defp window_expr({:order_by, fields}, sources, query) do
    ["ORDER BY " | intersperse_map(fields, ", ", &order_by_expr(&1, sources, query))]
  end
  defp window_expr({:frame, {:fragment, _, _} = fragment}, sources, query) do
    expr(fragment, sources, query)
  end
  defp order_by(%{order_bys: []}, _sources), do: []
  defp order_by(%{order_bys: order_bys} = query, sources) do
    [" ORDER BY " |
     intersperse_map(order_bys, ", ", fn %QueryExpr{expr: expr} ->
       intersperse_map(expr, ", ", &order_by_expr(&1, sources, query))
     end)]
  end
  # ASC is MySQL's default ordering, so it is omitted.
  defp order_by_expr({dir, expr}, sources, query) do
    str = expr(expr, sources, query)
    case dir do
      :asc -> str
      :desc -> [str | " DESC"]
      _ -> error!(query, "#{dir} is not supported in ORDER BY in MySQL")
    end
  end
  defp limit(%{limit: nil}, _sources), do: []
  defp limit(%{limit: %QueryExpr{expr: expr}} = query, sources) do
    [" LIMIT " | expr(expr, sources, query)]
  end
  defp offset(%{offset: nil}, _sources), do: []
  defp offset(%{offset: %QueryExpr{expr: expr}} = query, sources) do
    [" OFFSET " | expr(expr, sources, query)]
  end
  # Set operations appended after the main query.
  # NOTE(review): MySQL itself has no EXCEPT/INTERSECT before 8.0.31 —
  # presumably validation happens elsewhere; confirm.
  defp combinations(%{combinations: combinations}) do
    Enum.map(combinations, fn
      {:union, query} -> [" UNION (", all(query), ")"]
      {:union_all, query} -> [" UNION ALL (", all(query), ")"]
      {:except, query} -> [" EXCEPT (", all(query), ")"]
      {:except_all, query} -> [" EXCEPT ALL (", all(query), ")"]
      {:intersect, query} -> [" INTERSECT (", all(query), ")"]
      {:intersect_all, query} -> [" INTERSECT ALL (", all(query), ")"]
    end)
  end
  defp lock(%{lock: nil}, _sources), do: []
  defp lock(%{lock: binary}, _sources) when is_binary(binary), do: [?\s | binary]
  defp lock(%{lock: expr} = query, sources), do: [?\s | expr(expr, sources, query)]
  # Folds a list of boolean expressions into one clause. The accumulator
  # tracks the previous operator; when it changes (AND -> OR or vice
  # versa) the accumulated expression is parenthesized to keep precedence.
  defp boolean(_name, [], _sources, _query), do: []
  defp boolean(name, [%{expr: expr, op: op} | query_exprs], sources, query) do
    [name,
     Enum.reduce(query_exprs, {op, paren_expr(expr, sources, query)}, fn
       %BooleanExpr{expr: expr, op: op}, {op, acc} ->
         {op, [acc, operator_to_boolean(op) | paren_expr(expr, sources, query)]}
       %BooleanExpr{expr: expr, op: op}, {_, acc} ->
         {op, [?(, acc, ?), operator_to_boolean(op) | paren_expr(expr, sources, query)]}
     end) |> elem(1)]
  end
  defp operator_to_boolean(:and), do: " AND "
  defp operator_to_boolean(:or), do: " OR "
  # Wraps a rendered fragment in parentheses when it starts with SELECT,
  # so it can be embedded as a subquery expression.
  defp parens_for_select([first_expr | _] = expr) do
    if is_binary(first_expr) and String.starts_with?(first_expr, ["SELECT", "select"]) do
      [?(, expr, ?)]
    else
      expr
    end
  end
  # Renders an expression wrapped in parentheses.
  defp paren_expr(expr, sources, query) do
    [?(, expr(expr, sources, query), ?)]
  end
  # Renders one Ecto query expression as SQL iodata. Clauses below handle
  # parameters, source/field access, IN, fragments, intervals, window
  # functions, function calls and literals.
  # A bound parameter becomes a positional placeholder.
  defp expr({:^, [], [_ix]}, _sources, _query) do
    '?'
  end
  # parent_as: field access against the parent query's sources (stashed in
  # query.aliases under @parent_as by the subquery clause below).
  defp expr({{:., _, [{:parent_as, _, [{:&, _, [idx]}]}, field]}, _, []}, _sources, query)
       when is_atom(field) do
    {_, name, _} = elem(query.aliases[@parent_as], idx)
    [name, ?. | quote_name(field)]
  end
  # Qualified field access: alias.`field`.
  defp expr({{:., _, [{:&, _, [idx]}, field]}, _, []}, sources, _query)
       when is_atom(field) do
    {_, name, _} = elem(sources, idx)
    [name, ?. | quote_name(field)]
  end
  defp expr({:&, _, [idx]}, sources, _query) do
    {_, source, _} = elem(sources, idx)
    source
  end
  # IN with an empty list can never match.
  defp expr({:in, _, [_left, []]}, _sources, _query) do
    "false"
  end
  defp expr({:in, _, [left, right]}, sources, query) when is_list(right) do
    args = intersperse_map(right, ?,, &expr(&1, sources, query))
    [expr(left, sources, query), " IN (", args, ?)]
  end
  # IN over an interpolated list of length 0 can never match either.
  defp expr({:in, _, [_, {:^, _, [_, 0]}]}, _sources, _query) do
    "false"
  end
  # IN over an interpolated list expands to `length` placeholders.
  defp expr({:in, _, [left, {:^, _, [_, length]}]}, sources, query) do
    args = Enum.intersperse(List.duplicate(??, length), ?,)
    [expr(left, sources, query), " IN (", args, ?)]
  end
  defp expr({:in, _, [left, %Ecto.SubQuery{} = subquery]}, sources, query) do
    [expr(left, sources, query), " IN ", expr(subquery, sources, query)]
  end
  defp expr({:in, _, [left, right]}, sources, query) do
    [expr(left, sources, query), " = ANY(", expr(right, sources, query), ?)]
  end
  defp expr({:is_nil, _, [arg]}, sources, query) do
    [expr(arg, sources, query) | " IS NULL"]
  end
  defp expr({:not, _, [expr]}, sources, query) do
    ["NOT (", expr(expr, sources, query), ?)]
  end
  defp expr({:filter, _, _}, _sources, query) do
    error!(query, "MySQL adapter does not support aggregate filters")
  end
  # Subqueries: the outer sources are stashed under @parent_as so the
  # parent_as clause above can resolve them from inside the subquery.
  defp expr(%Ecto.SubQuery{query: query}, sources, _query) do
    query = put_in(query.aliases[@parent_as], sources)
    [?(, all(query, [?s]), ?)]
  end
  defp expr({:fragment, _, [kw]}, _sources, query) when is_list(kw) or tuple_size(kw) == 3 do
    error!(query, "MySQL adapter does not support keyword or interpolated fragments")
  end
  defp expr({:fragment, _, parts}, sources, query) do
    Enum.map(parts, fn
      {:raw, part} -> part
      {:expr, expr} -> expr(expr, sources, query)
    end)
    |> parens_for_select
  end
  defp expr({:datetime_add, _, [datetime, count, interval]}, sources, query) do
    ["date_add(", expr(datetime, sources, query), ", ",
     interval(count, interval, sources, query) | ")"]
  end
  # date_add returns datetime, so cast back to date.
  defp expr({:date_add, _, [date, count, interval]}, sources, query) do
    ["CAST(date_add(", expr(date, sources, query), ", ",
     interval(count, interval, sources, query) | ") AS date)"]
  end
  defp expr({:ilike, _, [_, _]}, _sources, query) do
    error!(query, "ilike is not supported by MySQL")
  end
  # OVER a named window vs. an inline window specification.
  defp expr({:over, _, [agg, name]}, sources, query) when is_atom(name) do
    aggregate = expr(agg, sources, query)
    [aggregate, " OVER " | quote_name(name)]
  end
  defp expr({:over, _, [agg, kw]}, sources, query) do
    aggregate = expr(agg, sources, query)
    [aggregate, " OVER " | window_exprs(kw, sources, query)]
  end
  defp expr({:{}, _, elems}, sources, query) do
    [?(, intersperse_map(elems, ?,, &expr(&1, sources, query)), ?)]
  end
  defp expr({:count, _, []}, _sources, _query), do: "count(*)"
  # JSON path access via json_extract with a '$."key"[idx]' path string.
  defp expr({:json_extract_path, _, [expr, path]}, sources, query) do
    path =
      Enum.map(path, fn
        binary when is_binary(binary) ->
          [?., ?", escape_json_key(binary), ?"]
        integer when is_integer(integer) ->
          "[#{integer}]"
      end)
    ["json_extract(", expr(expr, sources, query), ", '$", path, "')"]
  end
  # Generic call: known binary operators become infix (operands
  # parenthesized as needed), everything else a function call. A trailing
  # :distinct argument turns into a DISTINCT modifier (e.g. count(DISTINCT x)).
  defp expr({fun, _, args}, sources, query) when is_atom(fun) and is_list(args) do
    {modifier, args} =
      case args do
        [rest, :distinct] -> {"DISTINCT ", [rest]}
        _ -> {[], args}
      end
    case handle_call(fun, length(args)) do
      {:binary_op, op} ->
        [left, right] = args
        [op_to_binary(left, sources, query), op | op_to_binary(right, sources, query)]
      {:fun, fun} ->
        [fun, ?(, modifier, intersperse_map(args, ", ", &expr(&1, sources, query)), ?)]
    end
  end
  defp expr(list, _sources, query) when is_list(list) do
    error!(query, "Array type is not supported by MySQL")
  end
  defp expr(%Decimal{} = decimal, _sources, _query) do
    Decimal.to_string(decimal, :normal)
  end
  # Binary literals as hex: x'...'.
  defp expr(%Ecto.Query.Tagged{value: binary, type: :binary}, _sources, _query)
       when is_binary(binary) do
    hex = Base.encode16(binary, case: :lower)
    [?x, ?', hex, ?']
  end
  # MySQL cannot CAST to float/decimal; "+ 0" forces numeric coercion.
  defp expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query)
       when type in [:decimal, :float] do
    [expr(other, sources, query), " + 0"]
  end
  defp expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query) do
    ["CAST(", expr(other, sources, query), " AS ", ecto_cast_to_db(type, query), ?)]
  end
  defp expr(nil, _sources, _query), do: "NULL"
  defp expr(true, _sources, _query), do: "TRUE"
  defp expr(false, _sources, _query), do: "FALSE"
  defp expr(literal, _sources, _query) when is_binary(literal) do
    [?', escape_string(literal), ?']
  end
  defp expr(literal, _sources, _query) when is_integer(literal) do
    Integer.to_string(literal)
  end
  defp expr(literal, _sources, _query) when is_float(literal) do
    # MySQL doesn't support float cast
    ["(0 + ", Float.to_string(literal), ?)]
  end
  # INTERVAL rendering; milliseconds are scaled to microseconds because
  # MySQL has no millisecond interval unit.
  defp interval(count, "millisecond", sources, query) do
    ["INTERVAL (", expr(count, sources, query) | " * 1000) microsecond"]
  end
  defp interval(count, interval, sources, query) do
    ["INTERVAL ", expr(count, sources, query), ?\s | interval]
  end
  # Operands of a binary operator are parenthesized when they are
  # themselves binary operators or IS NULL, to preserve precedence.
  defp op_to_binary({op, _, [_, _]} = expr, sources, query) when op in @binary_ops,
    do: paren_expr(expr, sources, query)
  defp op_to_binary({:is_nil, _, [_]} = expr, sources, query),
    do: paren_expr(expr, sources, query)
  defp op_to_binary(expr, sources, query),
    do: expr(expr, sources, query)
  # Builds the {quoted_table, alias, schema} tuple for each query source.
  defp create_names(%{sources: sources}, as_prefix) do
    create_names(sources, 0, tuple_size(sources), as_prefix) |> List.to_tuple()
  end
  defp create_names(sources, pos, limit, as_prefix) when pos < limit do
    [create_name(sources, pos, as_prefix) | create_names(sources, pos + 1, limit, as_prefix)]
  end
  defp create_names(_sources, pos, pos, _as_prefix) do
    []
  end
  # Aliases: f<pos> for fragments, s<pos> for subqueries, and the table's
  # first letter (or ?t) followed by <pos> for tables.
  defp create_name(sources, pos, as_prefix) do
    case elem(sources, pos) do
      {:fragment, _, _} ->
        {nil, as_prefix ++ [?f | Integer.to_string(pos)], nil}
      {table, schema, prefix} ->
        name = as_prefix ++ [create_alias(table) | Integer.to_string(pos)]
        {quote_table(prefix, table), name, schema}
      %Ecto.SubQuery{} ->
        {nil, as_prefix ++ [?s | Integer.to_string(pos)], nil}
    end
  end
  defp create_alias(<<first, _rest::binary>>) when first in ?a..?z when first in ?A..?Z do
    first
  end
  defp create_alias(_) do
    ?t
  end
  ## DDL
  alias Ecto.Migration.{Table, Index, Reference, Constraint}
  @impl true
  # Translates migration commands into DDL statements; each clause returns
  # a list of iodata statements (executed one by one by the migrator).
  def execute_ddl({command, %Table{} = table, columns}) when command in [:create, :create_if_not_exists] do
    table_structure =
      case column_definitions(table, columns) ++ pk_definitions(columns, ", ") do
        [] -> []
        list -> [?\s, ?(, list, ?)]
      end
    [["CREATE TABLE ",
      if_do(command == :create_if_not_exists, "IF NOT EXISTS "),
      quote_table(table.prefix, table.name),
      table_structure,
      engine_expr(table.engine), options_expr(table.options)]]
  end
  def execute_ddl({command, %Table{} = table}) when command in [:drop, :drop_if_exists] do
    [["DROP TABLE ", if_do(command == :drop_if_exists, "IF EXISTS "),
      quote_table(table.prefix, table.name)]]
  end
  def execute_ddl({:alter, %Table{} = table, changes}) do
    [["ALTER TABLE ", quote_table(table.prefix, table.name), ?\s,
      column_changes(table, changes), pk_definitions(changes, ", ADD ")]]
  end
  def execute_ddl({:create, %Index{} = index}) do
    # Partial indexes (WHERE) do not exist in MySQL.
    if index.where do
      error!(nil, "MySQL adapter does not support where in indexes")
    end
    [["CREATE", if_do(index.unique, " UNIQUE"), " INDEX ",
      quote_name(index.name),
      " ON ",
      quote_table(index.prefix, index.table), ?\s,
      ?(, intersperse_map(index.columns, ", ", &index_expr/1), ?),
      if_do(index.using, [" USING ", to_string(index.using)]),
      if_do(index.concurrently, " LOCK=NONE")]]
  end
  def execute_ddl({:create_if_not_exists, %Index{}}),
    do: error!(nil, "MySQL adapter does not support create if not exists for index")
  def execute_ddl({:create, %Constraint{check: check}}) when is_binary(check),
    do: error!(nil, "MySQL adapter does not support check constraints")
  def execute_ddl({:create, %Constraint{exclude: exclude}}) when is_binary(exclude),
    do: error!(nil, "MySQL adapter does not support exclusion constraints")
  def execute_ddl({:drop, %Index{} = index}) do
    [["DROP INDEX ",
      quote_name(index.name),
      " ON ", quote_table(index.prefix, index.table),
      if_do(index.concurrently, " LOCK=NONE")]]
  end
  def execute_ddl({:drop, %Constraint{}}),
    do: error!(nil, "MySQL adapter does not support constraints")
  def execute_ddl({:drop_if_exists, %Constraint{}}),
    do: error!(nil, "MySQL adapter does not support constraints")
  def execute_ddl({:drop_if_exists, %Index{}}),
    do: error!(nil, "MySQL adapter does not support drop if exists for index")
  def execute_ddl({:rename, %Table{} = current_table, %Table{} = new_table}) do
    [["RENAME TABLE ", quote_table(current_table.prefix, current_table.name),
      " TO ", quote_table(new_table.prefix, new_table.name)]]
  end
  def execute_ddl({:rename, %Table{} = table, current_column, new_column}) do
    [["ALTER TABLE ", quote_table(table.prefix, table.name), " RENAME COLUMN ",
      quote_name(current_column), " TO ", quote_name(new_column)]]
  end
  # Raw SQL strings pass through as a single statement.
  def execute_ddl(string) when is_binary(string), do: [string]
  def execute_ddl(keyword) when is_list(keyword),
    do: error!(nil, "MySQL adapter does not support keyword lists in execute")
  @impl true
  def ddl_logs(_), do: []
  @impl true
  # Used by the migrator to check for the schema migrations table.
  def table_exists_query(table) do
    {"SELECT true FROM information_schema.tables WHERE table_name = ? AND table_schema = DATABASE() LIMIT 1", [table]}
  end
defp pk_definitions(columns, prefix) do
pks =
for {_, name, _, opts} <- columns,
opts[:primary_key],
do: name
case pks do
[] -> []
_ -> [[prefix, "PRIMARY KEY (", intersperse_map(pks, ", ", "e_name/1), ?)]]
end
end
  # Column definitions for CREATE TABLE.
  defp column_definitions(table, columns) do
    intersperse_map(columns, ", ", &column_definition(table, &1))
  end
  defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
    [quote_name(name), ?\s, reference_column_type(ref.type, opts),
     column_options(opts), reference_expr(ref, table, name)]
  end
  defp column_definition(_table, {:add, name, type, opts}) do
    [quote_name(name), ?\s, column_type(type, opts), column_options(opts)]
  end
  # Column changes for ALTER TABLE.
  defp column_changes(table, columns) do
    intersperse_map(columns, ", ", &column_change(table, &1))
  end
  defp column_change(_table, {_command, _name, %Reference{validate: false}, _opts}) do
    error!(nil, "validate: false on references is not supported in MyXQL")
  end
  defp column_change(table, {:add, name, %Reference{} = ref, opts}) do
    ["ADD ", quote_name(name), ?\s, reference_column_type(ref.type, opts),
     column_options(opts), constraint_expr(ref, table, name)]
  end
  defp column_change(_table, {:add, name, type, opts}) do
    ["ADD ", quote_name(name), ?\s, column_type(type, opts), column_options(opts)]
  end
  defp column_change(table, {:add_if_not_exists, name, %Reference{} = ref, opts}) do
    ["ADD IF NOT EXISTS ", quote_name(name), ?\s, reference_column_type(ref.type, opts),
     column_options(opts), constraint_if_not_exists_expr(ref, table, name)]
  end
  defp column_change(_table, {:add_if_not_exists, name, type, opts}) do
    ["ADD IF NOT EXISTS ", quote_name(name), ?\s, column_type(type, opts), column_options(opts)]
  end
  # Modifying a referencing column drops the old FK (when :from is a
  # reference) before re-adding the constraint.
  defp column_change(table, {:modify, name, %Reference{} = ref, opts}) do
    [drop_constraint_expr(opts[:from], table, name), "MODIFY ", quote_name(name), ?\s, reference_column_type(ref.type, opts),
     column_options(opts), constraint_expr(ref, table, name)]
  end
  defp column_change(table, {:modify, name, type, opts}) do
    [drop_constraint_expr(opts[:from], table, name), "MODIFY ", quote_name(name), ?\s, column_type(type, opts), column_options(opts)]
  end
  defp column_change(_table, {:remove, name}), do: ["DROP ", quote_name(name)]
  defp column_change(table, {:remove, name, %Reference{} = ref, _opts}) do
    [drop_constraint_expr(ref, table, name), "DROP ", quote_name(name)]
  end
  defp column_change(_table, {:remove, name, _type, _opts}), do: ["DROP ", quote_name(name)]
  defp column_change(table, {:remove_if_exists, name, %Reference{} = ref}) do
    [drop_constraint_if_exists_expr(ref, table, name), "DROP IF EXISTS ", quote_name(name)]
  end
  defp column_change(_table, {:remove_if_exists, name, _type}), do: ["DROP IF EXISTS ", quote_name(name)]
  # DEFAULT / NULL / AFTER modifiers for a column definition.
  defp column_options(opts) do
    default = Keyword.fetch(opts, :default)
    null = Keyword.get(opts, :null)
    after_column = Keyword.get(opts, :after)
    [default_expr(default), null_expr(null), after_expr(after_column)]
  end
  defp after_expr(nil), do: []
  defp after_expr(column) when is_atom(column) or is_binary(column), do: " AFTER `#{column}`"
  defp after_expr(_), do: []
  defp null_expr(false), do: " NOT NULL"
  defp null_expr(true), do: " NULL"
  defp null_expr(_), do: []
  # :default option: Keyword.fetch result distinguishes "explicit nil"
  # (DEFAULT NULL) from "no default given" (:error -> no clause).
  defp default_expr({:ok, nil}),
    do: " DEFAULT NULL"
  defp default_expr({:ok, literal}) when is_binary(literal),
    do: [" DEFAULT '", escape_string(literal), ?']
  defp default_expr({:ok, literal}) when is_number(literal) or is_boolean(literal),
    do: [" DEFAULT ", to_string(literal)]
  defp default_expr({:ok, {:fragment, expr}}),
    do: [" DEFAULT ", expr]
  defp default_expr({:ok, value}) when is_map(value),
    do: error!(nil, ":default is not supported for json columns by MySQL")
  defp default_expr(:error),
    do: []
  # Raw strings in index columns are emitted verbatim (expressions);
  # atoms are quoted as identifiers.
  defp index_expr(literal) when is_binary(literal),
    do: literal
  defp index_expr(literal), do: quote_name(literal)
  defp engine_expr(storage_engine),
    do: [" ENGINE = ", String.upcase(to_string(storage_engine || "INNODB"))]
  defp options_expr(nil),
    do: []
  defp options_expr(keyword) when is_list(keyword),
    do: error!(nil, "MySQL adapter does not support keyword lists in :options")
  defp options_expr(options),
    do: [?\s, to_string(options)]
  # Maps an Ecto column type (plus :size/:precision/:scale opts) to a
  # MySQL column type; :string defaults to varchar(255).
  defp column_type(type, _opts) when type in ~w(time utc_datetime naive_datetime)a,
    do: ecto_to_db(type)
  defp column_type(type, opts) when type in ~w(time_usec utc_datetime_usec naive_datetime_usec)a do
    precision = Keyword.get(opts, :precision, 6)
    type_name = ecto_to_db(type)
    [type_name, ?(, to_string(precision), ?)]
  end
  defp column_type(type, opts) do
    size = Keyword.get(opts, :size)
    precision = Keyword.get(opts, :precision)
    scale = Keyword.get(opts, :scale)
    cond do
      size -> [ecto_size_to_db(type), ?(, to_string(size), ?)]
      precision -> [ecto_to_db(type), ?(, to_string(precision), ?,, to_string(scale || 0), ?)]
      type == :string -> ["varchar(255)"]
      true -> ecto_to_db(type)
    end
  end
  # Foreign-key clause variants. constraint_* are for ALTER TABLE (lead
  # with ", ADD"), reference_expr for CREATE TABLE (lead with ", ").
  defp constraint_expr(%Reference{} = ref, table, name),
    do: [", ADD CONSTRAINT ", reference_name(ref, table, name),
         " FOREIGN KEY (", quote_name(name), ?),
         " REFERENCES ", quote_table(ref.prefix || table.prefix, ref.table),
         ?(, quote_name(ref.column), ?),
         reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]
  defp constraint_if_not_exists_expr(%Reference{} = ref, table, name),
    do: [", ADD CONSTRAINT ", reference_name(ref, table, name),
         " FOREIGN KEY IF NOT EXISTS (", quote_name(name), ?),
         " REFERENCES ", quote_table(ref.prefix || table.prefix, ref.table),
         ?(, quote_name(ref.column), ?),
         reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]
  defp reference_expr(%Reference{} = ref, table, name),
    do: [", CONSTRAINT ", reference_name(ref, table, name),
         " FOREIGN KEY (", quote_name(name), ?),
         " REFERENCES ", quote_table(ref.prefix || table.prefix, ref.table),
         ?(, quote_name(ref.column), ?),
         reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]
  defp drop_constraint_expr(%Reference{} = ref, table, name),
    do: ["DROP FOREIGN KEY ", reference_name(ref, table, name), ", "]
  defp drop_constraint_expr(_, _, _),
    do: []
  defp drop_constraint_if_exists_expr(%Reference{} = ref, table, name),
    do: ["DROP FOREIGN KEY IF EXISTS ", reference_name(ref, table, name), ", "]
  defp drop_constraint_if_exists_expr(_, _, _),
    do: []
  # Default constraint name is "<table>_<column>_fkey".
  defp reference_name(%Reference{name: nil}, table, column),
    do: quote_name("#{table.name}_#{column}_fkey")
  defp reference_name(%Reference{name: name}, _table, _column),
    do: quote_name(name)
  # FK columns referencing serial PKs must match the PK's unsigned bigint.
  defp reference_column_type(:serial, _opts), do: "BIGINT UNSIGNED"
  defp reference_column_type(:bigserial, _opts), do: "BIGINT UNSIGNED"
  defp reference_column_type(type, opts), do: column_type(type, opts)
  defp reference_on_delete(:nilify_all), do: " ON DELETE SET NULL"
  defp reference_on_delete(:delete_all), do: " ON DELETE CASCADE"
  defp reference_on_delete(:restrict), do: " ON DELETE RESTRICT"
  defp reference_on_delete(_), do: []
  defp reference_on_update(:nilify_all), do: " ON UPDATE SET NULL"
  defp reference_on_update(:update_all), do: " ON UPDATE CASCADE"
  defp reference_on_update(:restrict), do: " ON UPDATE RESTRICT"
  defp reference_on_update(_), do: []
  ## Helpers
  # Resolves a source tuple: tables already have quoted SQL; fragments and
  # subqueries are rendered on demand.
  defp get_source(query, sources, ix, source) do
    {expr, name, _schema} = elem(sources, ix)
    {expr || expr(source, sources, query), name}
  end
  # Quotes an identifier with backticks, rejecting embedded backticks.
  defp quote_name(name)
  defp quote_name(name) when is_atom(name),
    do: quote_name(Atom.to_string(name))
  defp quote_name(name) do
    if String.contains?(name, "`") do
      error!(nil, "bad field name #{inspect name}")
    end
    [?`, name, ?`]
  end
  # Quotes an optionally-prefixed table name: `prefix`.`name`.
  defp quote_table(nil, name), do: quote_table(name)
  defp quote_table(prefix, name), do: [quote_table(prefix), ?., quote_table(name)]
  defp quote_table(name) when is_atom(name),
    do: quote_table(Atom.to_string(name))
  defp quote_table(name) do
    if String.contains?(name, "`") do
      error!(nil, "bad table name #{inspect name}")
    end
    [?`, name, ?`]
  end
  # Maps `mapper` over `list`, interspersing `separator`, accumulating
  # iodata without a final reverse.
  defp intersperse_map(list, separator, mapper, acc \\ [])
  defp intersperse_map([], _separator, _mapper, acc),
    do: acc
  defp intersperse_map([elem], _separator, mapper, acc),
    do: [acc | mapper.(elem)]
  defp intersperse_map([elem | rest], separator, mapper, acc),
    do: intersperse_map(rest, separator, mapper, [acc, mapper.(elem), separator])
defp if_do(condition, value) do
if condition, do: value, else: []
end
  # Escapes a string literal for embedding in single quotes: doubles
  # single quotes and backslashes (order is safe — neither replacement
  # produces characters of the other class).
  defp escape_string(value) when is_binary(value) do
    value
    |> :binary.replace("'", "''", [:global])
    |> :binary.replace("\\", "\\\\", [:global])
  end
  # JSON path keys additionally need their double quotes escaped.
  defp escape_json_key(value) when is_binary(value) do
    value
    |> escape_string()
    |> :binary.replace("\"", "\\\\\"", [:global])
  end
  # Target types for CAST(... AS type) in query expressions.
  defp ecto_cast_to_db(:id, _query), do: "unsigned"
  defp ecto_cast_to_db(:integer, _query), do: "unsigned"
  defp ecto_cast_to_db(:string, _query), do: "char"
  defp ecto_cast_to_db(:utc_datetime_usec, _query), do: "datetime(6)"
  defp ecto_cast_to_db(:naive_datetime_usec, _query), do: "datetime(6)"
  defp ecto_cast_to_db(type, query), do: ecto_to_db(type, query)
  # Column base type when an explicit :size is given.
  defp ecto_size_to_db(:binary), do: "varbinary"
  defp ecto_size_to_db(type), do: ecto_to_db(type)
  # Ecto type -> MySQL column type for DDL.
  defp ecto_to_db(type, query \\ nil)
  defp ecto_to_db({:array, _}, query), do: error!(query, "Array type is not supported by MySQL")
  defp ecto_to_db(:id, _query), do: "integer"
  defp ecto_to_db(:serial, _query), do: "bigint unsigned not null auto_increment"
  defp ecto_to_db(:bigserial, _query), do: "bigint unsigned not null auto_increment"
  defp ecto_to_db(:binary_id, _query), do: "binary(16)"
  defp ecto_to_db(:string, _query), do: "varchar"
  defp ecto_to_db(:float, _query), do: "double"
  defp ecto_to_db(:binary, _query), do: "blob"
  defp ecto_to_db(:uuid, _query), do: "binary(16)" # MySQL does not support uuid
  defp ecto_to_db(:map, _query), do: "json"
  defp ecto_to_db({:map, _}, _query), do: "json"
  defp ecto_to_db(:time_usec, _query), do: "time"
  defp ecto_to_db(:utc_datetime, _query), do: "datetime"
  defp ecto_to_db(:utc_datetime_usec, _query), do: "datetime"
  defp ecto_to_db(:naive_datetime, _query), do: "datetime"
  defp ecto_to_db(:naive_datetime_usec, _query), do: "datetime"
  defp ecto_to_db(atom, _query) when is_atom(atom), do: Atom.to_string(atom)
  defp ecto_to_db(str, _query) when is_binary(str), do: str
  defp ecto_to_db(type, _query) do
    raise ArgumentError,
          "unsupported type `#{inspect(type)}`. The type can either be an atom, a string " <>
            "or a tuple of the form `{:map, t}` where `t` itself follows the same conditions."
  end
  # Raises ArgumentError when there is no query context, Ecto.QueryError
  # (which carries the query) otherwise.
  defp error!(nil, message) do
    raise ArgumentError, message
  end
  defp error!(query, message) do
    raise Ecto.QueryError, query: query, message: message
  end
end
end
| 36.214029 | 135 | 0.602682 |
f7475871047c656ba4b25f1419a289a76905c08f | 1,912 | ex | Elixir | test/support/cluster.ex | alexandrubagu/nebulex | c8870a91644545f7a10a6af825ef6822a40fadf4 | [
"MIT"
] | 845 | 2017-02-14T14:16:11.000Z | 2022-03-30T04:13:08.000Z | test/support/cluster.ex | alexandrubagu/nebulex | c8870a91644545f7a10a6af825ef6822a40fadf4 | [
"MIT"
] | 146 | 2017-04-29T16:11:14.000Z | 2022-03-29T08:49:05.000Z | test/support/cluster.ex | alexandrubagu/nebulex | c8870a91644545f7a10a6af825ef6822a40fadf4 | [
"MIT"
] | 50 | 2017-08-17T13:44:06.000Z | 2022-03-30T11:29:59.000Z | defmodule Nebulex.Cluster do
@moduledoc """
Taken from `Phoenix.PubSub.Cluster`.
Copyright (c) 2014 Chris McCord
"""
def spawn(nodes) do
# Turn node into a distributed node with the given long name
_ = :net_kernel.start([:"[email protected]"])
# Allow spawned nodes to fetch all code from this node
_ = :erl_boot_server.start([])
_ = allow_boot(to_charlist("127.0.0.1"))
nodes
|> Enum.map(&Task.async(fn -> spawn_node(&1) end))
|> Enum.map(&Task.await(&1, 30_000))
end
def spawn_node(node_host) do
{:ok, node} = :slave.start(to_charlist("127.0.0.1"), node_name(node_host), inet_loader_args())
_ = add_code_paths(node)
_ = transfer_configuration(node)
_ = ensure_applications_started(node)
{:ok, node}
end
defp rpc(node, module, function, args) do
:rpc.block_call(node, module, function, args)
end
defp inet_loader_args do
to_charlist("-loader inet -hosts 127.0.0.1 -setcookie #{:erlang.get_cookie()}")
end
defp allow_boot(host) do
{:ok, ipv4} = :inet.parse_ipv4_address(host)
:erl_boot_server.add_slave(ipv4)
end
defp add_code_paths(node) do
rpc(node, :code, :add_paths, [:code.get_path()])
end
defp transfer_configuration(node) do
for {app_name, _, _} <- Application.loaded_applications() do
for {key, val} <- Application.get_all_env(app_name) do
rpc(node, Application, :put_env, [app_name, key, val])
end
end
end
defp ensure_applications_started(node) do
rpc(node, Application, :ensure_all_started, [:mix])
rpc(node, Mix, :env, [Mix.env()])
for {app_name, _, _} <- Application.loaded_applications(), app_name not in [:dialyxir] do
rpc(node, Application, :ensure_all_started, [app_name])
end
end
defp node_name(node_host) do
node_host
|> to_string()
|> String.split("@")
|> Enum.at(0)
|> String.to_atom()
end
end
| 27.314286 | 98 | 0.660042 |
f7476e8f097539f538d147eac4813c20dd7e9d41 | 1,733 | ex | Elixir | lib/pacman/engine.ex | zampino/pacman | 889080e26054dd04aa9e3ef5f7971d408a698a86 | [
"MIT"
] | null | null | null | lib/pacman/engine.ex | zampino/pacman | 889080e26054dd04aa9e3ef5f7971d408a698a86 | [
"MIT"
] | null | null | null | lib/pacman/engine.ex | zampino/pacman | 889080e26054dd04aa9e3ef5f7971d408a698a86 | [
"MIT"
] | null | null | null | defmodule Pacman.Engine do
@doc "the main animation loop changes states of the pacman's world"
def main(world, outs) do
catch_exit
event = fetch_event
{world, outs} = react_on_event(world, outs, event)
world = Pacman.World.move_pacmans(world)
Enum.each outs, &(send_state(&1, world))
:timer.sleep 200
main(world, outs)
end
  # Pushes the rendered world state to one output process.
  def send_state(out, world) do
    send out, {:state, Pacman.World.represent(world)}
  end
  @doc "this ensures we process just
  one shared event per cycle in a non-blocking fashion"
  # Returns the next queued event or nil immediately (after 0 = no wait).
  def fetch_event do
    receive do
      {:event, event} -> event
    after
      0 -> nil
    end
  end
  @doc "adds an output channel"
  def react_on_event(world, outs, [type: :register_output, pid: pid]) do
    outs = List.insert_at outs, 0, pid
    {world, outs}
  end
  @doc "removes the specified output channel"
  def react_on_event(world, outs, [type: :remove_output, pid: pid]) do
    outs = List.delete outs, pid
    {world, outs}
  end
  @doc "changes the world's state based on incoming event"
  def react_on_event(world, outs, [type: :register_pacman, name: name]) do
    world = Pacman.World.register_pacman(world, name)
    {world, outs}
  end
  @doc "removed named pacman from the World"
  def react_on_event(world, outs, [type: :remove_pacman, name: name]) do
    world = Pacman.World.remove_pacman(world, name)
    {world, outs}
  end
  def react_on_event(world, outs, [type: :dump_state]) do
    IO.puts Pacman.World.represent(world)
    {world, outs}
  end
  # Unknown events (including nil from fetch_event) are ignored.
  def react_on_event(world, outs, _) do
    {world, outs}
  end
def catch_exit do
receive do
:quit -> Process.exit(self, :kill)
after
0 -> "no exit signal"
end
end
end
| 24.757143 | 74 | 0.66532 |
f747811a24828fa46b9612ae945d7114a7337e69 | 2,913 | ex | Elixir | lib/xcribe/conn_parser.ex | brainn-co/xcribe | 06e66df0ac28d5b3e525f54bc52ed21722701a06 | [
"Apache-2.0"
] | 30 | 2019-12-17T20:09:41.000Z | 2021-01-11T13:58:24.000Z | lib/xcribe/conn_parser.ex | Finbits/xcribe | 37f4195315e27e415212910f0219c68d96a16d4a | [
"Apache-2.0"
] | 36 | 2019-12-17T20:32:04.000Z | 2020-12-02T17:50:49.000Z | lib/xcribe/conn_parser.ex | Finbits/xcribe | 37f4195315e27e415212910f0219c68d96a16d4a | [
"Apache-2.0"
] | 3 | 2020-02-21T18:13:59.000Z | 2020-10-20T07:59:05.000Z | defmodule Xcribe.ConnParser do
@moduledoc false
alias Plug.Conn
alias Xcribe.{Config, Request, Request.Error}
@error_struct %Error{type: :parsing}
@doc """
Parse the given `Plug.Conn` and transform it to a `Xcribe.Request`. A
description can be provided.
If any error occurs a `Xcribe.Request.Error` is returned
"""
def execute(conn, description \\ "")
def execute(%Conn{} = conn, description) do
conn
|> identify_route()
|> parse_conn(conn, description)
end
def execute(_conn, _description),
do: %{@error_struct | message: "A Plug.Conn must be given"}
  # An error produced by `identify_route/1` is passed straight through.
  defp parse_conn(%Error{} = error, _conn, _description), do: error

  # Maps the matched route plus the conn's request/response data onto the
  # flat `Xcribe.Request` struct.
  defp parse_conn(route, conn, description) do
    # Path with ":param" segments rewritten to "{param}" placeholders.
    path = format_path(route.route, conn.path_params)

    %Request{
      action: route |> router_options() |> Atom.to_string(),
      header_params: conn.req_headers,
      controller: controller_module(route),
      description: description,
      params: conn.params,
      path: path,
      path_params: conn.path_params,
      query_params: conn.query_params,
      request_body: conn.body_params,
      resource: resource_name(path, fetch_namespaces()),
      resource_group: resource_group(route),
      resp_body: conn.resp_body,
      resp_headers: conn.resp_headers,
      status_code: conn.status,
      verb: String.downcase(conn.method)
    }
  end
defp identify_route(%{method: method, host: host, path_info: path} = conn) do
conn
|> router_module()
|> apply(:__match_route__, [method, decode_uri(path), host])
|> extract_route_info()
rescue
_ -> %{@error_struct | message: "An invalid Plug.Conn was given or maybe an invalid Router"}
end
  # The Phoenix router that dispatched this conn (set under conn.private).
  defp router_module(%{private: %{phoenix_router: router}}), do: router

  # Path segments arrive URI-encoded; decode before matching the route.
  defp decode_uri(path_info), do: Enum.map(path_info, &URI.decode/1)

  # On a successful match `__match_route__/3` yields a 4-tuple whose first
  # element is the route-info map; anything else means no route matched.
  defp extract_route_info({%{} = route_info, _callback_one, _callback_two, _plug_info}),
    do: route_info

  defp extract_route_info(_),
    do: Map.put(@error_struct, :message, "A route wasn't found for given Conn")

  # Two clauses cover both shapes of the route-info map (`:plug_opts` vs
  # `:opts`) — presumably differing Phoenix versions; confirm if touching.
  defp router_options(%{plug_opts: opts}), do: opts
  defp router_options(%{opts: opts}), do: opts

  defp controller_module(%{plug: controller}), do: controller

  # The first pipeline the route goes through names its resource group.
  defp resource_group(%{pipe_through: [head | _rest]}), do: head
  defp resource_group(%{}), do: nil

  # Builds a resource name such as "users_comments" from the path after
  # stripping any configured namespace prefixes and parameter segments.
  defp resource_name(path, namespaces) do
    namespaces
    |> Enum.reduce(path, &remove_namespace/2)
    |> String.split("/")
    |> Enum.filter(&Regex.match?(~r/^\w+$/, &1))
    |> Enum.join("_")
  end

  defp remove_namespace(namespace, path), do: String.replace(path, ~r/^#{namespace}/, "")

  # Rewrites each ":param" segment of the route to a "{param}" placeholder.
  defp format_path(path, params),
    do: params |> Map.keys() |> Enum.reduce(path, &transform_param/2)

  defp transform_param(param, path), do: String.replace(path, ":#{param}", "{#{param}}")

  # Namespaces come from the user-provided information source module.
  defp fetch_namespaces, do: apply(Config.xcribe_information_source!(), :namespaces, [])
end
| 30.989362 | 96 | 0.680055 |
f747999abf839593aebc3c272fb267b0c8b0152a | 2,061 | ex | Elixir | lib/type_check/type_error.ex | ktec/elixir-type_check | 42bde40b4a67e999653c5336294dc651c98a747a | [
"MIT"
] | null | null | null | lib/type_check/type_error.ex | ktec/elixir-type_check | 42bde40b4a67e999653c5336294dc651c98a747a | [
"MIT"
] | null | null | null | lib/type_check/type_error.ex | ktec/elixir-type_check | 42bde40b4a67e999653c5336294dc651c98a747a | [
"MIT"
] | null | null | null | defmodule TypeCheck.TypeError do
@moduledoc """
Exception to be returned or raised when a value is not of the expected type.
This exception has two fields:
- `:raw`, which will contain the problem tuple of the type check failure.
  - `:message`, which will contain the human-readable representation of the raw problem_tuple
`:message` is constructed from `:raw` using the TypeCheck.TypeError.DefaultFormatter.
(TODO at some point this might be configured to use your custom formatter instead)
"""
defexception [:message, :raw]
@type t() :: %__MODULE__{message: String.t(), raw: problem_tuple()}
@typedoc """
Any built-in TypeCheck struct (c.f. `TypeCheck.Builtin.*`), whose check(s) failed.
"""
@type type_checked_against :: TypeCheck.Type.t()
@typedoc """
The name of the particular check. Might be `:no_match` for simple types,
  but for more complex types that have multiple checks, it disambiguates between them.
For instance, for `TypeCheck.Builtin.List` we have `:not_a_list`, `:different_length`, and `:element_error`.
"""
@type check_name :: atom()
@typedoc """
An extra map with any information related to the check that failed.
For instance, if the check was a compound check, will contain the field `problem:` with the child problem_tuple
as well as `:index` or `:key` etc. to indicate _where_ in the compound structure the check failed.
"""
@type extra_information :: %{optional(atom) => any()}
@typedoc """
The value that was passed in which failed the check.
It is included for the easy creation of `value did not match y`-style messages.
"""
@type problematic_value :: any()
@typedoc """
A problem_tuple contains all information about a failed type check.
"""
@type problem_tuple ::
{type_checked_against(), check_name(), extra_information(), problematic_value()}
@impl true
def exception(problem_tuple) do
message = TypeCheck.TypeError.DefaultFormatter.format_wrap(problem_tuple)
%__MODULE__{message: message, raw: problem_tuple}
end
end
| 35.534483 | 113 | 0.719554 |
f747d18abcc2676366eedd4541769c8f6c2e3706 | 2,733 | exs | Elixir | test/election_web/controllers/vote_controller_test.exs | manojsamanta/election | e4eca4f2813011954d08eb04d057f84a9f6bda90 | [
"MIT"
] | null | null | null | test/election_web/controllers/vote_controller_test.exs | manojsamanta/election | e4eca4f2813011954d08eb04d057f84a9f6bda90 | [
"MIT"
] | null | null | null | test/election_web/controllers/vote_controller_test.exs | manojsamanta/election | e4eca4f2813011954d08eb04d057f84a9f6bda90 | [
"MIT"
] | null | null | null | defmodule ElectionWeb.VoteControllerTest do
use ElectionWeb.ConnCase
alias Election.Voting
@create_attrs %{v_2014: true, v_2016: true, v_2018: true, v_2020: true}
@update_attrs %{v_2014: false, v_2016: false, v_2018: false, v_2020: false}
@invalid_attrs %{v_2014: nil, v_2016: nil, v_2018: nil, v_2020: nil}
  # Inserts a vote record with the default (all-true) attributes.
  def fixture(:vote) do
    {:ok, vote} = Voting.create_vote(@create_attrs)
    vote
  end

  describe "index" do
    test "lists all votes", %{conn: conn} do
      conn = get(conn, Routes.vote_path(conn, :index))
      assert html_response(conn, 200) =~ "Listing Votes"
    end
  end

  describe "new vote" do
    test "renders form", %{conn: conn} do
      conn = get(conn, Routes.vote_path(conn, :new))
      assert html_response(conn, 200) =~ "New Vote"
    end
  end

  describe "create vote" do
    test "redirects to show when data is valid", %{conn: conn} do
      conn = post(conn, Routes.vote_path(conn, :create), vote: @create_attrs)
      # The redirect carries the new record's id; follow it to the show page.
      assert %{id: id} = redirected_params(conn)
      assert redirected_to(conn) == Routes.vote_path(conn, :show, id)
      conn = get(conn, Routes.vote_path(conn, :show, id))
      assert html_response(conn, 200) =~ "Show Vote"
    end

    test "renders errors when data is invalid", %{conn: conn} do
      # Invalid attrs re-render the "new" form instead of redirecting.
      conn = post(conn, Routes.vote_path(conn, :create), vote: @invalid_attrs)
      assert html_response(conn, 200) =~ "New Vote"
    end
  end

  describe "edit vote" do
    setup [:create_vote]

    test "renders form for editing chosen vote", %{conn: conn, vote: vote} do
      conn = get(conn, Routes.vote_path(conn, :edit, vote))
      assert html_response(conn, 200) =~ "Edit Vote"
    end
  end

  describe "update vote" do
    setup [:create_vote]

    test "redirects when data is valid", %{conn: conn, vote: vote} do
      conn = put(conn, Routes.vote_path(conn, :update, vote), vote: @update_attrs)
      assert redirected_to(conn) == Routes.vote_path(conn, :show, vote)
      conn = get(conn, Routes.vote_path(conn, :show, vote))
      assert html_response(conn, 200)
    end

    test "renders errors when data is invalid", %{conn: conn, vote: vote} do
      conn = put(conn, Routes.vote_path(conn, :update, vote), vote: @invalid_attrs)
      assert html_response(conn, 200) =~ "Edit Vote"
    end
  end

  describe "delete vote" do
    setup [:create_vote]

    test "deletes chosen vote", %{conn: conn, vote: vote} do
      conn = delete(conn, Routes.vote_path(conn, :delete, vote))
      assert redirected_to(conn) == Routes.vote_path(conn, :index)
      # Fetching the deleted record must now 404.
      assert_error_sent 404, fn ->
        get(conn, Routes.vote_path(conn, :show, vote))
      end
    end
  end

  # Setup helper: provides a persisted vote to tests via the context map.
  defp create_vote(_) do
    vote = fixture(:vote)
    %{vote: vote}
  end
end
| 30.707865 | 83 | 0.652397 |
f747f07da2f4228ef83c1ccfcf5f5ace523d2afe | 670 | exs | Elixir | episode24/your_app/mix.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | episode24/your_app/mix.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | episode24/your_app/mix.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | defmodule YourApp.Mixfile do
use Mix.Project
def project do
[app: :your_app,
version: "0.1.0",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
| 20.30303 | 77 | 0.607463 |
f7482604635ce8f75b5aa45190ab26f08f6d9e17 | 2,566 | exs | Elixir | test/commands/core_test.exs | AnilRedshift/wand-cli | c364cf5397353fd61ba0e5fc33225575eb72ccda | [
"BSD-3-Clause"
] | 3 | 2018-07-01T05:31:02.000Z | 2019-08-03T04:11:32.000Z | test/commands/core_test.exs | AnilRedshift/wand-cli | c364cf5397353fd61ba0e5fc33225575eb72ccda | [
"BSD-3-Clause"
] | 8 | 2018-06-28T07:30:04.000Z | 2018-07-13T07:36:56.000Z | test/commands/core_test.exs | AnilRedshift/wand-cli | c364cf5397353fd61ba0e5fc33225575eb72ccda | [
"BSD-3-Clause"
] | null | null | null | defmodule CoreTest do
use ExUnit.Case, async: true
import Mox
alias Wand.CLI.Commands.Core
alias Wand.CLI.Executor.Result
alias Wand.Test.Helpers
  # Argument-parsing tests: Core.validate/1 receives the raw CLI argv.
  describe "validate" do
    test "returns help if nothing is passed in" do
      assert Core.validate(["core"]) == {:error, :wrong_command}
    end

    test "returns help if invalid commands are given" do
      assert Core.validate(["core", "wrong"]) == {:error, :wrong_command}
    end

    test "returns help if an invalid flag is given" do
      # --version is only valid directly after "core", not after a subcommand.
      assert Core.validate(["core", "install", "--version"]) ==
               {:error, {:invalid_flag, "--version"}}
    end

    test "install" do
      assert Core.validate(["core", "install"]) == {:ok, :install}
    end

    test "--version" do
      assert Core.validate(["core", "--version"]) == {:ok, :version}
    end

    test "version" do
      assert Core.validate(["core", "version"]) == {:ok, :version}
    end
  end
  # Each help variant is asserted only indirectly: the Mox expectation in
  # stub_io requires exactly one IO puts call, verified on exit.
  describe "help" do
    setup :verify_on_exit!
    setup :stub_io

    test "wrong_command" do
      Core.help(:wrong_command)
    end

    test "banner" do
      Core.help(:banner)
    end

    test "verbose" do
      Core.help(:verbose)
    end

    # Expects a single puts call with any message on the IO mock.
    def stub_io(_) do
      expect(Wand.IOMock, :puts, fn _message -> :ok end)
      :ok
    end
  end
describe "execute version" do
setup :verify_on_exit!
test "succesfully gets the version" do
version = "3.2.1"
Helpers.System.stub_core_version(version)
expect(Wand.IOMock, :puts, fn ^version -> :ok end)
Sys
assert Core.execute(:version, %{}) == {:ok, %Wand.CLI.Executor.Result{message: nil}}
end
test "gets the version even if there are warnings in the project" do
version = "warning: function deps/0 is unused\n mix.exs:22\n\n3.2.1"
Helpers.System.stub_core_version(version)
expect(Wand.IOMock, :puts, fn "3.2.1" -> :ok end)
Sys
assert Core.execute(:version, %{}) == {:ok, %Wand.CLI.Executor.Result{message: nil}}
end
test "fails to get the version" do
Helpers.System.stub_core_version_missing()
assert Core.execute(:version, %{}) == {:error, :wand_core_missing, nil}
end
end
  describe "execute install" do
    test "successfully installs the core" do
      Helpers.System.stub_install_core()
      assert Core.execute(:install, %{}) == {:ok, %Result{}}
    end

    test "Returns an error if installing the core fails" do
      Helpers.System.stub_failed_install_core()
      assert Core.execute(:install, %{}) == {:error, :wand_core_api_error, nil}
    end
  end
end
| 27.297872 | 90 | 0.627825 |
f7484ec1fcaf217a0ff1d4949a0f518830232424 | 7,587 | ex | Elixir | lib/teslamate/vehicles/vehicle/summary.ex | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 1 | 2021-11-03T02:08:43.000Z | 2021-11-03T02:08:43.000Z | lib/teslamate/vehicles/vehicle/summary.ex | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 170 | 2020-07-27T05:57:31.000Z | 2022-03-01T04:05:16.000Z | lib/teslamate/vehicles/vehicle/summary.ex | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | null | null | null | defmodule TeslaMate.Vehicles.Vehicle.Summary do
import TeslaMate.Convert, only: [miles_to_km: 2, mph_to_kmh: 1]
alias TeslaApi.Vehicle.State.{Drive, Charge, VehicleState}
alias TeslaApi.Vehicle
alias TeslaMate.Log.Car
defstruct ~w(
car display_name state since healthy latitude longitude heading battery_level usable_battery_level
ideal_battery_range_km est_battery_range_km rated_battery_range_km charge_energy_added
speed outside_temp inside_temp is_climate_on is_preconditioning locked sentry_mode
plugged_in scheduled_charging_start_time charge_limit_soc charger_power windows_open doors_open
odometer shift_state charge_port_door_open time_to_full_charge charger_phases
charger_actual_current charger_voltage version update_available update_version is_user_present geofence
model trim_badging exterior_color wheel_type spoiler_type trunk_open frunk_open elevation power
)a
  # Placeholder summary used when no vehicle data is available yet
  # (state machine still in :start): only static car attributes are known.
  def into(nil, %{state: :start, healthy?: healthy?, car: car}) do
    %__MODULE__{
      state: :unavailable,
      healthy: healthy?,
      trim_badging: get_car_attr(car, :trim_badging),
      exterior_color: get_car_attr(car, :exterior_color),
      spoiler_type: get_car_attr(car, :spoiler_type),
      wheel_type: get_car_attr(car, :wheel_type),
      model: get_car_attr(car, :model),
      car: car
    }
  end

  # Builds the full summary from a `TeslaApi.Vehicle` plus the process
  # attributes (state tag, since, health flag, elevation, geofence, car).
  def into(vehicle, attrs) do
    %{
      state: state,
      since: since,
      healthy?: healthy?,
      car: car,
      elevation: elevation,
      geofence: gf
    } = attrs

    # Start from the flattened vehicle data, then overlay process-level fields.
    %__MODULE__{
      format_vehicle(vehicle)
      | state: format_state(state),
        since: since,
        healthy: healthy?,
        elevation: elevation,
        geofence: gf,
        trim_badging: get_car_attr(car, :trim_badging),
        exterior_color: get_car_attr(car, :exterior_color),
        spoiler_type: get_car_attr(car, :spoiler_type),
        wheel_type: get_car_attr(car, :wheel_type),
        model: get_car_attr(car, :model),
        car: car
    }
  end
  # Collapses the vehicle process' (possibly nested) state tag into a
  # single display atom; driving-while-offline is reported as :offline.
  defp format_state({:driving, {:offline, _}, _id}), do: :offline
  defp format_state({:driving, _state, _id}), do: :driving
  defp format_state({state, _, _}) when is_atom(state), do: state
  defp format_state({state, _}) when is_atom(state), do: state
  defp format_state(state) when is_atom(state), do: state

  # Reads a static attribute from the Car record; nil-safe when no car exists.
  defp get_car_attr(%Car{exterior_color: v}, :exterior_color), do: v
  defp get_car_attr(%Car{spoiler_type: v}, :spoiler_type), do: v
  defp get_car_attr(%Car{trim_badging: v}, :trim_badging), do: v
  defp get_car_attr(%Car{wheel_type: v}, :wheel_type), do: v
  defp get_car_attr(%Car{model: v}, :model), do: v
  defp get_car_attr(nil, _key), do: nil
  # Flattens the nested `TeslaApi.Vehicle` response into the summary struct,
  # converting imperial units (miles / mph) to metric along the way.
  defp format_vehicle(%Vehicle{} = vehicle) do
    %__MODULE__{
      # General
      display_name: vehicle.display_name,
      # Drive State
      latitude: get_in_struct(vehicle, [:drive_state, :latitude]),
      longitude: get_in_struct(vehicle, [:drive_state, :longitude]),
      power: get_in_struct(vehicle, [:drive_state, :power]),
      speed: speed(vehicle),
      shift_state: get_in_struct(vehicle, [:drive_state, :shift_state]),
      heading: get_in_struct(vehicle, [:drive_state, :heading]),
      # Charge State
      plugged_in: plugged_in(vehicle),
      battery_level: charge(vehicle, :battery_level),
      usable_battery_level: charge(vehicle, :usable_battery_level),
      charge_energy_added: charge(vehicle, :charge_energy_added),
      charge_limit_soc: charge(vehicle, :charge_limit_soc),
      charge_port_door_open: charge(vehicle, :charge_port_door_open),
      charger_actual_current: charge(vehicle, :charger_actual_current),
      charger_phases: charge(vehicle, :charger_phases),
      charger_power: charge(vehicle, :charger_power),
      charger_voltage: charge(vehicle, :charger_voltage),
      # Ranges are reported in miles by the API; stored as km (2 decimals).
      est_battery_range_km: charge(vehicle, :est_battery_range) |> miles_to_km(2),
      ideal_battery_range_km: charge(vehicle, :ideal_battery_range) |> miles_to_km(2),
      rated_battery_range_km: charge(vehicle, :battery_range) |> miles_to_km(2),
      time_to_full_charge: charge(vehicle, :time_to_full_charge),
      scheduled_charging_start_time:
        charge(vehicle, :scheduled_charging_start_time) |> to_datetime(),
      # Climate State
      is_climate_on: get_in_struct(vehicle, [:climate_state, :is_climate_on]),
      is_preconditioning: get_in_struct(vehicle, [:climate_state, :is_preconditioning]),
      outside_temp: get_in_struct(vehicle, [:climate_state, :outside_temp]),
      inside_temp: get_in_struct(vehicle, [:climate_state, :inside_temp]),
      # Vehicle State
      odometer: get_in_struct(vehicle, [:vehicle_state, :odometer]) |> miles_to_km(2),
      locked: get_in_struct(vehicle, [:vehicle_state, :locked]),
      sentry_mode: get_in_struct(vehicle, [:vehicle_state, :sentry_mode]),
      windows_open: window_open(vehicle),
      doors_open: doors_open(vehicle),
      trunk_open: trunk_open(vehicle),
      frunk_open: frunk_open(vehicle),
      is_user_present: get_in_struct(vehicle, [:vehicle_state, :is_user_present]),
      version: version(vehicle),
      update_available: update_available(vehicle),
      update_version: update_version(vehicle)
    }
  end
  # Convenience accessor into the charge_state sub-struct.
  defp charge(vehicle, key), do: get_in_struct(vehicle, [:charge_state, key])

  # mph -> km/h; nil when the API reports no speed.
  defp speed(%Vehicle{drive_state: %Drive{speed: s}}) when not is_nil(s), do: mph_to_kmh(s)
  defp speed(_vehicle), do: nil

  # Derives "plugged in" from the charging state; :unknown is propagated.
  defp plugged_in(%Vehicle{charge_state: nil}), do: nil
  defp plugged_in(%Vehicle{charge_state: %Charge{charge_port_door_open: :unknown}}), do: :unknown

  defp plugged_in(%Vehicle{charge_state: %Charge{} = c}) do
    c.charging_state != "Disconnected"
  end

  defp plugged_in(_vehicle), do: nil

  # Any window value > 0 counts as open; nil when the fields are absent.
  defp window_open(%Vehicle{vehicle_state: vehicle_state}) do
    case vehicle_state do
      %VehicleState{fd_window: fd, fp_window: fp, rd_window: rd, rp_window: rp}
      when is_number(fd) and is_number(fp) and is_number(rd) and is_number(rp) ->
        fd > 0 or fp > 0 or rd > 0 or rp > 0

      _ ->
        nil
    end
  end

  # Same logic for the four doors (driver/passenger, front/rear).
  defp doors_open(%Vehicle{vehicle_state: vehicle_state}) do
    case vehicle_state do
      %VehicleState{df: df, pf: pf, dr: dr, pr: pr}
      when is_number(df) and is_number(pf) and is_number(dr) and is_number(pr) ->
        df > 0 or pf > 0 or dr > 0 or pr > 0

      _ ->
        nil
    end
  end

  defp trunk_open(%Vehicle{vehicle_state: %VehicleState{rt: rt}}) when is_number(rt), do: rt > 0
  defp trunk_open(_vehicle), do: nil

  defp frunk_open(%Vehicle{vehicle_state: %VehicleState{ft: ft}}) when is_number(ft), do: ft > 0
  defp frunk_open(_vehicle), do: nil

  # Keeps only the part of car_version before the first space —
  # presumably dropping a build suffix; nil when unavailable.
  defp version(vehicle) do
    with %Vehicle{vehicle_state: %VehicleState{car_version: v}} when is_binary(v) <- vehicle,
         [version | _] <- String.split(v, " ") do
      version
    else
      _ -> nil
    end
  end

  # Maps the software-update status string onto a tri-state boolean.
  defp update_available(vehicle) do
    case get_in_struct(vehicle, [:vehicle_state, :software_update, :status]) do
      status when status in ["available", "downloading", "downloading_wifi_wait"] -> true
      status when is_binary(status) -> false
      nil -> nil
    end
  end

  # First word of the pending update's version string, if any.
  defp update_version(vehicle) do
    case get_in_struct(vehicle, [:vehicle_state, :software_update, :version]) do
      version when is_binary(version) -> List.first(String.split(version, " "))
      nil -> nil
    end
  end

  # Unix timestamps become DateTime; nil/:unknown pass through untouched.
  defp to_datetime(val) when val in [nil, :unknown], do: val
  defp to_datetime(ts), do: DateTime.from_unix!(ts)

  # Nil-safe nested struct access (each step short-circuits on nil).
  defp get_in_struct(struct, keys) do
    Enum.reduce(keys, struct, fn key, acc -> if acc, do: Map.get(acc, key) end)
  end
end
| 38.907692 | 107 | 0.702122 |
f748531ec48e3b1ab7268314e68230f5b5f39d7e | 152 | ex | Elixir | lib/simple_budget_web/views/token_view.ex | corybuecker/simple-budget | d86241ff712552267da87052120468b01d2b8f41 | [
"MIT"
] | 2 | 2019-04-02T01:06:40.000Z | 2019-05-13T01:12:24.000Z | lib/simple_budget_web/views/token_view.ex | corybuecker/simple-budget | d86241ff712552267da87052120468b01d2b8f41 | [
"MIT"
] | 7 | 2018-12-27T12:33:38.000Z | 2021-03-08T22:31:14.000Z | lib/simple_budget_web/views/token_view.ex | corybuecker/simple-budget | d86241ff712552267da87052120468b01d2b8f41 | [
"MIT"
] | null | null | null | defmodule SimpleBudgetWeb.TokenView do
use SimpleBudgetWeb, :view
def render("create.json", %{token: token}) do
%{localtoken: token}
end
end
| 19 | 47 | 0.717105 |
f74865d707b4b72e982d960a0c0d353f0692e07f | 3,086 | exs | Elixir | test/central_web/controllers/config/user_config_controller_test.exs | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 6 | 2021-02-08T10:42:53.000Z | 2021-04-25T12:12:03.000Z | test/central_web/controllers/config/user_config_controller_test.exs | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | test/central_web/controllers/config/user_config_controller_test.exs | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 2 | 2021-02-23T22:34:00.000Z | 2021-04-08T13:31:36.000Z | defmodule CentralWeb.Config.UserConfigControllerTest do
use CentralWeb.ConnCase
alias Central.Config
alias Central.Helpers.GeneralTestLib
  # Logs in a user with no extra permissions before every test.
  setup do
    GeneralTestLib.conn_setup(~w())
  end

  # The user-config key exercised throughout these tests.
  @key "general.Homepage"

  describe "listing" do
    test "index", %{conn: conn} do
      conn = get(conn, Routes.user_config_path(conn, :index))
      assert html_response(conn, 200) =~ "User settings"
    end
  end

  describe "creating" do
    test "new", %{conn: conn} do
      conn = get(conn, Routes.user_config_path(conn, :new, key: @key))
      assert html_response(conn, 200) =~ "<h4>Edit setting</h4>"
    end

    test "create", %{conn: conn, user: user} do
      configs = Config.get_user_configs!(user.id)
      assert Enum.empty?(configs)
      conn =
        post(conn, Routes.user_config_path(conn, :create),
          user_config: %{
            "user_id" => user.id,
            "key" => @key,
            "value" => "some value"
          }
        )
      # Redirects back to the settings index, anchored at the key's section.
      assert redirected_to(conn) == Routes.user_config_path(conn, :index) <> "#general"
      configs = Config.get_user_configs!(user.id)
      assert Enum.count(configs) == 1
    end

    test "bad create, no effect", %{conn: conn, user: user} do
      configs = Config.get_user_configs!(user.id)
      assert Enum.empty?(configs)
      conn =
        post(conn, Routes.user_config_path(conn, :create),
          user_config: %{
            "user_id" => user.id,
            "key" => @key,
            "value" => ""
          }
        )
      # An empty value still redirects but no config row is persisted.
      assert redirected_to(conn) == Routes.user_config_path(conn, :index) <> "#general"
      configs = Config.get_user_configs!(user.id)
      assert Enum.empty?(configs)
    end
  end
describe "updating" do
test "new", %{conn: conn, user: user} do
attrs = %{
"key" => @key,
"user_id" => user.id,
"value" => "some value"
}
{:ok, the_config} = Config.create_user_config(attrs)
configs = Config.get_user_configs!(user.id)
assert Enum.count(configs) == 1
conn =
put(conn, Routes.user_config_path(conn, :update, the_config.id),
user_config: %{"key" => @key, "value" => "some updated value"}
)
assert redirected_to(conn) == Routes.user_config_path(conn, :index) <> "#general"
the_config = Config.get_user_config!(the_config.id)
assert the_config.value == "some updated value"
end
test "bad update, should delete", %{conn: conn, user: user} do
attrs = %{
"key" => @key,
"user_id" => user.id,
"value" => "some value"
}
{:ok, the_config} = Config.create_user_config(attrs)
configs = Config.get_user_configs!(user.id)
assert Enum.count(configs) == 1
conn =
put(conn, Routes.user_config_path(conn, :update, the_config.id),
user_config: %{"key" => @key, "value" => ""}
)
assert redirected_to(conn) == Routes.user_config_path(conn, :index) <> "#general"
configs = Config.get_user_configs!(user.id)
assert Enum.count(configs) == 0
end
end
end
| 27.801802 | 87 | 0.589436 |
f7487a5ef05835753ef79f2d01edbe06bc3919ba | 20,857 | ex | Elixir | clients/genomics/lib/google_api/genomics/v1/api/variants.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/api/variants.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/api/variants.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Genomics.V1.Api.Variants do
@moduledoc """
API calls for all endpoints tagged `Variants`.
"""
alias GoogleApi.Genomics.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Creates a new variant.
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :access_token (String.t): OAuth access token.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :callback (String.t): JSONP
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :$.xgafv (String.t): V1 error format.
- :alt (String.t): Data format for response.
- :body (Variant):
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.Variant{}} on success
{:error, info} on failure
"""
  # NOTE(review): generated by swagger-codegen (see file header) — keep
  # changes in sync with the generator rather than hand-editing the logic.
  @spec genomics_variants_create(Tesla.Env.client(), keyword()) ::
          {:ok, GoogleApi.Genomics.V1.Model.Variant.t()} | {:error, Tesla.Env.t()}
  def genomics_variants_create(connection, opts \\ []) do
    # Maps each supported option to where it belongs on the HTTP request
    # (:query string parameter or :body payload).
    optional_params = %{
      :access_token => :query,
      :key => :query,
      :upload_protocol => :query,
      :quotaUser => :query,
      :prettyPrint => :query,
      :fields => :query,
      :uploadType => :query,
      :callback => :query,
      :oauth_token => :query,
      :"$.xgafv" => :query,
      :alt => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v1/variants")
      |> Request.add_optional_params(optional_params, opts)

    # Execute over the connection and decode the response into the model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(struct: %GoogleApi.Genomics.V1.Model.Variant{})
  end
@doc """
Deletes a variant.
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- variant_id (String.t): The ID of the variant to be deleted.
- opts (KeywordList): [optional] Optional parameters
- :access_token (String.t): OAuth access token.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :callback (String.t): JSONP
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :$.xgafv (String.t): V1 error format.
- :alt (String.t): Data format for response.
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.Empty{}} on success
{:error, info} on failure
"""
  # NOTE(review): generated by swagger-codegen (see file header) — keep
  # changes in sync with the generator rather than hand-editing the logic.
  @spec genomics_variants_delete(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.Genomics.V1.Model.Empty.t()} | {:error, Tesla.Env.t()}
  def genomics_variants_delete(connection, variant_id, opts \\ []) do
    # All optional parameters travel on the query string for DELETE.
    optional_params = %{
      :access_token => :query,
      :key => :query,
      :upload_protocol => :query,
      :quotaUser => :query,
      :prettyPrint => :query,
      :fields => :query,
      :uploadType => :query,
      :callback => :query,
      :oauth_token => :query,
      :"$.xgafv" => :query,
      :alt => :query
    }

    request =
      Request.new()
      |> Request.method(:delete)
      # The path parameter is URL-encoded before interpolation.
      |> Request.url("/v1/variants/{variantId}", %{
        "variantId" => URI.encode_www_form(variant_id)
      })
      |> Request.add_optional_params(optional_params, opts)

    connection
    |> Connection.execute(request)
    |> Response.decode(struct: %GoogleApi.Genomics.V1.Model.Empty{})
  end
@doc """
Gets a variant by ID.
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- variant_id (String.t): The ID of the variant.
- opts (KeywordList): [optional] Optional parameters
- :access_token (String.t): OAuth access token.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :callback (String.t): JSONP
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :$.xgafv (String.t): V1 error format.
- :alt (String.t): Data format for response.
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.Variant{}} on success
{:error, info} on failure
"""
  # NOTE(review): generated by swagger-codegen (see file header) — keep
  # changes in sync with the generator rather than hand-editing the logic.
  @spec genomics_variants_get(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.Genomics.V1.Model.Variant.t()} | {:error, Tesla.Env.t()}
  def genomics_variants_get(connection, variant_id, opts \\ []) do
    # All optional parameters travel on the query string for GET.
    optional_params = %{
      :access_token => :query,
      :key => :query,
      :upload_protocol => :query,
      :quotaUser => :query,
      :prettyPrint => :query,
      :fields => :query,
      :uploadType => :query,
      :callback => :query,
      :oauth_token => :query,
      :"$.xgafv" => :query,
      :alt => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      # The path parameter is URL-encoded before interpolation.
      |> Request.url("/v1/variants/{variantId}", %{
        "variantId" => URI.encode_www_form(variant_id)
      })
      |> Request.add_optional_params(optional_params, opts)

    connection
    |> Connection.execute(request)
    |> Response.decode(struct: %GoogleApi.Genomics.V1.Model.Variant{})
  end
@doc """
Creates variant data by asynchronously importing the provided information. The variants for import will be merged with any existing variant that matches its reference sequence, start, end, reference bases, and alternative bases. If no such variant exists, a new one will be created. When variants are merged, the call information from the new variant is added to the existing variant, and Variant info fields are merged as specified in infoMergeConfig. As a special case, for single-sample VCF files, QUAL and FILTER fields will be moved to the call level; these are sometimes interpreted in a call-specific context. Imported VCF headers are appended to the metadata already in a variant set.
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :access_token (String.t): OAuth access token.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :callback (String.t): JSONP
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :$.xgafv (String.t): V1 error format.
- :alt (String.t): Data format for response.
- :body (ImportVariantsRequest):
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.Operation{}} on success
{:error, info} on failure
"""
  @spec genomics_variants_import(Tesla.Env.client(), keyword()) ::
          {:ok, GoogleApi.Genomics.V1.Model.Operation.t()} | {:error, Tesla.Env.t()}
  def genomics_variants_import(connection, opts \\ []) do
    # Supported optional parameters. :body is the ImportVariantsRequest payload
    # and goes in the request body; all other options are query parameters.
    optional_params = %{
      :access_token => :query,
      :key => :query,
      :upload_protocol => :query,
      :quotaUser => :query,
      :prettyPrint => :query,
      :fields => :query,
      :uploadType => :query,
      :callback => :query,
      :oauth_token => :query,
      :"$.xgafv" => :query,
      :alt => :query,
      :body => :body
    }

    # POST to the custom ":import" method; the import runs asynchronously on
    # the server, so the response is a long-running Operation, not the data.
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v1/variants:import")
      |> Request.add_optional_params(optional_params, opts)

    connection
    |> Connection.execute(request)
    |> Response.decode(struct: %GoogleApi.Genomics.V1.Model.Operation{})
  end
@doc """
Merges the given variants with existing variants. Each variant will be merged with an existing variant that matches its reference sequence, start, end, reference bases, and alternative bases. If no such variant exists, a new one will be created. When variants are merged, the call information from the new variant is added to the existing variant. Variant info fields are merged as specified in the infoMergeConfig field of the MergeVariantsRequest. Please exercise caution when using this method! It is easy to introduce mistakes in existing variants and difficult to back out of them. For example, suppose you were trying to merge a new variant with an existing one and both variants contain calls that belong to callsets with the same callset ID. // Existing variant - irrelevant fields trimmed for clarity { \"variantSetId\": \"10473108253681171589\", \"referenceName\": \"1\", \"start\": \"10582\", \"referenceBases\": \"G\", \"alternateBases\": [ \"A\" ], \"calls\": [ { \"callSetId\": \"10473108253681171589-0\", \"callSetName\": \"CALLSET0\", \"genotype\": [ 0, 1 ], } ] } // New variant with conflicting call information { \"variantSetId\": \"10473108253681171589\", \"referenceName\": \"1\", \"start\": \"10582\", \"referenceBases\": \"G\", \"alternateBases\": [ \"A\" ], \"calls\": [ { \"callSetId\": \"10473108253681171589-0\", \"callSetName\": \"CALLSET0\", \"genotype\": [ 1, 1 ], } ] } The resulting merged variant would overwrite the existing calls with those from the new variant: { \"variantSetId\": \"10473108253681171589\", \"referenceName\": \"1\", \"start\": \"10582\", \"referenceBases\": \"G\", \"alternateBases\": [ \"A\" ], \"calls\": [ { \"callSetId\": \"10473108253681171589-0\", \"callSetName\": \"CALLSET0\", \"genotype\": [ 1, 1 ], } ] } This may be the desired outcome, but it is up to the user to determine if if that is indeed the case.
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :access_token (String.t): OAuth access token.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :callback (String.t): JSONP
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :$.xgafv (String.t): V1 error format.
- :alt (String.t): Data format for response.
- :body (MergeVariantsRequest):
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.Empty{}} on success
{:error, info} on failure
"""
  @spec genomics_variants_merge(Tesla.Env.client(), keyword()) ::
          {:ok, GoogleApi.Genomics.V1.Model.Empty.t()} | {:error, Tesla.Env.t()}
  def genomics_variants_merge(connection, opts \\ []) do
    # Supported optional parameters. :body carries the MergeVariantsRequest;
    # everything else is sent as a query parameter.
    optional_params = %{
      :access_token => :query,
      :key => :query,
      :upload_protocol => :query,
      :quotaUser => :query,
      :prettyPrint => :query,
      :fields => :query,
      :uploadType => :query,
      :callback => :query,
      :oauth_token => :query,
      :"$.xgafv" => :query,
      :alt => :query,
      :body => :body
    }

    # POST to the custom ":merge" method. A successful merge returns the
    # protobuf Empty message (no payload).
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v1/variants:merge")
      |> Request.add_optional_params(optional_params, opts)

    connection
    |> Connection.execute(request)
    |> Response.decode(struct: %GoogleApi.Genomics.V1.Model.Empty{})
  end
@doc """
Updates a variant. This method supports patch semantics. Returns the modified variant without its calls.
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- variant_id (String.t): The ID of the variant to be updated.
- opts (KeywordList): [optional] Optional parameters
- :access_token (String.t): OAuth access token.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :callback (String.t): JSONP
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :$.xgafv (String.t): V1 error format.
- :alt (String.t): Data format for response.
- :updateMask (String.t): An optional mask specifying which fields to update. At this time, mutable fields are names and info. Acceptable values are \"names\" and \"info\". If unspecified, all mutable fields will be updated.
- :body (Variant):
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.Variant{}} on success
{:error, info} on failure
"""
  @spec genomics_variants_patch(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.Genomics.V1.Model.Variant.t()} | {:error, Tesla.Env.t()}
  def genomics_variants_patch(connection, variant_id, opts \\ []) do
    # Supported optional parameters. :updateMask limits which mutable fields
    # are patched (query parameter); :body carries the Variant payload.
    optional_params = %{
      :access_token => :query,
      :key => :query,
      :upload_protocol => :query,
      :quotaUser => :query,
      :prettyPrint => :query,
      :fields => :query,
      :uploadType => :query,
      :callback => :query,
      :oauth_token => :query,
      :"$.xgafv" => :query,
      :alt => :query,
      :updateMask => :query,
      :body => :body
    }

    # PATCH /v1/variants/{variantId} with the URL-encoded variant ID in the
    # path; the server returns the modified variant (without its calls).
    request =
      Request.new()
      |> Request.method(:patch)
      |> Request.url("/v1/variants/{variantId}", %{
        "variantId" => URI.encode_www_form(variant_id)
      })
      |> Request.add_optional_params(optional_params, opts)

    connection
    |> Connection.execute(request)
    |> Response.decode(struct: %GoogleApi.Genomics.V1.Model.Variant{})
  end
@doc """
Gets a list of variants matching the criteria. Implements [GlobalAllianceApi.searchVariants](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/variantmethods.avdl#L126).
## Parameters
- connection (GoogleApi.Genomics.V1.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :access_token (String.t): OAuth access token.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :callback (String.t): JSONP
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :$.xgafv (String.t): V1 error format.
- :alt (String.t): Data format for response.
- :body (SearchVariantsRequest):
## Returns
{:ok, %GoogleApi.Genomics.V1.Model.SearchVariantsResponse{}} on success
{:error, info} on failure
"""
  @spec genomics_variants_search(Tesla.Env.client(), keyword()) ::
          {:ok, GoogleApi.Genomics.V1.Model.SearchVariantsResponse.t()} | {:error, Tesla.Env.t()}
  def genomics_variants_search(connection, opts \\ []) do
    # Supported optional parameters. :body carries the SearchVariantsRequest
    # with the actual search criteria; the rest are query parameters.
    optional_params = %{
      :access_token => :query,
      :key => :query,
      :upload_protocol => :query,
      :quotaUser => :query,
      :prettyPrint => :query,
      :fields => :query,
      :uploadType => :query,
      :callback => :query,
      :oauth_token => :query,
      :"$.xgafv" => :query,
      :alt => :query,
      :body => :body
    }

    # Search is modeled as a POST (criteria travel in the body, not the URL).
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v1/variants/search")
      |> Request.add_optional_params(optional_params, opts)

    connection
    |> Connection.execute(request)
    |> Response.decode(struct: %GoogleApi.Genomics.V1.Model.SearchVariantsResponse{})
  end
end
| 50.623786 | 3,006 | 0.652203 |
f7487c461ea89716c3963bc8bb944c9996c0c405 | 1,530 | ex | Elixir | lib/protobuf/protoc/generator/service.ex | tony612/protobuf-elixir | 7e7a0f2380d0bb851507d6451ce3688db4e978ae | [
"MIT"
] | 419 | 2017-04-02T13:10:51.000Z | 2020-11-15T15:53:17.000Z | lib/protobuf/protoc/generator/service.ex | tony612/protobuf-elixir | 7e7a0f2380d0bb851507d6451ce3688db4e978ae | [
"MIT"
] | 100 | 2017-04-02T14:26:41.000Z | 2020-11-10T23:43:30.000Z | lib/protobuf/protoc/generator/service.ex | tony612/protobuf-elixir | 7e7a0f2380d0bb851507d6451ce3688db4e978ae | [
"MIT"
] | 83 | 2017-07-24T21:50:04.000Z | 2020-11-15T08:52:34.000Z | defmodule Protobuf.Protoc.Generator.Service do
  @moduledoc false

  alias Protobuf.Protoc.Context
  alias Protobuf.Protoc.Generator.Util

  require EEx

  # Compiles priv/templates/service.ex.eex into a private function
  # service_template(assigns) at compile time; `assigns` is the keyword list
  # of template bindings passed by generate/2 below.
  EEx.function_from_file(
    :defp,
    :service_template,
    Path.expand("./templates/service.ex.eex", :code.priv_dir(:protobuf)),
    [:assigns]
  )
@spec generate(Context.t(), Google.Protobuf.ServiceDescriptorProto.t()) ::
{String.t(), String.t()}
def generate(%Context{} = ctx, %Google.Protobuf.ServiceDescriptorProto{} = desc) do
# service can't be nested
mod_name = Util.mod_name(ctx, [Macro.camelize(desc.name)])
name = Util.prepend_package_prefix(ctx.package, desc.name)
methods = Enum.map(desc.method, &generate_service_method(ctx, &1))
descriptor_fun_body =
if ctx.gen_descriptors? do
Util.descriptor_fun_body(desc)
else
nil
end
{mod_name,
Util.format(
service_template(
module: mod_name,
service_name: name,
methods: methods,
descriptor_fun_body: descriptor_fun_body,
version: Util.version()
)
)}
end
defp generate_service_method(ctx, method) do
input = service_arg(Util.type_from_type_name(ctx, method.input_type), method.client_streaming)
output =
service_arg(Util.type_from_type_name(ctx, method.output_type), method.server_streaming)
{method.name, input, output}
end
defp service_arg(type, _streaming? = true), do: "stream(#{type})"
defp service_arg(type, _streaming?), do: type
end
| 27.818182 | 98 | 0.679739 |
f748972f040eebbc6610129c19cc771bebcee7c5 | 1,709 | ex | Elixir | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/update_release_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/update_release_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/update_release_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.FirebaseRules.V1.Model.UpdateReleaseRequest do
  @moduledoc """
  The request for FirebaseRulesService.UpdateReleasePatch.

  ## Attributes

  *   `release` (*type:* `GoogleApi.FirebaseRules.V1.Model.Release.t`, *default:* `nil`) - `Release` to update.
  *   `updateMask` (*type:* `String.t`, *default:* `nil`) - Specifies which fields to update.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :release => GoogleApi.FirebaseRules.V1.Model.Release.t(),
          :updateMask => String.t()
        }

  # field/2 (from GoogleApi.Gax.ModelBase) registers each attribute with the
  # JSON (de)serialization machinery; `as:` tells the decoder which model
  # struct a nested value should be decoded into.
  field(:release, as: GoogleApi.FirebaseRules.V1.Model.Release)
  field(:updateMask)
end
defimpl Poison.Decoder, for: GoogleApi.FirebaseRules.V1.Model.UpdateReleaseRequest do
  # Delegate JSON decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.FirebaseRules.V1.Model.UpdateReleaseRequest.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.FirebaseRules.V1.Model.UpdateReleaseRequest do
  # Encode through the shared Gax model encoder used by all generated models.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.18 | 111 | 0.739029 |
f748afc0597b1cba73fb6db82cdb1cc5b933f162 | 30,335 | ex | Elixir | clients/compute/lib/google_api/compute/v1/api/node_templates.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/node_templates.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/node_templates.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Api.NodeTemplates do
@moduledoc """
API calls for all endpoints tagged `NodeTemplates`.
"""
alias GoogleApi.Compute.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves an aggregated list of node templates.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
* `:includeAllScopes` (*type:* `boolean()`) - Indicates whether every visible scope for each scope type (zone, region, global) should be included in the response. For new resource types added after this field, the flag has no effect as new resource types will always include every visible scope for each scope type in response. For resource types which predate this field, if this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included.
* `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
* `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
Currently, only sorting by `name` or `creationTimestamp desc` is supported.
* `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
* `:returnPartialSuccess` (*type:* `boolean()`) - Opt-in for partial success behavior which provides partial results in case of failure. The default value is false and the logic is the same as today.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.NodeTemplateAggregatedList{}}` on success
* `{:error, info}` on failure
"""
  @spec compute_node_templates_aggregated_list(
          Tesla.Env.client(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.NodeTemplateAggregatedList.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def compute_node_templates_aggregated_list(
        connection,
        project,
        optional_params \\ [],
        opts \\ []
      ) do
    # Whitelist of supported optional parameters; all are query parameters
    # (filter/paging/sorting plus the standard API query options).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :filter => :query,
      :includeAllScopes => :query,
      :maxResults => :query,
      :orderBy => :query,
      :pageToken => :query,
      :returnPartialSuccess => :query
    }

    # GET the cross-scope aggregated listing for the project.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{project}/aggregated/nodeTemplates", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.NodeTemplateAggregatedList{}])
  end
@doc """
Deletes the specified NodeTemplate resource.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `node_template` (*type:* `String.t`) - Name of the NodeTemplate resource to delete.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
  @spec compute_node_templates_delete(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.Operation.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
  def compute_node_templates_delete(
        connection,
        project,
        region,
        node_template,
        optional_params \\ [],
        opts \\ []
      ) do
    # Supported optional parameters; :requestId enables idempotent retries on
    # the server side. All are query parameters.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :requestId => :query
    }

    # DELETE the named node template. Note nodeTemplate additionally allows
    # "/" unescaped in the path segment.
    request =
      Request.new()
      |> Request.method(:delete)
      |> Request.url("/{project}/regions/{region}/nodeTemplates/{nodeTemplate}", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "region" => URI.encode(region, &URI.char_unreserved?/1),
        "nodeTemplate" => URI.encode(node_template, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Deletion is asynchronous: the response is a long-running Operation.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
  end
@doc """
Returns the specified node template. Gets a list of available node templates by making a list() request.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `node_template` (*type:* `String.t`) - Name of the node template to return.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.NodeTemplate{}}` on success
* `{:error, info}` on failure
"""
  @spec compute_node_templates_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.NodeTemplate.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def compute_node_templates_get(
        connection,
        project,
        region,
        node_template,
        optional_params \\ [],
        opts \\ []
      ) do
    # Standard API query options only; this endpoint has no method-specific
    # parameters.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    # GET the named node template; "/" is left unescaped in the template name.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{project}/regions/{region}/nodeTemplates/{nodeTemplate}", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "region" => URI.encode(region, &URI.char_unreserved?/1),
        "nodeTemplate" => URI.encode(node_template, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.NodeTemplate{}])
  end
@doc """
Gets the access control policy for a resource. May be empty if no such policy or resource exists.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `resource` (*type:* `String.t`) - Name or id of the resource for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:optionsRequestedPolicyVersion` (*type:* `integer()`) - Requested IAM Policy version.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
  @spec compute_node_templates_get_iam_policy(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) :: {:ok, GoogleApi.Compute.V1.Model.Policy.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
  def compute_node_templates_get_iam_policy(
        connection,
        project,
        region,
        resource,
        optional_params \\ [],
        opts \\ []
      ) do
    # Standard API query options plus the requested IAM policy format version.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :optionsRequestedPolicyVersion => :query
    }

    # GET the IAM policy attached to the resource via the getIamPolicy verb.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{project}/regions/{region}/nodeTemplates/{resource}/getIamPolicy", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "region" => URI.encode(region, &URI.char_unreserved?/1),
        "resource" => URI.encode(resource, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Policy{}])
  end
@doc """
Creates a NodeTemplate resource in the specified project using the data included in the request.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.NodeTemplate.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_node_templates_insert(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def compute_node_templates_insert(
connection,
project,
region,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/{project}/regions/{region}/nodeTemplates", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Retrieves a list of node templates available to the specified project.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
* `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
* `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
Currently, only sorting by `name` or `creationTimestamp desc` is supported.
* `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
* `:returnPartialSuccess` (*type:* `boolean()`) - Opt-in for partial success behavior which provides partial results in case of failure. The default value is false and the logic is the same as today.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.NodeTemplateList{}}` on success
* `{:error, info}` on failure
"""
@spec compute_node_templates_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.NodeTemplateList.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def compute_node_templates_list(connection, project, region, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:filter => :query,
:maxResults => :query,
:orderBy => :query,
:pageToken => :query,
:returnPartialSuccess => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/{project}/regions/{region}/nodeTemplates", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.NodeTemplateList{}])
end
@doc """
Sets the access control policy on the specified resource. Replaces any existing policy.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `resource` (*type:* `String.t`) - Name or id of the resource for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.Compute.V1.Model.RegionSetPolicyRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec compute_node_templates_set_iam_policy(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) :: {:ok, GoogleApi.Compute.V1.Model.Policy.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def compute_node_templates_set_iam_policy(
connection,
project,
region,
resource,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/{project}/regions/{region}/nodeTemplates/{resource}/setIamPolicy", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"resource" => URI.encode(resource, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Policy{}])
end
@doc """
Returns permissions that a caller has on the specified resource.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `resource` (*type:* `String.t`) - Name or id of the resource for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.Compute.V1.Model.TestPermissionsRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.TestPermissionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec compute_node_templates_test_iam_permissions(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.TestPermissionsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def compute_node_templates_test_iam_permissions(
connection,
project,
region,
resource,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/{project}/regions/{region}/nodeTemplates/{resource}/testIamPermissions", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"resource" => URI.encode(resource, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.TestPermissionsResponse{}])
end
end
| 49.729508 | 511 | 0.643086 |
f748e4ce60d0071ab620eabbd1726b748edc6a84 | 1,394 | exs | Elixir | mix.exs | DriesDeBackker/quarp-reactivity | 88e297818df4c9fd85af0c14dabe5edc4788ffd4 | [
"MIT"
] | 3 | 2019-07-18T12:43:25.000Z | 2020-11-26T00:28:26.000Z | mix.exs | DriesDeBackker/quarp-reactivity | 88e297818df4c9fd85af0c14dabe5edc4788ffd4 | [
"MIT"
] | null | null | null | mix.exs | DriesDeBackker/quarp-reactivity | 88e297818df4c9fd85af0c14dabe5edc4788ffd4 | [
"MIT"
] | null | null | null | defmodule Quarp.MixProject do
use Mix.Project
def project do
[
app: :quarp,
version: "1.2.0",
elixir: "~> 1.6",
start_permanent: Mix.env() == :prod,
deps: deps(),
description: description(),
package: package(),
source_url: "https://github.com/DriesDeBackker/quarp-reactivity.git"
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:rp_middleware, "~> 0.1.0"},
{:ex_doc, "~> 0.19", only: :dev},
{:observables_extended, "~> 0.3.5"}
]
end
defp description() do
"A library for distributed reactive programming with consistency guarantees in the spirit of 'Quality Aware Reacive Programming for the IoT'.
Features fifo (no guarantee), (relaxed) glitch-freedom ({:g, margin}) and (relaxed) logical clock synchronization ({:t, margin}) as guarantees."
end
  # Hex package metadata used by `mix hex.publish`.
  defp package() do
    [
      # This option is only needed when you don't want to use the OTP application name
      name: "quarp",
      # These are the default files included in the package
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Dries De Backker"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/DriesDeBackker/quarp-reactivity.git"}
    ]
  end
end
| 28.44898 | 148 | 0.625538 |
f748f1a3d03a162d47ba5239fa5c61bda88ab17f | 1,109 | ex | Elixir | lib/ex_admin/changeset.ex | erikmueller/ex_admin | af8f3e23a03148d8365cc3b71286f855fb919f8c | [
"MIT"
] | null | null | null | lib/ex_admin/changeset.ex | erikmueller/ex_admin | af8f3e23a03148d8365cc3b71286f855fb919f8c | [
"MIT"
] | null | null | null | lib/ex_admin/changeset.ex | erikmueller/ex_admin | af8f3e23a03148d8365cc3b71286f855fb919f8c | [
"MIT"
] | null | null | null | defmodule ExAdmin.Changeset do
@moduledoc false
alias __MODULE__, as: Cs
defstruct valid?: true, changeset: nil, errors: nil, dependents: []
  # Applies a list of {key, value} updates by folding over the list and
  # dispatching each pair to the matching update/3 clause below.
  def update(%Cs{} = r, items) when is_list(items) do
    Enum.reduce(items, r, fn({k,v}, acc) -> update(acc, k, v) end)
  end

  # Replaces the wrapped changeset outright.
  def update(%Cs{} = r, :changeset, changeset) do
    %Cs{r | changeset: changeset}
  end

  # Validity is sticky-false: once any update reports invalid, the record
  # stays invalid (logical AND with the current flag).
  def update(%Cs{valid?: valid?} = r, :valid?, value) do
    %Cs{r | valid?: valid? and value}
  end

  # Appends a single dependent to the accumulated dependents list.
  def update(%Cs{dependents: dependents} = r, :dependents, dependent) do
    %Cs{r | dependents: dependents ++ [dependent]}
  end

  # Error accumulation — clause order matters:
  #   1. nil incoming error -> no-op;
  #   2. no errors stored yet -> store as-is (the ++ in clause 3 implies
  #      `error` is expected to be a list — confirm with callers);
  #   3. otherwise -> append to the existing errors.
  def update(%Cs{} = r, :errors, nil), do: r
  def update(%Cs{errors: nil} = r, :errors, error) do
    %Cs{r | errors: error}
  end
  def update(%Cs{errors: errors} = r, :errors, error) do
    %Cs{r | errors: errors ++ error}
  end
def set_data(%{data: data} = cs, params) do
struct(cs, data: struct(data, params))
end
def set_data(%{model: data} = cs, params) do
struct(cs, model: struct(data, params))
end
def get_data(%{data: data}), do: data
def get_data(%{model: data}), do: data
end
| 29.184211 | 72 | 0.628494 |
f74944ddfaccf0b8f773624a2a2c4026b0f743a6 | 509 | ex | Elixir | lib/central/logging/lib/logging_lib.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 6 | 2021-02-08T10:42:53.000Z | 2021-04-25T12:12:03.000Z | lib/central/logging/lib/logging_lib.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | lib/central/logging/lib/logging_lib.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | 2 | 2021-02-23T22:34:00.000Z | 2021-04-08T13:31:36.000Z | defmodule Central.Logging.LoggingLib do
  import Plug.Conn, only: [assign: 3]

  # Three display values for this section — presumably {foreground colour,
  # background colour, theme name}; confirm against the rendering code.
  @spec colours() :: {String.t(), String.t(), String.t()}
  def colours(), do: {"#666", "#EEE", "default"}

  # Font Awesome icon class shown for the logging section.
  @spec icon() :: String.t()
  def icon(), do: "far fa-bars"

  # Marks the connection so downstream logging can skip this request
  # (sets assigns.do_not_log = true).
  @spec do_not_log(Plug.Conn.t()) :: Plug.Conn.t()
  def do_not_log(conn) do
    assign(conn, :do_not_log, true)
  end

  # Authorization check for this section; first and third arguments are
  # ignored, the decision is delegated to AuthLib with the "logging" key.
  @spec authorize(any, Plug.Conn.t(), atom) :: boolean
  def authorize(_, conn, _), do: Central.Account.AuthLib.allow?(conn, "logging")
| 28.277778 | 80 | 0.636542 |
f7494ca8b70a3cc1f1433d040782ba2899d119c1 | 2,386 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/model/create_collectd_time_series_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/create_collectd_time_series_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/create_collectd_time_series_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Monitoring.V3.Model.CreateCollectdTimeSeriesRequest do
  @moduledoc """
  The CreateCollectdTimeSeries request.

  ## Attributes

  *   `collectdPayloads` (*type:* `list(GoogleApi.Monitoring.V3.Model.CollectdPayload.t)`, *default:* `nil`) - The collectd payloads representing the time series data. You must not include more than a single point for each time series, so no two payloads can have the same values for all of the fields plugin, plugin_instance, type, and type_instance.
  *   `collectdVersion` (*type:* `String.t`, *default:* `nil`) - The version of collectd that collected the data. Example: "5.3.0-192.el6".
  *   `resource` (*type:* `GoogleApi.Monitoring.V3.Model.MonitoredResource.t`, *default:* `nil`) - The monitored resource associated with the time series.
  """

  # ModelBase supplies the field/1-3 macro and the generated decode/2 helper.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :collectdPayloads => list(GoogleApi.Monitoring.V3.Model.CollectdPayload.t()) | nil,
          :collectdVersion => String.t() | nil,
          :resource => GoogleApi.Monitoring.V3.Model.MonitoredResource.t() | nil
        }

  # JSON (de)serialization mapping; field names mirror the REST API exactly.
  field(:collectdPayloads, as: GoogleApi.Monitoring.V3.Model.CollectdPayload, type: :list)
  field(:collectdVersion)
  field(:resource, as: GoogleApi.Monitoring.V3.Model.MonitoredResource)
end
# Delegates Poison decoding to the model's generated decode/2 helper.
defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.CreateCollectdTimeSeriesRequest do
  def decode(value, options),
    do: GoogleApi.Monitoring.V3.Model.CreateCollectdTimeSeriesRequest.decode(value, options)
end
# Encodes the model through the shared Gax ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.CreateCollectdTimeSeriesRequest do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 45.018868 | 351 | 0.750629 |
f749543145793dc2bcc3225284b721e4da889ce3 | 5,058 | ex | Elixir | lib/couchdb_client.ex | joaohelio/elixir-couchdb-client | de29900f3383db3ac42ff4c8aa17e744369648a9 | [
"MIT"
] | 5 | 2015-11-24T15:15:32.000Z | 2019-03-03T14:03:39.000Z | lib/couchdb_client.ex | joaohelio/elixir-couchdb-client | de29900f3383db3ac42ff4c8aa17e744369648a9 | [
"MIT"
] | null | null | null | lib/couchdb_client.ex | joaohelio/elixir-couchdb-client | de29900f3383db3ac42ff4c8aa17e744369648a9 | [
"MIT"
] | 3 | 2015-11-30T13:02:19.000Z | 2019-09-09T21:05:14.000Z | defmodule CouchdbClient do
@vsn "0.2.3"
@moduledoc """
This module conveniently interfaces/proxies
- CouchdbClient.Repository
- CouchdbClient.Database
- CouchdbClient.Document
- CouchdbClient.Attachment
to provide easy access to a CouchDB instance. It will collect connection
data from Application.get_env(:couchdb_client, options) that you may
possibly configure via
config :couchdb_client,
scheme: "http",
host: "127.0.0.1",
port: 5984,
name: "test_database"
in your config/config.exs. Add :couchdb_client to your mix.exs:
def application do
[applications: [ .., :couchdb_client, ..], .. ]
end
If you don't want to autostart, issue
CouchdbClient.start name: "test_database", host: "..", ..
## Examples
iex> doc = %CouchdbClient.Document{ data: %{ "one" => "two" } }
%CouchdbClient.Document{data: %{"one" => "two"}, id: nil, rev: nil}
iex> doc = CouchdbClient.save doc
%CouchdbClient.Document{data: %{"_rev" => _REV1, "one" => "two"}, id: _ID, rev: _REV1}
iex> CouchdbClient.delete doc
:ok
iex> doc = %CouchdbClient.Document{ id: "test_doc", data: %{ "one" => "two" } }
%CouchdbClient.Document{id: "test_doc", data: %{"one" => "two"}, id: "test_doc", rev: nil}
iex> doc = CouchdbClient.save doc
%CouchdbClient.Document{data: %{"_rev" => _REV2, "one" => "two"}, id: "test_doc", rev: _REV2}
"""
alias CouchdbClient.Repository, as: Repo
alias CouchdbClient.Document, as: Doc
alias CouchdbClient.Database, as: DB
alias CouchdbClient.Attachment, as: Attachment
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = [
# Define workers and child supervisors to be supervised
# worker(NavigationTree.Worker, [arg1, arg2, arg3])
worker( Repo, [
[
scheme: Application.get_env(:couchdb_client, :scheme ) || "http",
host: Application.get_env(:couchdb_client, :host ) || "127.0.0.1",
post: Application.get_env(:couchdb_client, :port ) || 5984,
name: Application.get_env(:couchdb_client, :name ),
]
] )
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: NavigationTree.Supervisor]
Supervisor.start_link(children, opts)
end
  # Starts the repository agent directly with explicit connection options,
  # bypassing the OTP application configuration.
  def start_repo( db_options ) do
    Repo.start_link db_options
  end

  @doc "Returns the %CouchdbClient.Database{} stored in Repository Agent"
  def db do
    Repo.get
  end

  @doc """
  Loads a document. Accepts both an id string or a %CouchdbClient.Document
  struct which must have the id property set.
  """
  def load( id ) when is_binary( id ) do
    Doc.load %Doc{ id: id }, db
  end

  # Integer ids are stringified before being wrapped in a Document struct.
  def load( id ) when is_integer( id ) do
    Doc.load %Doc{ id: "#{id}" }, db
  end

  # Fallback clause: the argument is assumed to already be a %Doc{}.
  def load( document ) do
    Doc.load document, db
  end
@doc "Performs a HEAD request to CouchDB, returning the revision \"rev\"."
def get_rev( document ) do
Doc.get_rev document, db
end
@doc "Sets all keys found in data map in document.data"
def set( document, data ) do
Doc.set( document, data )
end
@doc "Inserts the document, returns it"
def insert( document ) do
Doc.insert document, db
end
@doc "Updates the document, returns it"
def update( document ) do
Doc.update document, db
end
@doc "Either inserts or updates the document, returns it"
def save( document ) do
Doc.save document, db
end
@doc "Deletes the document, return :ok"
def delete( document ) do
Doc.delete document, db
end
@doc "Adds an attachment, see CouchdbClient.Attachment.attach/3"
def add_attachment( document, attachment ) do
Attachment.attach( document, attachment, db )
end
@doc "Deletes an attachment, see CouchdbClient.Attachment.attach/3"
def delete_attachment( document, filename ) do
Attachment.delete( document, filename , db )
end
@doc """
Fetches an attachment, returns { content, content_type }.
See CouchdbClient.Attachment.fetch/3
"""
def fetch_attachment( document, filename ) do
Attachment.fetch( document, filename, db )
end
@doc "Returns a list of all documents in db"
def all_docs, do: DB.all_docs db
@doc "Retrieves general DB information from server"
def info, do: DB.info db
@doc "Changes the database name to use on current CouchDB server"
def change_db name do
Repo.change_db name
end
end
| 33.276316 | 97 | 0.6121 |
f74966d353dfa0c26f968aefc8f186b75ecb7c8b | 5,627 | ex | Elixir | lib/cased/event.ex | cased/cased-elixir | a19a4c5d6dec3374c1067a908b357db798cfb2c8 | [
"MIT"
] | 3 | 2021-02-06T01:41:33.000Z | 2021-07-09T21:19:06.000Z | lib/cased/event.ex | cased/cased-elixir | a19a4c5d6dec3374c1067a908b357db798cfb2c8 | [
"MIT"
] | 1 | 2021-07-09T20:34:35.000Z | 2021-07-09T20:35:29.000Z | lib/cased/event.ex | cased/cased-elixir | a19a4c5d6dec3374c1067a908b357db798cfb2c8 | [
"MIT"
] | 2 | 2021-03-05T22:52:16.000Z | 2021-03-18T13:18:30.000Z | defmodule Cased.Event do
@moduledoc """
Data modeling a Cased audit event.
"""
import Norm
defstruct [:audit_trail, :id, :url, :data, :published_at, :processed_at]
@type t :: %__MODULE__{
audit_trail: Cased.AuditTrail.t(),
id: String.t(),
url: String.t(),
published_at: DateTime.t(),
processed_at: DateTime.t(),
data: %{String.t() => any()}
}
@default_audit_trail :default
@type get_opts :: [get_opt()]
@type get_opt ::
{:audit_trail, String.t()}
| {:key, String.t()}
@default_get_opts [
audit_trail: @default_audit_trail
]
@spec get(
client :: Cased.Client.t(),
event_id :: String.t(),
opts :: get_opts()
) :: Cased.Request.t() | no_return()
@doc """
Build a request to retrieve an event.
## Options
All optional:
- `:audit_trail` — The audit trail, used to ensure the event comes from the
given audit trail.
- `:key` — A Cased policy key allowing access to events.
If `:key` is omitted:
- If an `:audit_trail` is provided, the key configured on the client for that
audit trail will be used.
- If an `:audit_trail` is **not** provided, the key configured on the client
for the `:default` audit trail will be used.
# If `:audit_trail` is omitted, the `#{inspect(Keyword.fetch!(@default_get_opts, :audit_trail))}` audit trail is assumed.
"""
def get(client, event_id, opts \\ []) do
opts =
@default_get_opts
|> Keyword.merge(opts)
with {:ok, options} <- validate_get_opts(opts, client) do
audit_trail = Map.get(options, :audit_trail)
key = Map.get_lazy(options, :key, fn -> Map.fetch!(client.keys, audit_trail) end)
%Cased.Request{
client: client,
id: :audit_trail_event,
method: :get,
path: "/audit-trails/#{audit_trail}/events/#{event_id}",
key: key
}
else
{:error, details} ->
raise %Cased.RequestError{details: details}
end
end
  # Conforms `opts` against the Norm schema; returns {:ok, conformed_map}
  # or {:error, details} describing the validation failures.
  @spec validate_get_opts(opts :: keyword(), client :: Cased.Client.t()) ::
          {:ok, map()} | {:error, list()}
  defp validate_get_opts(opts, client) do
    conform(Map.new(opts), get_opts_schema(client))
  end

  # Option schema for `get/3` (the original comment said `query/2` — a
  # copy/paste slip). :audit_trail must be an atom with a key configured on
  # the client; :key must be a binary.
  @spec get_opts_schema(client :: Cased.Client.t()) :: struct()
  defp get_opts_schema(client) do
    schema(%{
      audit_trail: spec(is_atom() and (&Map.has_key?(client.keys, &1))),
      key: spec(is_binary())
    })
  end
@type query_opts :: [query_opt()]
@type query_opt ::
{:phrase, String.t()}
| {:key, String.t()}
| {:variables, keyword()}
| {:per_page, pos_integer()}
| {:page, pos_integer()}
@default_query_opts [
page: 1,
per_page: 25
]
@doc """
Build a request to retrieve events from an audit trail.
## Options
- `:phrase` — The search phrase.
- `:audit_trail` — The audit trail.
- `:key` — A Cased policy key allowing access to events.
- `:variables` — Cased Policy variables.
- `:per_page` — Number of results per page (default: `#{
inspect(Keyword.fetch!(@default_query_opts, :per_page))
}`).
- `:page` — Requested page (default: `#{inspect(Keyword.fetch!(@default_query_opts, :page))}`).
If `:key` is omitted:
- If an `:audit_trail` is provided, the key configured on the client for that
audit trail will be used.
- If an `:audit_trail` is **not** provided, the key configured on the client
for the `:default` audit trail will be used.
"""
  @spec query(client :: Cased.Client.t(), opts :: query_opts()) ::
          Cased.Request.t() | no_return()
  def query(client, opts \\ []) do
    # Defaults first, then caller-supplied overrides.
    opts =
      @default_query_opts
      |> Keyword.merge(opts)

    with {:ok, options} <- validate_query_opts(opts, client) do
      # Split the conformed options: :audit_trail/:key drive routing and
      # authentication, everything else (phrase, paging, …) becomes the HTTP
      # query parameters.
      {options, query} =
        options
        |> Map.split([:audit_trail, :key])

      # With an audit trail: scoped endpoint and that trail's configured key
      # (an explicit :key wins). Without one: the global /events endpoint and
      # the client's default key.
      {id, path, key} =
        if Map.get(options, :audit_trail) do
          {:audit_trail_events, "/audit-trails/#{options.audit_trail}/events",
           Map.get_lazy(options, :key, fn -> Map.fetch!(client.keys, options.audit_trail) end)}
        else
          {:events, "/events", Map.get(options, :key, client.keys.default)}
        end

      %Cased.Request{
        client: client,
        id: id,
        method: :get,
        path: path,
        key: key,
        query: query
      }
    else
      # Norm validation details are surfaced as a RequestError.
      {:error, details} ->
        raise %Cased.RequestError{details: details}
    end
  end
  # Conforms `opts` against the Norm schema; returns {:ok, conformed_map}
  # or {:error, details} describing the validation failures.
  @spec validate_query_opts(opts :: keyword(), client :: Cased.Client.t()) ::
          {:ok, map()} | {:error, list()}
  defp validate_query_opts(opts, client) do
    conform(Map.new(opts), query_opts_schema(client))
  end

  # Option schema for `query/2`.
  # NOTE(review): query/2 reads :key from the options, but unlike
  # get_opts_schema/1 this schema does not declare it — confirm whether Norm
  # accepts a key absent from the schema.
  @spec query_opts_schema(client :: Cased.Client.t()) :: struct()
  defp query_opts_schema(client) do
    schema(%{
      phrase: spec(is_binary()),
      variables: spec(&Keyword.keyword?/1),
      per_page: spec(&Enum.member?(1..100, &1)),
      page: spec(is_integer() and (&(&1 > 0))),
      audit_trail: spec(is_atom() and (&Map.has_key?(client.keys, &1)))
    })
  end
@doc false
@spec from_json!(map()) :: t()
def from_json!(event) do
{:ok, published_at, _} = DateTime.from_iso8601(event["published_at"])
{:ok, processed_at, _} = DateTime.from_iso8601(event["processed_at"])
%__MODULE__{
id: event["id"],
audit_trail: Cased.AuditTrail.from_json(event["audit_trail"]),
url: event["url"],
published_at: published_at,
processed_at: processed_at,
data: event["event"]
}
end
end
| 29.307292 | 123 | 0.601386 |
f7496bbae25422501fbfcbfd4971fe6e52809943 | 1,341 | ex | Elixir | apps/crm/lib/crm.ex | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | null | null | null | apps/crm/lib/crm.ex | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | 1 | 2020-04-06T07:42:36.000Z | 2020-04-06T07:42:36.000Z | apps/crm/lib/crm.ex | jdambron/phoenix_crm | 6d17280e3a6655990840f4714a338f58b7d0c756 | [
"MIT"
] | null | null | null | defmodule Crm do
alias Crm.{Contact, Update, Group}
@repo Crm.Repo
def list_contacts do
@repo.all(Contact)
end
def insert_contact(%{"group" => group_id} = attrs) do
group = Crm.get_group(group_id)
%Contact{}
|> Contact.changeset(attrs, group)
|> @repo.insert()
end
def insert_update(%{"contacts" => contact_ids} = update) do
contacts = Enum.map(contact_ids, &Crm.get_contact/1)
%Update{}
|> @repo.preload(:contacts)
|> Update.changeset_update_contacts(update, contacts)
|> @repo.insert()
end
def insert_update(attrs) do
%Update{}
|> @repo.preload(:contacts)
|> Update.changeset(attrs)
|> @repo.insert()
end
def get_contact_with_preloads(id) do
@repo.get!(Contact, id)
|> @repo.preload([:group, :updates, :notes])
end
def list_groups do
@repo.all(Group)
end
def insert_group(attrs) do
%Group{}
|> Group.changeset(attrs)
|> @repo.insert()
end
def get_contact(id), do: @repo.get!(Contact, id)
def new_contact, do: Contact.changeset(%Contact{} |> @repo.preload(:group))
def delete_contact(%Contact{} = contact), do: @repo.delete!(contact)
def new_update, do: Update.changeset(%Update{} |> @repo.preload(:contacts))
def new_group(), do: Group.changeset(%Group{})
def get_group(id), do: @repo.get!(Group, id)
end
| 24.381818 | 77 | 0.649515 |
f749703809c17842fa2dc16cfbe301024d0547cb | 12,311 | ex | Elixir | lib/stripe/core_resources/payment_intent.ex | dkulchenko/stripity_stripe | 88deb5a1dffe39aa391dadf5f6cf143c5220a9a6 | [
"BSD-3-Clause"
] | null | null | null | lib/stripe/core_resources/payment_intent.ex | dkulchenko/stripity_stripe | 88deb5a1dffe39aa391dadf5f6cf143c5220a9a6 | [
"BSD-3-Clause"
] | null | null | null | lib/stripe/core_resources/payment_intent.ex | dkulchenko/stripity_stripe | 88deb5a1dffe39aa391dadf5f6cf143c5220a9a6 | [
"BSD-3-Clause"
defmodule Stripe.PaymentIntent do
  @moduledoc """
  Work with [Stripe `payment_intent` objects](https://stripe.com/docs/api/payment_intents).
  You can:
  - [Create a payment_intent](https://stripe.com/docs/api/payment_intents/create)
  - [Retrieve a payment_intent](https://stripe.com/docs/api/payment_intents/retrieve)
  - [Update a payment_intent](https://stripe.com/docs/api/payment_intents/update)
  - [Confirm a payment_intent](https://stripe.com/docs/api/payment_intents/confirm)
  - [Capture a payment_intent](https://stripe.com/docs/api/payment_intents/capture)
  - [Cancel a payment_intent](https://stripe.com/docs/api/payment_intents/cancel)
  - [List all payment_intent](https://stripe.com/docs/api/payment_intents/list)
  """
  use Stripe.Entity
  import Stripe.Request
  require Stripe.Util

  # Shape of the `last_payment_error` sub-object returned by Stripe.
  @type last_payment_error :: %{
          type: String.t(),
          charge: String.t(),
          code: String.t(),
          decline_code: String.t(),
          doc_url: String.t(),
          message: String.t(),
          param: String.t(),
          payment_intent: Stripe.PaymentIntent.t() | map,
          source: Stripe.Card.t() | map
        }

  @type next_action :: %{
          redirect_to_url: redirect_to_url | nil,
          type: String.t(),
          use_stripe_sdk: map | nil
        }

  @type redirect_to_url :: %{
          return_url: String.t(),
          url: String.t()
        }

  @type transfer_data :: %{
          :destination => String.t()
        }

  @type t :: %__MODULE__{
          id: Stripe.id(),
          object: String.t(),
          amount: non_neg_integer,
          amount_capturable: non_neg_integer,
          amount_received: non_neg_integer,
          application: Stripe.id() | nil,
          application_fee_amount: non_neg_integer | nil,
          canceled_at: Stripe.timestamp() | nil,
          cancellation_reason: String.t() | nil,
          capture_method: String.t(),
          charges: Stripe.List.t(Stripe.Charge.t()),
          client_secret: String.t(),
          confirmation_method: String.t(),
          created: Stripe.timestamp(),
          currency: String.t(),
          customer: Stripe.id() | Stripe.Customer.t() | nil,
          description: String.t() | nil,
          invoice: Stripe.id() | Stripe.Invoice.t() | nil,
          last_payment_error: last_payment_error | nil,
          livemode: boolean,
          metadata: Stripe.Types.metadata(),
          next_action: next_action | nil,
          on_behalf_of: Stripe.id() | Stripe.Account.t() | nil,
          payment_method: Stripe.id() | Stripe.PaymentMethod.t() | nil,
          payment_method_options: map,
          payment_method_types: list(String.t()),
          receipt_email: String.t() | nil,
          review: Stripe.id() | Stripe.Review.t() | nil,
          shipping: Stripe.Types.shipping() | nil,
          source: Stripe.Card.t() | map,
          statement_descriptor: String.t() | nil,
          statement_descriptor_suffix: String.t() | nil,
          status: String.t(),
          setup_future_usage: String.t() | nil,
          transfer_data: transfer_data | nil,
          transfer_group: String.t() | nil
        }

  defstruct [
    :id,
    :object,
    :amount,
    :amount_capturable,
    :amount_received,
    :application,
    :application_fee_amount,
    :canceled_at,
    :cancellation_reason,
    :capture_method,
    :charges,
    :client_secret,
    :confirmation_method,
    :created,
    :currency,
    :customer,
    :description,
    :invoice,
    :last_payment_error,
    :livemode,
    :metadata,
    :next_action,
    :on_behalf_of,
    :payment_method,
    :payment_method_options,
    :payment_method_types,
    :receipt_email,
    :review,
    :shipping,
    :source,
    :statement_descriptor,
    :statement_descriptor_suffix,
    :setup_future_usage,
    :status,
    :transfer_data,
    :transfer_group
  ]

  # Base REST path shared by every PaymentIntent endpoint below.
  @plural_endpoint "payment_intents"

  @doc """
  Create a payment intent.

  See the [Stripe docs](https://stripe.com/docs/api/payment_intents/create).
  """
  @spec create(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 :amount => pos_integer,
                 :currency => String.t(),
                 :payment_method_types => [String.t()],
                 optional(:application_fee_amount) => non_neg_integer,
                 optional(:capture_method) => String.t(),
                 optional(:confirm) => boolean,
                 optional(:customer) => Stripe.id() | Stripe.Customer.t(),
                 optional(:description) => String.t(),
                 optional(:metadata) => map,
                 optional(:off_session) => boolean,
                 optional(:on_behalf_of) => Stripe.id() | Stripe.Account.t(),
                 optional(:payment_method) => String.t(),
                 optional(:payment_method_options) => map,
                 optional(:receipt_email) => String.t(),
                 optional(:return_url) => String.t(),
                 optional(:save_payment_method) => boolean,
                 optional(:setup_future_usage) => String.t(),
                 optional(:shipping) => Stripe.Types.shipping(),
                 optional(:source) => Stripe.id() | Stripe.Card.t(),
                 optional(:statement_descriptor) => String.t(),
                 optional(:statement_descriptor_suffix) => String.t(),
                 optional(:transfer_data) => transfer_data,
                 optional(:transfer_group) => String.t()
               }
               | %{}
  def create(params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint)
    |> put_params(params)
    |> put_method(:post)
    # Accept structs (Account/Customer/Card) and reduce them to their ids.
    |> cast_to_id([:on_behalf_of, :customer, :source])
    |> make_request()
  end

  @doc """
  Retrieves the details of a PaymentIntent that has previously been created.

  Client-side retrieval using a publishable key is allowed when the client_secret is provided in the query string.

  When retrieved with a publishable key, only a subset of properties will be returned. Please refer to the payment intent object reference for more details.

  See the [Stripe docs](https://stripe.com/docs/api/payment_intents/retrieve).
  """
  @spec retrieve(Stripe.id() | t, params, Stripe.options()) ::
          {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:client_secret) => String.t()
               }
               | %{}
  def retrieve(id, params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_params(params)
    |> put_method(:get)
    |> make_request()
  end

  @doc """
  Updates a PaymentIntent object.

  See the [Stripe docs](https://stripe.com/docs/api/payment_intents/update).
  """
  @spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:amount) => non_neg_integer,
                 optional(:application_fee_amount) => non_neg_integer,
                 optional(:currency) => String.t(),
                 optional(:customer) => Stripe.id() | Stripe.Customer.t(),
                 optional(:description) => String.t(),
                 optional(:metadata) => map,
                 optional(:payment_method) => String.t(),
                 optional(:payment_method_types) => [Stripe.id()],
                 optional(:receipt_email) => String.t(),
                 optional(:save_payment_method) => boolean,
                 optional(:setup_future_usage) => String.t(),
                 optional(:shipping) => Stripe.Types.shipping(),
                 optional(:source) => Stripe.id() | Stripe.Card.t(),
                 optional(:statement_descriptor_suffix) => String.t(),
                 optional(:transfer_group) => String.t()
               }
               | %{}
  def update(id, params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_method(:post)
    |> put_params(params)
    |> make_request()
  end

  @doc """
  Confirm that your customer intends to pay with current or provided source. Upon confirmation,
  the PaymentIntent will attempt to initiate a payment.

  If the selected source requires additional authentication steps, the PaymentIntent will transition to
  the requires_action status and suggest additional actions via next_source_action.

  If payment fails, the PaymentIntent will transition to the requires_payment_method status.

  If payment succeeds, the PaymentIntent will transition to the succeeded status (or requires_capture,
  if capture_method is set to manual). Read the expanded documentation to learn more about server-side confirmation.

  See the [Stripe docs](https://stripe.com/docs/api/payment_intents/confirm).
  """
  @spec confirm(Stripe.id() | t, params, Stripe.options()) ::
          {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:client_secret) => String.t(),
                 optional(:receipt_email) => String.t(),
                 optional(:return_url) => String.t(),
                 optional(:save_payment_method) => boolean,
                 optional(:shipping) => Stripe.Types.shipping(),
                 optional(:source) => Stripe.id() | Stripe.Card.t()
               }
               | %{}
  def confirm(id, params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}" <> "/confirm")
    |> put_method(:post)
    |> put_params(params)
    |> make_request()
  end

  @doc """
  Capture the funds of an existing uncaptured PaymentIntent where required_action="requires_capture".

  Uncaptured PaymentIntents will be canceled exactly seven days after they are created.

  See the [Stripe docs](https://stripe.com/docs/api/payment_intents/capture).
  """
  @spec capture(Stripe.id() | t, params, Stripe.options()) ::
          {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:amount_to_capture) => non_neg_integer,
                 optional(:application_fee_amount) => non_neg_integer
               }
               | %{}
  def capture(id, params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}/capture")
    |> put_params(params)
    |> put_method(:post)
    |> make_request()
  end

  @doc """
  A PaymentIntent object can be canceled when it is in one of these statuses: requires_payment_method,
  requires_capture, requires_confirmation, requires_action.

  Once canceled, no additional charges will be made by the PaymentIntent and any operations on the PaymentIntent will fail with an error.
  For PaymentIntents with status='requires_capture', the remaining amount_capturable will automatically be refunded.

  See the [Stripe docs](https://stripe.com/docs/api/payment_intents/cancel).
  """
  @spec cancel(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:cancellation_reason) => String.t()
               }
               | %{}
  def cancel(id, params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}" <> "/cancel")
    |> put_method(:post)
    |> put_params(params)
    |> make_request()
  end

  @doc """
  Returns a list of PaymentIntents.

  See the [Stripe docs](https://stripe.com/docs/api/payment_intents/list).
  """
  @spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
        when params: %{
               optional(:customer) => Stripe.id() | Stripe.Customer.t(),
               optional(:created) => Stripe.date_query(),
               optional(:ending_before) => t | Stripe.id(),
               optional(:limit) => 1..100,
               optional(:starting_after) => t | Stripe.id()
             }
  def list(params \\ %{}, opts \\ []) do
    new_request(opts)
    # Namespace any `expand` options under the list's `data` wrapper.
    |> prefix_expansions()
    |> put_endpoint(@plural_endpoint)
    |> put_method(:get)
    |> put_params(params)
    |> cast_to_id([:ending_before, :starting_after, :customer])
    |> make_request()
  end
end
| 39.207006 | 157 | 0.59524 |
f74976781940f1bf44dcd7a9acff55f2b3a47981 | 959 | ex | Elixir | apps/calgy_api/lib/calgy_api/helpers/view_helpers.ex | calgy/calgy | 624790f1299271010f963359a7e94097277a9ee8 | [
"MIT"
] | 1 | 2017-09-18T08:54:46.000Z | 2017-09-18T08:54:46.000Z | apps/calgy_api/lib/calgy_api/helpers/view_helpers.ex | calgy/calgy | 624790f1299271010f963359a7e94097277a9ee8 | [
"MIT"
] | null | null | null | apps/calgy_api/lib/calgy_api/helpers/view_helpers.ex | calgy/calgy | 624790f1299271010f963359a7e94097277a9ee8 | [
"MIT"
defmodule CalgyApi.Helpers.ViewHelpers do
  @moduledoc """
  Plain data-formatting helpers built only on the standard library.
  """

  @doc ~S"""
  Renders a `DateTime` as an extended ISO8601 string truncated to whole
  seconds; `nil` is passed through unchanged.

  ## Examples

      iex> CalgyApi.Helpers.ViewHelpers.format_datetime(nil)
      nil

      iex> {:ok, dt, _} = DateTime.from_iso8601("2017-09-19T12:15:46.246277Z")
      iex> CalgyApi.Helpers.ViewHelpers.format_datetime(dt)
      "2017-09-19T12:15:46Z"

  """
  def format_datetime(nil), do: nil

  def format_datetime(datetime) do
    # Zero out microseconds so the rendered string stops at the seconds field.
    %DateTime{datetime | microsecond: {0, 0}}
    |> DateTime.to_iso8601(:extended)
  end

  @doc ~S"""
  Drops every key/value pair whose value is `nil` from `map`.

  ## Examples

      iex> map = %{foo: "", bar: false, baz: nil, qux: 0}
      iex> CalgyApi.Helpers.ViewHelpers.reject_nils(map)
      %{foo: "", bar: false, qux: 0}

  """
  def reject_nils(map) do
    map
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)
    |> Map.new()
  end
end
| 26.638889 | 78 | 0.649635 |
f7498485f52f5aa3ec927c17505eec82b9d6b58e | 75 | ex | Elixir | web/views/coherence/unlock_view.ex | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
] | 60 | 2017-05-09T19:08:26.000Z | 2021-01-20T11:09:42.000Z | web/views/coherence/unlock_view.ex | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
] | 6 | 2017-05-10T15:43:16.000Z | 2020-07-15T07:14:41.000Z | web/views/coherence/unlock_view.ex | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
defmodule Coherence.UnlockView do
  @moduledoc """
  View module — presumably backing Coherence's account-unlock templates
  (inferred from the module name; confirm against the router/templates).

  All behaviour is injected by `use UcxChat.Coherence.Web, :view`;
  no functions are defined here directly.
  """
  use UcxChat.Coherence.Web, :view
end
| 12.5 | 34 | 0.786667 |
f74990135e92afe506fcdc7bbed740999921a079 | 7,532 | exs | Elixir | test/grizzly/zwave/smart_start/meta_extensions/uuid16_test.exs | smartrent/grizzly | 65a397ea7bfedb5518fe63a3f058a0b6af473e39 | [
"Apache-2.0"
] | 76 | 2019-09-04T16:56:58.000Z | 2022-03-29T06:54:36.000Z | test/grizzly/zwave/smart_start/meta_extensions/uuid16_test.exs | smartrent/grizzly | 65a397ea7bfedb5518fe63a3f058a0b6af473e39 | [
"Apache-2.0"
] | 124 | 2019-09-05T14:01:24.000Z | 2022-02-28T22:58:14.000Z | test/grizzly/zwave/smart_start/meta_extensions/uuid16_test.exs | smartrent/grizzly | 65a397ea7bfedb5518fe63a3f058a0b6af473e39 | [
"Apache-2.0"
defmodule Grizzly.ZWave.SmartStart.MetaExtension.UUID16Test do
  @moduledoc false

  # Tests for the UUID16 SmartStart meta extension:
  #   * `new/2`  — builds a %{uuid: ..., format: ...} value, validating length
  #                per format (:hex, :ascii, :rfc4122)
  #   * `parse/1` — decodes the wire binary; based on the fixtures below the
  #                 layout appears to be <<type, length, format_byte, 16 value
  #                 bytes>> — confirm against the UUID16 implementation.
  #   * `encode/1` — the inverse of `parse/1`.
  use ExUnit.Case, async: true

  alias Grizzly.ZWave.SmartStart.MetaExtension.UUID16

  test "make a new uuid with hex format" do
    format = :hex
    uuid = "0102030405060708090A141516171819"

    assert {:ok, %{uuid: uuid, format: format}} == UUID16.new(uuid, :hex)
  end

  test "make a new uuid with ascii format" do
    format = :ascii
    uuid = "Hello Elixir!!!!"

    assert {:ok, %{uuid: uuid, format: format}} == UUID16.new("Hello Elixir!!!!", :ascii)
  end

  test "make a new uuid with rfc format" do
    format = :rfc4122
    uuid = "58D5E212-165B-4CA0-909B-C86B9CEE0111"

    assert {:ok, %{uuid: uuid, format: format}} == UUID16.new(uuid, :rfc4122)
  end

  # Length validation: hex wants exactly 32 hex chars, ascii exactly 16
  # characters, rfc4122 the canonical 8-4-4-4-12 grouping.
  test "cannot create when hex uuid is too short" do
    assert {:error, :invalid_uuid_length} == UUID16.new("0123", :hex)
  end

  test "cannot create when hex uuid is too long" do
    assert {:error, :invalid_uuid_length} ==
             UUID16.new("0102030405060708090A1415161718190102030405060708090A141516171819", :hex)
  end

  test "cannot create when ascii uuid is too short" do
    assert {:error, :invalid_uuid_length} == UUID16.new("Hello!!!", :ascii)
  end

  test "cannot create when ascii uuid is too long" do
    assert {:error, :invalid_uuid_length} == UUID16.new("Hello Elixir!!!!!!!!!!!!", :ascii)
  end

  test "cannot create when rfc4122 is too long" do
    assert {:error, :invalid_uuid_length} ==
             UUID16.new("58D5E212-165B-4CA0-909B-C86B9CEE01111", :rfc4122)
  end

  test "cannot create when rfc4122 is too short" do
    assert {:error, :invalid_uuid_length} ==
             UUID16.new("58D5E212-165B-4CA0-09B-C86B9CEE0111", :rfc4122)
  end

  describe "parsing" do
    # Format byte 0x00/0x01 = bare hex/ascii, 0x02/0x03 = "sn:" prefixed,
    # 0x04/0x05 = "UUID:" prefixed, 0x06 = RFC 4122 (per fixtures below).
    test "when critical bit is set" do
      binary = <<0x07, 0x11, 0x00, 0x00>>

      assert {:error, :critical_bit_set} ==
               UUID16.parse(binary)
    end

    test "when representation is 32 hex numbers" do
      binary =
        <<0x06, 0x11, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x14,
          0x15, 0x16, 0x17, 0x18, 0x19>>

      expected_uuid = %{
        uuid: "0102030405060708090A141516171819",
        format: :hex
      }

      assert {:ok, expected_uuid} == UUID16.parse(binary)
    end

    test "when representation is 16 ASCII characters" do
      binary =
        <<0x06, 0x11, 0x01, ?H, ?e, ?l, ?l, ?o, 0x20, ?E, ?l, ?i, ?x, ?i, ?r, ?!, ?!, ?!, ?!>>

      expected_uuid = %{
        uuid: "Hello Elixir!!!!",
        format: :ascii
      }

      assert {:ok, expected_uuid} == UUID16.parse(binary)
    end

    test "when representation is sn: with 32 hex digits" do
      binary =
        <<0x06, 0x11, 0x02, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x14,
          0x15, 0x16, 0x17, 0x18, 0x19>>

      expected_uuid = %{
        uuid: "sn:0102030405060708090A141516171819",
        format: :hex
      }

      assert {:ok, expected_uuid} == UUID16.parse(binary)
    end

    test "when representation is sn: with with 16 ASCII characters" do
      binary =
        <<0x06, 0x11, 0x03, ?H, ?e, ?l, ?l, ?o, 0x20, ?E, ?l, ?i, ?x, ?i, ?r, ?!, ?!, ?!, ?!>>

      expected_uuid = %{
        uuid: "sn:Hello Elixir!!!!",
        format: :ascii
      }

      assert {:ok, expected_uuid} == UUID16.parse(binary)
    end

    test "when representation is UUID: with 32 hex digits" do
      binary =
        <<0x06, 0x11, 0x04, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x14,
          0x15, 0x16, 0x17, 0x18, 0x19>>

      expected_uuid = %{
        uuid: "UUID:0102030405060708090A141516171819",
        format: :hex
      }

      assert {:ok, expected_uuid} == UUID16.parse(binary)
    end

    test "when representation is UUID: with 16 ASCII characters" do
      binary =
        <<0x06, 0x11, 0x05, ?H, ?e, ?l, ?l, ?o, 0x20, ?E, ?l, ?i, ?x, ?i, ?r, ?!, ?!, ?!, ?!>>

      expected_uuid = %{
        uuid: "UUID:Hello Elixir!!!!",
        format: :ascii
      }

      assert {:ok, expected_uuid} == UUID16.parse(binary)
    end

    test "when representation is RFC 4122 UUID format" do
      binary =
        <<0x06, 0x11, 0x06, 0x58, 0xD5, 0xE2, 0x12, 0x16, 0x5B, 0x4C, 0xA0, 0x90, 0x9B, 0xC8,
          0x6B, 0x9C, 0xEE, 0x01, 0x11>>

      expected_uuid = %{
        uuid: "58D5E212-165B-4CA0-909B-C86B9CEE0111",
        format: :rfc4122
      }

      assert {:ok, expected_uuid} == UUID16.parse(binary)
    end

    # Out-of-range format bytes (7..99) fall back to the bare-hex (0)
    # representation rather than erroring.
    test "Maps representation when format byte is between 7 and 99 back to 0 representation 0" do
      7..99
      |> Enum.map(fn representation ->
        <<0x06, 0x11, representation, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A,
          0x14, 0x15, 0x16, 0x17, 0x18, 0x19>>
      end)
      |> Enum.each(fn binary ->
        expected_uuid = %{
          uuid: "0102030405060708090A141516171819",
          format: :hex
        }

        assert {:ok, expected_uuid} == UUID16.parse(binary)
      end)
    end
  end

  describe "to binary" do
    # Round-trip counterparts of the parsing fixtures above; the "sn:" and
    # "UUID:" prefixes are encoded in the format byte, not the value bytes.
    test "when representation is 32 hex digits" do
      uuid16 = %{
        uuid: "0102030405060708090A141516171819",
        format: :hex
      }

      expected_binary =
        <<0x06, 0x11, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x14,
          0x15, 0x16, 0x17, 0x18, 0x19>>

      assert expected_binary == UUID16.encode(uuid16)
    end

    test "when representation is 32 hex digits with sn" do
      uuid16 = %{
        uuid: "sn:0102030405060708090A141516171819",
        format: :hex
      }

      expected_binary =
        <<0x06, 0x11, 0x02, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x14,
          0x15, 0x16, 0x17, 0x18, 0x19>>

      assert expected_binary == UUID16.encode(uuid16)
    end

    test "when representation is 32 hex digits with UUID" do
      uuid16 = %{
        uuid: "UUID:0102030405060708090A141516171819",
        format: :hex
      }

      expected_binary =
        <<0x06, 0x11, 0x04, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x14,
          0x15, 0x16, 0x17, 0x18, 0x19>>

      assert expected_binary == UUID16.encode(uuid16)
    end

    test "when representation is 16 ASCII characters" do
      uuid16 = %{
        uuid: "Hello Elixir!!!!",
        format: :ascii
      }

      assert <<0x06, 0x011, 0x01, 0x48, 0x65, 0x6C, 0x6C, 0x6F, 0x20, 0x45, 0x6C, 0x69, 0x78,
               0x69, 0x72, 0x21, 0x21, 0x21, 0x21>> == UUID16.encode(uuid16)
    end

    test "when representation is 16 ASCII characters wth sn" do
      uuid16 = %{
        uuid: "sn:Hello Elixir!!!!",
        format: :ascii
      }

      assert <<0x06, 0x011, 0x03, 0x48, 0x65, 0x6C, 0x6C, 0x6F, 0x20, 0x45, 0x6C, 0x69, 0x78,
               0x69, 0x72, 0x21, 0x21, 0x21, 0x21>> == UUID16.encode(uuid16)
    end

    test "when representation is 16 ASCII characters with UUID" do
      uuid16 = %{
        uuid: "UUID:Hello Elixir!!!!",
        format: :ascii
      }

      assert <<0x06, 0x011, 0x05, 0x48, 0x65, 0x6C, 0x6C, 0x6F, 0x20, 0x45, 0x6C, 0x69, 0x78,
               0x69, 0x72, 0x21, 0x21, 0x21, 0x21>> == UUID16.encode(uuid16)
    end

    test "when representation is RFC 4122" do
      expected_binary =
        <<0x06, 0x11, 0x06, 0x58, 0xD5, 0xE2, 0x12, 0x16, 0x5B, 0x4C, 0xA0, 0x90, 0x9B, 0xC8,
          0x6B, 0x9C, 0xEE, 0x01, 0x11>>

      uuid = %{
        uuid: "58D5E212-165B-4CA0-909B-C86B9CEE0111",
        format: :rfc4122
      }

      assert expected_binary == UUID16.encode(uuid)
    end
  end
end
| 30.007968 | 97 | 0.591609 |
f749e26905a9fcdb81a66cf60344196afc235439 | 3,258 | ex | Elixir | lib/earmark_parser/helpers/ast_helpers.ex | sionide21/earmark_parser | d9283fd90dad21d4bf4277e284b2e4a8560d14b0 | [
"Apache-2.0"
] | null | null | null | lib/earmark_parser/helpers/ast_helpers.ex | sionide21/earmark_parser | d9283fd90dad21d4bf4277e284b2e4a8560d14b0 | [
"Apache-2.0"
] | null | null | null | lib/earmark_parser/helpers/ast_helpers.ex | sionide21/earmark_parser | d9283fd90dad21d4bf4277e284b2e4a8560d14b0 | [
"Apache-2.0"
defmodule EarmarkParser.Helpers.AstHelpers do
  @moduledoc false

  # Internal helpers used while building the HTML-like AST:
  # attribute normalisation/merging and rendering of inline elements
  # (code spans, links, images, footnote references).

  import EarmarkParser.Ast.Emitter
  import EarmarkParser.Helpers
  import EarmarkParser.Helpers.AttrParser

  alias EarmarkParser.Block

  @doc false
  # Converts one attribute pair to string keys; list values are joined with
  # single spaces, e.g. {:class, ["a", "b"]} -> {"class", "a b"}.
  def attrs_to_string_keys(key_value_pair)
  def attrs_to_string_keys({k, vs}) when is_list(vs) do
    {to_string(k), Enum.join(vs, " ")}
  end

  def attrs_to_string_keys({k, vs}) do
    {to_string(k),to_string(vs)}
  end

  @doc false
  # Merges the IAL attributes `atts` into the attributes of the first tag
  # only; an empty tag list is passed through unchanged.
  def augment_tag_with_ial(tags, ial)
  def augment_tag_with_ial([{t, a, c, m}|tags], atts) do
    [{t, merge_attrs(a, atts), c, m}|tags]
  end

  def augment_tag_with_ial([], _atts) do
    []
  end

  @doc false
  # Builds the class attribute for a fenced code block: the bare language
  # plus one "#{prefix}#{language}" entry per word in `prefix`.
  def code_classes(language, prefix) do
    classes =
      ["" | String.split(prefix || "")]
      |> Enum.map(fn pfx -> "#{pfx}#{language}" end)

    {"class", classes |> Enum.join(" ")}
  end

  @doc false
  # Inline code span: <code class="inline">text</code>.
  def codespan(text) do
    emit("code", text, class: "inline")
  end

  @doc false
  # Anchor linking a footnote reference to its definition (`ref`), carrying
  # `backref` as its id so the definition can link back.
  def render_footnote_link(ref, backref, number) do
    emit("a", to_string(number), href: "##{ref}", id: backref, class: "footnote", title: "see footnote")
  end

  @doc false
  # Joins a code block's raw lines back into a single newline-separated body.
  def render_code(%Block.Code{lines: lines}) do
    lines |> Enum.join("\n")
  end

  # Matches a single backslash not followed by another backslash (extended
  # regex, whitespace in the pattern is insignificant).
  @remove_escapes ~r{ \\ (?! \\ ) }x

  @doc false
  # <img> tag; the alt text is escaped and stripped of lone backslash escapes.
  # `escape/1` is imported from EarmarkParser.Helpers.
  def render_image(text, href, title) do
    alt = text |> escape() |> String.replace(@remove_escapes, "")

    if title do
      emit("img", [], src: href, alt: alt, title: title)
    else
      emit("img", [], src: href, alt: alt)
    end
  end

  @doc false
  # <a> tag; url and text are URI-normalised best-effort — decode errors
  # (e.g. stray "%" sequences) leave the original value untouched.
  def render_link(url, text) do
    url =
      try do
        url
        |> URI.decode
        |> URI.encode
      rescue
        _ -> url
      end

    text =
      try do
        URI.decode(text)
      rescue
        _ -> text
      end

    emit("a", text, href: url)
  end

  ##############################################
  # add attributes to the outer tag in a block #
  ##############################################

  @doc false
  # Merges `new_atts` into `maybe_atts` (nil, keyword list or map),
  # concatenating values for duplicate keys via `_value_merger/3` and
  # returning a list of string-keyed pairs.
  def merge_attrs(maybe_atts, new_atts)
  def merge_attrs(nil, new_atts), do: new_atts

  def merge_attrs(atts, new) when is_list(atts) do
    atts
    |> Enum.into(%{})
    |> merge_attrs(new)
  end

  def merge_attrs(atts, new) do
    atts
    |> Map.merge(new, &_value_merger/3)
    |> Enum.into([])
    |> Enum.map(&attrs_to_string_keys/1)
  end

  @doc false
  # NOTE(review): the clauses return different shapes — {context, text} when
  # there is nothing to add, a merged attribute map otherwise. This looks
  # intentional upstream but should be confirmed against the callers.
  def add_attrs(context, text, attrs_as_string_or_map, default_attrs, lnb)
  def add_attrs(context, text, nil, [], _lnb), do: {context, text}

  def add_attrs(context, text, nil, default, lnb), do: add_attrs(context, text, %{}, default, lnb)

  # String attributes (IAL source) are parsed first; `parse_attrs/3` is
  # imported from EarmarkParser.Helpers.AttrParser.
  def add_attrs(context, text, attrs, default, lnb) when is_binary(attrs) do
    {context1, attrs} = parse_attrs( context, attrs, lnb )
    add_attrs(context1, text, attrs, default, lnb)
  end

  def add_attrs(_context, _text, attrs, default, _lnb) do
    default
    |> Map.new()
    |> Map.merge(attrs, fn _k, v1, v2 -> v1 ++ v2 end)
  end

  # Combines two values for the same attribute key, always producing a list.
  defp _value_merger(key, val1, val2)
  defp _value_merger(_, val1, val2) when is_list(val1) and is_list(val2) do
    val1 ++ val2
  end

  defp _value_merger(_, val1, val2) when is_list(val1) do
    val1 ++ [val2]
  end

  defp _value_merger(_, val1, val2) do
    [val1, val2]
  end
end
# SPDX-License-Identifier: Apache-2.0
| 24.133333 | 104 | 0.609576 |
f749eb3660e08e92c892cf00ba4dcea8cccc3dbc | 50,463 | ex | Elixir | apps/astarte_appengine_api/lib/astarte_appengine_api/device/device.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | apps/astarte_appengine_api/lib/astarte_appengine_api/device/device.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | apps/astarte_appengine_api/lib/astarte_appengine_api/device/device.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2017 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Astarte.AppEngine.API.Device do
@moduledoc """
The Device context.
"""
alias Astarte.AppEngine.API.DataTransmitter
alias Astarte.AppEngine.API.Device.AstarteValue
alias Astarte.AppEngine.API.Device.DevicesListOptions
alias Astarte.AppEngine.API.Device.DeviceStatus
alias Astarte.AppEngine.API.Device.MapTree
alias Astarte.AppEngine.API.Device.InterfaceValues
alias Astarte.AppEngine.API.Device.InterfaceValuesOptions
alias Astarte.AppEngine.API.Device.Queries
alias Astarte.Core.CQLUtils
alias Astarte.Core.Device
alias Astarte.Core.InterfaceDescriptor
alias Astarte.Core.Interface.Aggregation
alias Astarte.Core.Interface.Type
alias Astarte.Core.Mapping
alias Astarte.Core.Mapping.EndpointsAutomaton
alias Astarte.Core.Mapping.ValueType
alias Astarte.DataAccess.Database
alias Astarte.DataAccess.Mappings
alias Astarte.DataAccess.Device, as: DeviceQueries
alias Astarte.DataAccess.Interface, as: InterfaceQueries
alias Ecto.Changeset
require Logger
def list_devices!(realm_name, params) do
changeset = DevicesListOptions.changeset(%DevicesListOptions{}, params)
with {:ok, options} <- Changeset.apply_action(changeset, :insert),
{:ok, client} <- Database.connect(realm: realm_name) do
Queries.retrieve_devices_list(client, options.limit, options.details, options.from_token)
end
end
@doc """
Returns a DeviceStatus struct which represents device status.
Device status returns information such as connected, last_connection and last_disconnection.
"""
def get_device_status!(realm_name, encoded_device_id) do
with {:ok, client} <- Database.connect(realm: realm_name),
{:ok, device_id} <- Device.decode_device_id(encoded_device_id) do
Queries.retrieve_device_status(client, device_id)
end
end
  @doc """
  Applies a partial status update (aliases, attributes, credentials
  inhibition) to a device and returns the resulting `DeviceStatus`.

  The incoming `device_status_merge` map is validated through
  `DeviceStatus.changeset/2`; only the changed fields are written back.
  """
  def merge_device_status(realm_name, encoded_device_id, device_status_merge) do
    with {:ok, client} <- Database.connect(realm: realm_name),
         {:ok, device_id} <- Device.decode_device_id(encoded_device_id),
         {:ok, device_status} <- Queries.retrieve_device_status(client, device_id),
         # Bare `=` clauses inside `with` only bind values; they cannot fail
         # the chain.
         changeset = DeviceStatus.changeset(device_status, device_status_merge),
         {:ok, updated_device_status} <- Ecto.Changeset.apply_action(changeset, :update),
         # nil when the field was not part of the merge (no-op downstream).
         credentials_inhibited_change = Map.get(changeset.changes, :credentials_inhibited),
         :ok <- change_credentials_inhibited(client, device_id, credentials_inhibited_change),
         aliases_change = Map.get(changeset.changes, :aliases, %{}),
         attributes_change = Map.get(changeset.changes, :attributes, %{}),
         :ok <- update_aliases(client, device_id, aliases_change),
         :ok <- update_attributes(client, device_id, attributes_change) do
      # Manually merge aliases since changesets don't perform maps deep merge
      merged_aliases = merge_data(device_status.aliases, updated_device_status.aliases)
      merged_attributes = merge_data(device_status.attributes, updated_device_status.attributes)

      updated_map =
        updated_device_status
        |> Map.put(:aliases, merged_aliases)
        |> Map.put(:attributes, merged_attributes)

      {:ok, updated_map}
    end
  end
defp update_attributes(client, device_id, attributes) do
Enum.reduce_while(attributes, :ok, fn
{"", _attribute_value}, _acc ->
Logger.warn("Attribute key cannot be an empty string.", tag: :invalid_attribute_empty_key)
{:halt, {:error, :invalid_attributes}}
{attribute_key, nil}, _acc ->
case Queries.delete_attribute(client, device_id, attribute_key) do
:ok ->
{:cont, :ok}
{:error, reason} ->
{:halt, {:error, reason}}
end
{attribute_key, attribute_value}, _acc ->
case Queries.insert_attribute(client, device_id, attribute_key, attribute_value) do
:ok ->
{:cont, :ok}
{:error, reason} ->
{:halt, {:error, reason}}
end
end)
end
defp update_aliases(client, device_id, aliases) do
Enum.reduce_while(aliases, :ok, fn
{_alias_key, ""}, _acc ->
Logger.warn("Alias value cannot be an empty string.", tag: :invalid_alias_empty_value)
{:halt, {:error, :invalid_alias}}
{"", _alias_value}, _acc ->
Logger.warn("Alias key cannot be an empty string.", tag: :invalid_alias_empty_key)
{:halt, {:error, :invalid_alias}}
{alias_key, nil}, _acc ->
case Queries.delete_alias(client, device_id, alias_key) do
:ok -> {:cont, :ok}
{:error, reason} -> {:halt, {:error, reason}}
end
{alias_key, alias_value}, _acc ->
case Queries.insert_alias(client, device_id, alias_key, alias_value) do
:ok -> {:cont, :ok}
{:error, reason} -> {:halt, {:error, reason}}
end
end)
end
defp merge_data(old_data, new_data) when is_map(old_data) and is_map(new_data) do
Map.merge(old_data, new_data)
|> Enum.reject(fn {_, v} -> v == nil end)
|> Enum.into(%{})
end
defp change_credentials_inhibited(_client, _device_id, nil) do
:ok
end
defp change_credentials_inhibited(client, device_id, credentials_inhibited)
when is_boolean(credentials_inhibited) do
Queries.set_inhibit_credentials_request(client, device_id, credentials_inhibited)
end
@doc """
Returns the list of interfaces.
"""
def list_interfaces(realm_name, encoded_device_id) do
with {:ok, client} <- Database.connect(realm: realm_name),
{:ok, device_id} <- Device.decode_device_id(encoded_device_id) do
Queries.retrieve_interfaces_list(client, device_id)
end
end
@doc """
Gets all values set on a certain interface.
This function handles all GET requests on /{realm_name}/devices/{device_id}/interfaces/{interface}
"""
def get_interface_values!(realm_name, encoded_device_id, interface, params) do
changeset = InterfaceValuesOptions.changeset(%InterfaceValuesOptions{}, params)
with {:ok, options} <- Changeset.apply_action(changeset, :insert),
{:ok, client} <- Database.connect(realm: realm_name),
{:ok, device_id} <- Device.decode_device_id(encoded_device_id),
{:ok, major_version} <- DeviceQueries.interface_version(client, device_id, interface),
{:ok, interface_row} <-
InterfaceQueries.retrieve_interface_row(client, interface, major_version) do
do_get_interface_values!(
client,
device_id,
Aggregation.from_int(interface_row[:aggregation]),
interface_row,
options
)
end
end
@doc """
Gets a single interface_values.

Raises if the Interface values does not exist.
"""
def get_interface_values!(realm_name, encoded_device_id, interface, no_prefix_path, params) do
  # Validate/normalize the query-string options (format, limit, since, ...).
  changeset = InterfaceValuesOptions.changeset(%InterfaceValuesOptions{}, params)

  with {:ok, options} <- Changeset.apply_action(changeset, :insert),
       {:ok, client} <- Database.connect(realm: realm_name),
       {:ok, device_id} <- Device.decode_device_id(encoded_device_id),
       {:ok, major_version} <- DeviceQueries.interface_version(client, device_id, interface),
       {:ok, interface_row} <-
         InterfaceQueries.retrieve_interface_row(client, interface, major_version),
       # The URL carries the path without a leading slash; Astarte paths have one.
       path <- "/" <> no_prefix_path,
       {:ok, interface_descriptor} <- InterfaceDescriptor.from_db_result(interface_row),
       # Incomplete paths are allowed on reads (allow_guess: true).
       {:ok, endpoint_ids} <-
         get_endpoint_ids(interface_descriptor.automaton, path, allow_guess: true) do
    endpoint_query = Queries.prepare_value_type_query(interface_row[:interface_id])

    do_get_interface_values!(
      client,
      device_id,
      Aggregation.from_int(interface_row[:aggregation]),
      Type.from_int(interface_row[:type]),
      interface_row,
      endpoint_ids,
      endpoint_query,
      path,
      options
    )
  end
end
# Writes a value on an individual-aggregation interface: resolves the
# endpoint, casts/validates the value against the mapping, publishes it to
# the device and finally stores it (with TTL for datastreams, when
# configured).
defp update_individual_interface_values(
       client,
       realm_name,
       device_id,
       interface_descriptor,
       path,
       raw_value
     ) do
  with {:ok, [endpoint_id]} <- get_endpoint_ids(interface_descriptor.automaton, path),
       mapping <-
         Queries.retrieve_mapping(client, interface_descriptor.interface_id, endpoint_id),
       {:ok, value} <- cast_value(mapping.value_type, raw_value),
       :ok <- validate_value_type(mapping.value_type, value),
       wrapped_value = wrap_to_bson_struct(mapping.value_type, value),
       interface_type = interface_descriptor.type,
       reliability = mapping.reliability,
       publish_opts = build_publish_opts(interface_type, reliability),
       interface_name = interface_descriptor.name,
       # Publish to the device first: if the publish fails nothing is stored.
       :ok <-
         ensure_publish(
           realm_name,
           device_id,
           interface_name,
           path,
           wrapped_value,
           publish_opts
         ),
       {:ok, realm_max_ttl} <-
         Queries.fetch_datastream_maximum_storage_retention(client) do
    timestamp_micro =
      DateTime.utc_now()
      |> DateTime.to_unix(:microsecond)

    # Effective TTL: the stricter between the realm-wide maximum and the
    # mapping's own retention TTL (only when the mapping opts into TTL).
    db_max_ttl =
      if mapping.database_retention_policy == :use_ttl do
        min(realm_max_ttl, mapping.database_retention_ttl)
      else
        realm_max_ttl
      end

    opts =
      case db_max_ttl do
        nil ->
          []

        _ ->
          [ttl: db_max_ttl]
      end

    Queries.insert_value_into_db(
      client,
      device_id,
      interface_descriptor,
      endpoint_id,
      mapping,
      path,
      value,
      timestamp_micro,
      opts
    )

    # Datastreams also track the set of known paths in a dedicated table.
    if interface_descriptor.type == :datastream do
      Queries.insert_path_into_db(
        client,
        device_id,
        interface_descriptor,
        endpoint_id,
        path,
        timestamp_micro,
        div(timestamp_micro, 1000),
        opts
      )
    end

    {:ok,
     %InterfaceValues{
       data: raw_value
     }}
  else
    {:error, :endpoint_guess_not_allowed} ->
      _ = Logger.warn("Incomplete path not allowed.", tag: "endpoint_guess_not_allowed")
      {:error, :read_only_resource}

    {:error, :unexpected_value_type, expected: value_type} ->
      _ = Logger.warn("Unexpected value type.", tag: "unexpected_value_type")
      {:error, :unexpected_value_type, expected: value_type}

    {:error, reason} ->
      _ = Logger.warn("Error while writing to interface.", tag: "write_to_device_error")
      {:error, reason}
  end
end
# Depth of a path or endpoint, i.e. the number of non-empty "/"-separated
# segments ("/a/b/c" -> 3).
defp path_or_endpoint_depth(path) when is_binary(path) do
  path
  |> String.split("/", trim: true)
  |> Enum.count()
end
# Resolves `path` for an object aggregated interface. Publishes on such
# interfaces happen on the common prefix of all endpoints, so a valid path
# must be a guessed (incomplete) automaton match; an exact endpoint match
# is rejected. On success returns a Mapping carrying the interface-level
# endpoint id (computed with an empty endpoint string).
defp resolve_object_aggregation_path(
       path,
       %InterfaceDescriptor{aggregation: :object} = interface_descriptor,
       mappings
     ) do
  # Index mappings by endpoint_id for the prefix check below.
  mappings =
    Enum.into(mappings, %{}, fn mapping ->
      {mapping.endpoint_id, mapping}
    end)

  with {:guessed, guessed_endpoints} <-
         EndpointsAutomaton.resolve_path(path, interface_descriptor.automaton),
       :ok <- check_object_aggregation_prefix(path, guessed_endpoints, mappings) do
    endpoint_id =
      CQLUtils.endpoint_id(
        interface_descriptor.name,
        interface_descriptor.major_version,
        ""
      )

    {:ok, %Mapping{endpoint_id: endpoint_id}}
  else
    {:ok, _endpoint_id} ->
      # This is invalid here, publish doesn't happen on endpoints in object aggregated interfaces
      Logger.warn(
        "Tried to publish on endpoint #{inspect(path)} for object aggregated " <>
          "interface #{inspect(interface_descriptor.name)}. You should publish on " <>
          "the common prefix",
        tag: "invalid_path"
      )

      {:error, :mapping_not_found}

    {:error, :not_found} ->
      # The automaton knows nothing about this path.
      Logger.warn(
        "Tried to publish on invalid path #{inspect(path)} for object aggregated " <>
          "interface #{inspect(interface_descriptor.name)}",
        tag: "invalid_path"
      )

      {:error, :mapping_not_found}

    {:error, :invalid_object_aggregation_path} ->
      # Guessed endpoints exist but the path depth doesn't match the prefix rule.
      Logger.warn(
        "Tried to publish on invalid path #{inspect(path)} for object aggregated " <>
          "interface #{inspect(interface_descriptor.name)}",
        tag: "invalid_path"
      )

      {:error, :mapping_not_found}
  end
end
# Checks that `path` is exactly one level above every guessed endpoint,
# i.e. it is the common prefix object aggregated publishes must target.
defp check_object_aggregation_prefix(path, guessed_endpoints, mappings) do
  prefix_depth = path_or_endpoint_depth(path)

  Enum.reduce_while(guessed_endpoints, :ok, fn endpoint_id, _acc ->
    case Map.fetch(mappings, endpoint_id) do
      {:ok, %Mapping{endpoint: endpoint}} ->
        if path_or_endpoint_depth(endpoint) - 1 == prefix_depth do
          {:cont, :ok}
        else
          {:halt, {:error, :invalid_object_aggregation_path}}
        end

      _ ->
        {:halt, {:error, :invalid_object_aggregation_path}}
    end
  end)
end
# Retention TTL for an object aggregated interface, read from the first
# mapping (all mappings share the retention settings); nil means no TTL.
defp object_retention([first | _rest] = _mappings) do
  case first.database_retention_policy do
    :no_ttl -> nil
    _other -> first.database_retention_ttl
  end
end
# Writes a whole sample on an object aggregated interface: validates every
# key of `raw_value` against the interface mappings, publishes it to the
# device, then stores both the value and the path.
defp update_object_interface_values(
       client,
       realm_name,
       device_id,
       interface_descriptor,
       path,
       raw_value
     ) do
  timestamp_micro =
    DateTime.utc_now()
    |> DateTime.to_unix(:microsecond)

  with {:ok, mappings} <-
         Mappings.fetch_interface_mappings(client, interface_descriptor.interface_id),
       {:ok, endpoint} <-
         resolve_object_aggregation_path(path, interface_descriptor, mappings),
       endpoint_id <- endpoint.endpoint_id,
       expected_types <- extract_expected_types(mappings),
       :ok <- validate_value_type(expected_types, raw_value),
       wrapped_value = wrap_to_bson_struct(nil, raw_value),
       reliability = extract_aggregate_reliability(mappings),
       interface_type = interface_descriptor.type,
       publish_opts = build_publish_opts(interface_type, reliability),
       interface_name = interface_descriptor.name,
       # Publish to the device first: if the publish fails nothing is stored.
       :ok <-
         ensure_publish(
           realm_name,
           device_id,
           interface_name,
           path,
           wrapped_value,
           publish_opts
         ),
       {:ok, realm_max_ttl} <-
         Queries.fetch_datastream_maximum_storage_retention(client) do
    # min/2 with a nil side relies on Erlang term ordering (numbers sort
    # before atoms), so a nil (no limit) side yields the other side.
    db_max_ttl = min(realm_max_ttl, object_retention(mappings))

    opts =
      case db_max_ttl do
        nil ->
          []

        _ ->
          [ttl: db_max_ttl]
      end

    Queries.insert_value_into_db(
      client,
      device_id,
      interface_descriptor,
      nil,
      nil,
      path,
      raw_value,
      timestamp_micro,
      opts
    )

    Queries.insert_path_into_db(
      client,
      device_id,
      interface_descriptor,
      endpoint_id,
      path,
      timestamp_micro,
      div(timestamp_micro, 1000),
      opts
    )

    {:ok,
     %InterfaceValues{
       data: raw_value
     }}
  else
    {:error, :unexpected_value_type, expected: value_type} ->
      Logger.warn("Unexpected value type.", tag: "unexpected_value_type")
      {:error, :unexpected_value_type, expected: value_type}

    {:error, :invalid_object_aggregation_path} ->
      Logger.warn("Error while trying to publish on path for object aggregated interface.",
        tag: "invalid_object_aggregation_path"
      )

      {:error, :invalid_object_aggregation_path}

    {:error, :mapping_not_found} ->
      {:error, :mapping_not_found}

    {:error, :database_error} ->
      Logger.warn("Error while trying to retrieve ttl.", tag: "database_error")
      {:error, :database_error}

    {:error, reason} ->
      Logger.warn("Unhandled error while updating object interface values: #{inspect(reason)}.")
      {:error, reason}
  end
end
@doc """
Writes `raw_value` at `no_prefix_path` on a server-owned interface of the
given device, publishing it to the device and persisting it.
"""
def update_interface_values(
      realm_name,
      encoded_device_id,
      interface,
      no_prefix_path,
      raw_value,
      _params
    ) do
  with {:ok, client} <- Database.connect(realm: realm_name),
       {:ok, device_id} <- Device.decode_device_id(encoded_device_id),
       {:ok, major_version} <- DeviceQueries.interface_version(client, device_id, interface),
       {:ok, interface_row} <-
         InterfaceQueries.retrieve_interface_row(client, interface, major_version),
       {:ok, interface_descriptor} <- InterfaceDescriptor.from_db_result(interface_row),
       # Only server-owned interfaces may be written from here.
       {:ownership, :server} <- {:ownership, interface_descriptor.ownership},
       path <- "/" <> no_prefix_path do
    # Dispatch on the interface aggregation.
    case interface_descriptor.aggregation do
      :individual ->
        update_individual_interface_values(
          client,
          realm_name,
          device_id,
          interface_descriptor,
          path,
          raw_value
        )

      _object ->
        update_object_interface_values(
          client,
          realm_name,
          device_id,
          interface_descriptor,
          path,
          raw_value
        )
    end
  else
    {:ownership, :device} ->
      _ = Logger.warn("Invalid write (device owned).", tag: "cannot_write_to_device_owned")
      {:error, :cannot_write_to_device_owned}

    {:error, reason} ->
      _ = Logger.warn("Error while writing to interface.", tag: "write_to_device_error")
      {:error, reason}
  end
end
# Builds a map of last-endpoint-segment => value type from the interface
# mappings, used to validate object aggregation payload keys.
defp extract_expected_types(mappings) do
  Map.new(mappings, fn mapping ->
    expected_key =
      mapping.endpoint
      |> String.split("/")
      |> List.last()

    {expected_key, mapping.value_type}
  end)
end
# Object aggregated interfaces share one reliability across all mappings,
# so reading it from the first one is enough.
defp extract_aggregate_reliability([first_mapping | _rest] = _mappings) do
  first_mapping.reliability
end
# Options passed down to publish_data/6; properties carry no reliability.
defp build_publish_opts(:properties, _reliability), do: [type: :properties]

defp build_publish_opts(:datastream, reliability),
  do: [type: :datastream, reliability: reliability]
# Sends a property unset to the device and checks how many subscribers
# received it: zero is an error, one is fine, more than one is logged but
# still considered a success.
defp ensure_unset(realm, device_id, interface, path) do
  with {:ok, %{local_matches: local_matches, remote_matches: remote_matches}} <-
         DataTransmitter.unset_property(realm, device_id, interface, path) do
    total_matches = local_matches + remote_matches

    cond do
      total_matches == 0 ->
        {:error, :cannot_push_to_device}

      total_matches == 1 ->
        :ok

      true ->
        # Multiple matches, we print a warning but we consider it ok
        Logger.warn(
          "Multiple matches while sending unset to device, " <>
            "local_matches: #{local_matches}, remote_matches: #{remote_matches}",
          tag: "publish_multiple_matches"
        )

        :ok
    end
  end
end
# Publishes and then verifies the delivery outcome against the requested
# reliability; returns :ok or an error tuple.
defp ensure_publish(realm, device_id, interface, path, value, opts) do
  case publish_data(realm, device_id, interface, path, value, opts) do
    {:ok, %{local_matches: local_matches, remote_matches: remote_matches}} ->
      ensure_publish_reliability(local_matches, remote_matches, opts)

    other ->
      other
  end
end
# Performs the actual publish on the data plane, dispatching on the :type
# option built by build_publish_opts/2.
defp publish_data(realm, device_id, interface, path, value, opts) do
  case Keyword.fetch!(opts, :type) do
    :properties ->
      DataTransmitter.set_property(realm, device_id, interface, path, value)

    :datastream ->
      # Reliability is mandatory for datastreams and is mapped to a QoS level.
      qos = opts |> Keyword.fetch!(:reliability) |> reliability_to_qos()

      DataTransmitter.push_datastream(realm, device_id, interface, path, value, qos: qos)
  end
end
# Exactly one receiver got the message: always good.
defp ensure_publish_reliability(local_matches, remote_matches, _opts)
     when local_matches + remote_matches == 1 do
  :ok
end

# More than one receiver: unexpected, log a warning but still treat the
# publish as successful.
defp ensure_publish_reliability(local_matches, remote_matches, _opts)
     when local_matches + remote_matches > 1 do
  Logger.warn(
    "Multiple matches while publishing to device, " <>
      "local_matches: #{local_matches}, remote_matches: #{remote_matches}",
    tag: "publish_multiple_matches"
  )

  :ok
end

# Zero receivers: whether this is acceptable depends on type and reliability.
defp ensure_publish_reliability(_local, _remote, opts) do
  type = Keyword.fetch!(opts, :type)
  # :reliability may be absent for properties, hence Keyword.get/2.
  reliability = Keyword.get(opts, :reliability)

  case {type, reliability} do
    {:properties, _} ->
      # No matches will happen only if the device doesn't have a session on
      # the broker, but the SDK would then send an emptyCache at the first
      # connection and receive all properties, so this is still ok.
      :ok

    {:datastream, :unreliable} ->
      # Unreliable datastreams are allowed to get lost.
      :ok

    _ ->
      {:error, :cannot_push_to_device}
  end
end
# Maps an Astarte reliability level to the corresponding QoS integer.
defp reliability_to_qos(:unreliable), do: 0
defp reliability_to_qos(:guaranteed), do: 1
defp reliability_to_qos(:unique), do: 2
# Validates `object` (an object aggregation payload, key => value) against
# `expected_types` (key => Astarte value type), halting at the first
# failure. Returns :ok, {:error, :unexpected_object_key} for unknown keys,
# or the error produced by the scalar validation below.
defp validate_value_type(expected_types, object)
     when is_map(expected_types) and is_map(object) do
  Enum.reduce_while(object, :ok, fn {key, value}, _acc ->
    with {:ok, expected_type} <- Map.fetch(expected_types, key),
         :ok <- validate_value_type(expected_type, value) do
      {:cont, :ok}
    else
      {:error, reason, expected} ->
        {:halt, {:error, reason, expected}}

      # A plain {:error, reason} (no expected-type info) from the scalar
      # clause previously matched no else clause and crashed with a
      # WithClauseError; propagate it instead.
      {:error, reason} ->
        {:halt, {:error, reason}}

      :error ->
        # Map.fetch/2 miss: the payload carries a key the interface
        # doesn't declare.
        {:halt, {:error, :unexpected_object_key}}
    end
  end)
end

# Validates a single value against an Astarte value type, normalizing the
# generic type error to {:error, :unexpected_value_type, expected: type}.
defp validate_value_type(value_type, value) do
  case ValueType.validate_value(value_type, value) do
    :ok ->
      :ok

    {:error, :unexpected_value_type} ->
      {:error, :unexpected_value_type, expected: value_type}

    {:error, reason} ->
      {:error, reason}
  end
end
# Casts a JSON-decoded value to the Elixir representation expected for the
# given Astarte value type. Returns {:ok, cast_value} or
# {:error, :unexpected_value_type, expected: type}.

# ISO8601 strings are accepted for datetimes.
defp cast_value(:datetime, value) when is_binary(value) do
  case DateTime.from_iso8601(value) do
    {:ok, datetime, _utc_offset} ->
      {:ok, datetime}

    {:error, _reason} ->
      {:error, :unexpected_value_type, expected: :datetime}
  end
end

# Integers are interpreted as unix timestamps in milliseconds.
defp cast_value(:datetime, value) when is_integer(value) do
  case DateTime.from_unix(value, :millisecond) do
    {:ok, datetime} ->
      {:ok, datetime}

    {:error, _reason} ->
      {:error, :unexpected_value_type, expected: :datetime}
  end
end

defp cast_value(:datetime, _value) do
  {:error, :unexpected_value_type, expected: :datetime}
end

# Binary blobs are transported as base64 strings.
defp cast_value(:binaryblob, value) when is_binary(value) do
  case Base.decode64(value) do
    {:ok, decoded} ->
      {:ok, decoded}

    :error ->
      {:error, :unexpected_value_type, expected: :binaryblob}
  end
end

defp cast_value(:binaryblob, _value) do
  {:error, :unexpected_value_type, expected: :binaryblob}
end

# Array types are cast element by element, failing as a whole if any
# element fails.
defp cast_value(:datetimearray, values) do
  case map_while_ok(values, &cast_value(:datetime, &1)) do
    {:ok, _mapped} = ok -> ok
    _error -> {:error, :unexpected_value_type, expected: :datetimearray}
  end
end

defp cast_value(:binaryblobarray, values) do
  case map_while_ok(values, &cast_value(:binaryblob, &1)) do
    {:ok, _mapped} = ok -> ok
    _error -> {:error, :unexpected_value_type, expected: :binaryblobarray}
  end
end

# Any other type needs no casting.
defp cast_value(_anytype, anyvalue) do
  {:ok, anyvalue}
end

# Maps `fun` over `values`, stopping at the first non-{:ok, _} result and
# returning it; otherwise returns {:ok, mapped_values} in original order.
defp map_while_ok(values, fun) do
  reduced =
    Enum.reduce_while(values, {:ok, []}, fn value, {:ok, acc} ->
      case fun.(value) do
        {:ok, mapped} -> {:cont, {:ok, [mapped | acc]}}
        error -> {:halt, error}
      end
    end)

  case reduced do
    {:ok, reversed} -> {:ok, Enum.reverse(reversed)}
    error -> error
  end
end
# Wraps values that need a dedicated BSON representation; everything else
# passes through unchanged. {0, binary} is the generic binary subtype.
defp wrap_to_bson_struct(:binaryblob, value), do: {0, value}

defp wrap_to_bson_struct(:binaryblobarray, values) do
  for value <- values, do: wrap_to_bson_struct(:binaryblob, value)
end

defp wrap_to_bson_struct(_anytype, value), do: value
# TODO: we should probably allow delete for every path regardless of the interface type
# just for maintenance reasons
# Deletes the value at `no_prefix_path` on a server-owned interface by
# storing a nil value; for properties an unset is also sent to the device.
def delete_interface_values(realm_name, encoded_device_id, interface, no_prefix_path) do
  with {:ok, client} <- Database.connect(realm: realm_name),
       {:ok, device_id} <- Device.decode_device_id(encoded_device_id),
       {:ok, major_version} <- DeviceQueries.interface_version(client, device_id, interface),
       {:ok, interface_row} <-
         InterfaceQueries.retrieve_interface_row(client, interface, major_version),
       {:ok, interface_descriptor} <- InterfaceDescriptor.from_db_result(interface_row),
       # Only server-owned interfaces may be modified from here.
       {:ownership, :server} <- {:ownership, interface_descriptor.ownership},
       path <- "/" <> no_prefix_path,
       {:ok, [endpoint_id]} <- get_endpoint_ids(interface_descriptor.automaton, path) do
    mapping = Queries.retrieve_mapping(client, interface_descriptor.interface_id, endpoint_id)

    # A nil value (and nil timestamp) marks the path as deleted.
    Queries.insert_value_into_db(
      client,
      device_id,
      interface_descriptor,
      endpoint_id,
      mapping,
      path,
      nil,
      nil,
      []
    )

    case interface_descriptor.type do
      :properties ->
        ensure_unset(realm_name, device_id, interface, path)

      :datastream ->
        :ok
    end
  else
    {:ownership, :device} ->
      {:error, :cannot_write_to_device_owned}

    {:error, :endpoint_guess_not_allowed} ->
      {:error, :read_only_resource}

    {:error, reason} ->
      {:error, reason}
  end
end
# GET on an individual-aggregation interface root: collects the values of
# every endpoint of the interface and inflates them into a nested map.
defp do_get_interface_values!(client, device_id, :individual, interface_row, opts) do
  endpoint_rows =
    Queries.retrieve_all_endpoint_ids_for_interface!(client, interface_row[:interface_id])

  values_map =
    Enum.reduce(endpoint_rows, %{}, fn endpoint_row, values ->
      # TODO: we can do this by using just one query without any filter on the endpoint
      value =
        retrieve_endpoint_values(
          client,
          device_id,
          Aggregation.from_int(interface_row[:aggregation]),
          Type.from_int(interface_row[:type]),
          interface_row,
          endpoint_row[:endpoint_id],
          endpoint_row,
          "/",
          opts
        )

      Map.merge(values, value)
    end)

  {:ok, %InterfaceValues{data: MapTree.inflate_tree(values_map)}}
end
# GET on an object-aggregation interface root: delegates to the 9-arity
# clause with no specific endpoint, propagating explicit_timestamp.
defp do_get_interface_values!(client, device_id, :object, interface_row, opts) do
  # We need to know if mappings have explicit_timestamp set, so we retrieve it from the
  # first one.
  endpoint =
    Queries.retrieve_all_endpoint_ids_for_interface!(client, interface_row[:interface_id])
    |> CQEx.Result.head()

  mapping =
    Queries.retrieve_mapping(client, interface_row[:interface_id], endpoint[:endpoint_id])

  do_get_interface_values!(
    client,
    device_id,
    Aggregation.from_int(interface_row[:aggregation]),
    Type.from_int(interface_row[:type]),
    interface_row,
    nil,
    nil,
    "/",
    %{opts | explicit_timestamp: mapping.explicit_timestamp}
  )
end
# GET on individual properties: every resolved endpoint id must yield at
# least one stored value, otherwise the path is considered not found.
defp do_get_interface_values!(
       client,
       device_id,
       :individual,
       :properties,
       interface_row,
       endpoint_ids,
       endpoint_query,
       path,
       opts
     ) do
  {status, result} =
    List.foldl(endpoint_ids, {:ok, %{}}, fn endpoint_id, {status, values} ->
      if status == :ok do
        endpoint_row = Queries.execute_value_type_query(client, endpoint_query, endpoint_id)

        value =
          retrieve_endpoint_values(
            client,
            device_id,
            :individual,
            :properties,
            interface_row,
            endpoint_id,
            endpoint_row,
            path,
            opts
          )

        # No stored values for this endpoint: the requested path does not exist.
        if value != %{} do
          {:ok, Map.merge(values, value)}
        else
          {:error, :path_not_found}
        end
      else
        # A previous endpoint already failed: carry the error through.
        {status, values}
      end
    end)

  if status == :ok do
    # A leaf value is stored under the "" key; otherwise inflate the flat
    # map into a nested tree.
    individual_value = Map.get(result, "")

    data =
      if individual_value != nil do
        individual_value
      else
        MapTree.inflate_tree(result)
      end

    {:ok, %InterfaceValues{data: data}}
  else
    {:error, result}
  end
end
# GET on an individual datastream: a datastream path resolves to exactly
# one endpoint (the match below asserts it).
defp do_get_interface_values!(
       client,
       device_id,
       :individual,
       :datastream,
       interface_row,
       endpoint_ids,
       endpoint_query,
       path,
       opts
     ) do
  [endpoint_id] = endpoint_ids

  endpoint_row = Queries.execute_value_type_query(client, endpoint_query, endpoint_id)

  retrieve_endpoint_values(
    client,
    device_id,
    :individual,
    :datastream,
    interface_row,
    endpoint_id,
    endpoint_row,
    path,
    opts
  )
end
# GET on an object aggregated datastream, for either the root or a
# specific path.
defp do_get_interface_values!(
       client,
       device_id,
       :object,
       :datastream,
       interface_row,
       _endpoint_ids,
       _endpoint_query,
       path,
       opts
     ) do
  # We need to know if mappings have explicit_timestamp set, so we retrieve it from the
  # first one.
  endpoint =
    Queries.retrieve_all_endpoint_ids_for_interface!(client, interface_row[:interface_id])
    |> CQEx.Result.head()

  mapping =
    Queries.retrieve_mapping(client, interface_row[:interface_id], endpoint[:endpoint_id])

  endpoint_rows =
    Queries.retrieve_all_endpoints_for_interface!(client, interface_row[:interface_id])

  interface_values =
    retrieve_endpoint_values(
      client,
      device_id,
      :object,
      :datastream,
      interface_row,
      nil,
      endpoint_rows,
      path,
      %{opts | explicit_timestamp: mapping.explicit_timestamp}
    )

  # An empty result on the root just means no data yet; on a specific path
  # it means the path does not exist.
  cond do
    path == "/" and interface_values == {:error, :path_not_found} ->
      {:ok, %InterfaceValues{data: %{}}}

    path != "/" and elem(interface_values, 1).data == [] ->
      {:error, :path_not_found}

    true ->
      interface_values
  end
end
# Strips `base_path` (and one remaining leading slash, if any) from
# `path`, producing the relative key used in values maps.
# TODO: optimize: do not use string replace
defp simplify_path(base_path, path) do
  path
  |> String.replace_prefix(base_path, "")
  |> String.replace_prefix("/", "")
end
# Resolves `path` to its endpoint ids through the interface automaton.
# Incomplete (guessed) paths are only accepted with allow_guess: true.
defp get_endpoint_ids(automaton, path, opts \\ []) do
  allow_guess = Keyword.get(opts, :allow_guess, false)

  case EndpointsAutomaton.resolve_path(path, automaton) do
    {:ok, endpoint_id} ->
      {:ok, [endpoint_id]}

    {:guessed, endpoint_ids} when allow_guess ->
      {:ok, endpoint_ids}

    {:guessed, _endpoint_ids} ->
      {:error, :endpoint_guess_not_allowed}

    {:error, :not_found} ->
      {:error, :endpoint_not_found}
  end
end
# Last segment of an endpoint ("/a/b/c" -> "c"), used as a human-friendly
# column name.
defp column_pretty_name(endpoint), do: endpoint |> String.split("/") |> List.last()
# Individual datastream read on the interface root ("/"): for every known
# path of the endpoint, fetch its latest value and collect the results in
# a flat map keyed by the simplified path.
defp retrieve_endpoint_values(
       client,
       device_id,
       :individual,
       :datastream,
       interface_row,
       endpoint_id,
       endpoint_row,
       "/",
       opts
     ) do
  path = "/"

  interface_id = interface_row[:interface_id]

  values =
    Queries.retrieve_all_endpoint_paths!(client, device_id, interface_id, endpoint_id)
    |> Enum.reduce(%{}, fn row, values_map ->
      if String.starts_with?(row[:path], path) do
        [{:path, row_path}] = row

        last_value =
          Queries.last_datastream_value!(
            client,
            device_id,
            interface_row,
            endpoint_row,
            endpoint_id,
            row_path,
            opts
          )

        case last_value do
          :empty_dataset ->
            # Bug fix: this used to return %{}, resetting the reduce
            # accumulator and discarding values collected for previous
            # paths; keep the accumulator instead.
            values_map

          [
            {:value_timestamp, tstamp},
            {:reception_timestamp, reception},
            _,
            {_, v}
          ] ->
            simplified_path = simplify_path(path, row_path)

            nice_value =
              AstarteValue.to_json_friendly(
                v,
                ValueType.from_int(endpoint_row[:value_type]),
                allow_bigintegers: true
              )

            Map.put(values_map, simplified_path, %{
              "value" => nice_value,
              "timestamp" =>
                AstarteValue.to_json_friendly(
                  tstamp,
                  :datetime,
                  keep_milliseconds: opts.keep_milliseconds
                ),
              "reception_timestamp" =>
                AstarteValue.to_json_friendly(
                  reception,
                  :datetime,
                  keep_milliseconds: opts.keep_milliseconds
                )
            })
        end
      else
        values_map
      end
    end)

  values
end
# Object aggregated datastream read on the interface root ("/"): lists
# every stored path; with one path its full result is returned (possibly
# nested under the path), with multiple paths only the latest sample of
# each is returned, inflated into a tree.
defp retrieve_endpoint_values(
       client,
       device_id,
       :object,
       :datastream,
       interface_row,
       nil,
       endpoint_row,
       "/",
       opts
     ) do
  path = "/"

  interface_id = interface_row[:interface_id]

  # Object aggregated data is stored under the interface-level endpoint id
  # (computed with an empty endpoint string).
  endpoint_id = CQLUtils.endpoint_id(interface_row[:name], interface_row[:major_version], "")

  {count, paths} =
    Queries.retrieve_all_endpoint_paths!(client, device_id, interface_id, endpoint_id)
    |> Enum.reduce({0, []}, fn row, {count, all_paths} ->
      if String.starts_with?(row[:path], path) do
        [{:path, row_path}] = row

        {count + 1, [row_path | all_paths]}
      else
        {count, all_paths}
      end
    end)

  cond do
    count == 0 ->
      {:error, :path_not_found}

    count == 1 ->
      [only_path] = paths

      with {:ok,
            %Astarte.AppEngine.API.Device.InterfaceValues{data: values, metadata: metadata}} <-
             retrieve_endpoint_values(
               client,
               device_id,
               :object,
               :datastream,
               interface_row,
               endpoint_id,
               endpoint_row,
               only_path,
               opts
             ),
           {:ok, interface_values} <-
             get_interface_values_from_path(values, metadata, path, only_path) do
        {:ok, interface_values}
      else
        err ->
          Logger.warn("An error occurred while retrieving endpoint values: #{inspect(err)}",
            tag: "retrieve_endpoint_values_error"
          )

          err
      end

    count > 1 ->
      values_map =
        Enum.reduce(paths, %{}, fn a_path, values_map ->
          # Only the latest sample of each path is fetched (limit: 1).
          {:ok, %Astarte.AppEngine.API.Device.InterfaceValues{data: values}} =
            retrieve_endpoint_values(
              client,
              device_id,
              :object,
              :datastream,
              interface_row,
              endpoint_id,
              endpoint_row,
              a_path,
              %{opts | limit: 1}
            )

          case values do
            [] ->
              values_map

            [value] ->
              simplified_path = simplify_path(path, a_path)

              Map.put(values_map, simplified_path, value)
          end
        end)
        |> MapTree.inflate_tree()

      {:ok, %InterfaceValues{data: values_map}}
  end
end
# Object aggregated datastream read on a specific path: builds the column
# list and metadata from the interface endpoints, fetches the samples,
# then downsamples/packs them per the requested options.
defp retrieve_endpoint_values(
       client,
       device_id,
       :object,
       :datastream,
       interface_row,
       _endpoint_id,
       endpoint_rows,
       path,
       opts
     ) do
  # FIXME: reading result wastes atoms: new atoms are allocated every time a new table is seen
  # See cqerl_protocol.erl:330 (binary_to_atom), strings should be used when dealing with large schemas
  {columns, column_metadata, downsample_column_atom} =
    Enum.reduce(endpoint_rows, {"", %{}, nil}, fn endpoint,
                                                  {query_acc, atoms_map,
                                                   prev_downsample_column_atom} ->
      endpoint_name = endpoint[:endpoint]
      column_name = CQLUtils.endpoint_to_db_column_name(endpoint_name)

      value_type = endpoint[:value_type] |> ValueType.from_int()

      next_query_acc = "#{query_acc} #{column_name}, "
      column_atom = String.to_atom(column_name)
      pretty_name = column_pretty_name(endpoint_name)

      metadata = %{pretty_name: pretty_name, value_type: value_type}
      next_atom_map = Map.put(atoms_map, column_atom, metadata)

      # Remember which column (if any) the caller asked to downsample on.
      if opts.downsample_key == pretty_name do
        {next_query_acc, next_atom_map, column_atom}
      else
        {next_query_acc, next_atom_map, prev_downsample_column_atom}
      end
    end)

  {:ok, count, values} =
    Queries.retrieve_object_datastream_values(
      client,
      device_id,
      interface_row,
      path,
      columns,
      opts
    )

  values
  |> maybe_downsample_to(count, :object, %InterfaceValuesOptions{
    opts
    | downsample_key: downsample_column_atom
  })
  |> pack_result(:object, :datastream, column_metadata, opts)
end
# Individual datastream read on a specific path: fetches the samples,
# optionally downsamples them and packs them per the requested format.
defp retrieve_endpoint_values(
       client,
       device_id,
       :individual,
       :datastream,
       interface_row,
       endpoint_id,
       endpoint_row,
       path,
       opts
     ) do
  {:ok, count, values} =
    Queries.retrieve_datastream_values(
      client,
      device_id,
      interface_row,
      endpoint_row,
      endpoint_id,
      path,
      opts
    )

  values
  |> maybe_downsample_to(count, :individual, opts)
  |> pack_result(:individual, :datastream, endpoint_row, path, opts)
end
# Individual properties read: returns a flat map of simplified path =>
# JSON-friendly value for every property stored under `path`.
defp retrieve_endpoint_values(
       client,
       device_id,
       :individual,
       :properties,
       interface_row,
       endpoint_id,
       endpoint_row,
       path,
       _opts
     ) do
  values =
    Queries.all_properties_for_endpoint!(
      client,
      device_id,
      interface_row,
      endpoint_row,
      endpoint_id
    )
    |> Enum.reduce(%{}, fn row, values_map ->
      if String.starts_with?(row[:path], path) do
        [{:path, row_path}, {_, row_value}] = row

        simplified_path = simplify_path(path, row_path)

        nice_value =
          AstarteValue.to_json_friendly(
            row_value,
            ValueType.from_int(endpoint_row[:value_type]),
            allow_bigintegers: true
          )

        Map.put(values_map, simplified_path, nice_value)
      else
        values_map
      end
    end)

  values
end
# Builds the InterfaceValues result for a single stored path: when the
# path equals the requested base the values are returned as-is, otherwise
# they are nested under the simplified path.
defp get_interface_values_from_path([], _metadata, _path, _only_path) do
  {:ok, %InterfaceValues{data: %{}}}
end

defp get_interface_values_from_path(values, metadata, path, only_path) when is_list(values) do
  case simplify_path(path, only_path) do
    "" ->
      {:ok, %InterfaceValues{data: values, metadata: metadata}}

    simplified_path ->
      inflated = MapTree.inflate_tree(%{simplified_path => values})
      {:ok, %InterfaceValues{data: inflated, metadata: metadata}}
  end
end

defp get_interface_values_from_path(values, metadata, _path, _only_path) do
  {:ok, %InterfaceValues{data: values, metadata: metadata}}
end
# No downsampling requested: pass values through untouched.
defp maybe_downsample_to(values, _count, _aggregation, %InterfaceValuesOptions{
       downsample_to: nil
     }) do
  values
end

# Downsampling requested but the sample count is unknown: warn and skip.
defp maybe_downsample_to(values, nil, _aggregation, _opts) do
  # TODO: we can't downsample an object without a valid count, propagate an error changeset
  # when we start using changeset consistently here
  _ = Logger.warn("No valid count in maybe_downsample_to.", tag: "downsample_invalid_count")

  values
end

# Object downsampling needs a key to pick the y-axis column: warn and skip.
defp maybe_downsample_to(values, _count, :object, %InterfaceValuesOptions{downsample_key: nil}) do
  # TODO: we can't downsample an object without downsample_key, propagate an error changeset
  # when we start using changeset consistently here
  _ =
    Logger.warn("No valid downsample_key found in maybe_downsample_to.",
      tag: "downsample_invalid_key"
    )

  values
end

# Object aggregated downsampling: the selected column is the y axis, the
# (explicit or reception) timestamp is the x axis.
defp maybe_downsample_to(values, count, :object, %InterfaceValuesOptions{
       downsample_to: downsampled_size,
       downsample_key: downsample_key,
       explicit_timestamp: explicit_timestamp
     })
     when downsampled_size > 2 do
  timestamp_column =
    if explicit_timestamp do
      :value_timestamp
    else
      :reception_timestamp
    end

  # The -2 adjustments presumably account for the first/last samples kept
  # by the algorithm — see the ExLTTB docs to confirm.
  avg_bucket_size = max(1, (count - 2) / (downsampled_size - 2))

  sample_to_x_fun = fn sample -> Keyword.get(sample, timestamp_column) end
  sample_to_y_fun = fn sample -> Keyword.get(sample, downsample_key) end
  xy_to_sample_fun = fn x, y -> [{timestamp_column, x}, {downsample_key, y}] end

  ExLTTB.Stream.downsample(
    values,
    avg_bucket_size,
    sample_to_x_fun: sample_to_x_fun,
    sample_to_y_fun: sample_to_y_fun,
    xy_to_sample_fun: xy_to_sample_fun
  )
end

# Individual downsampling: the single non-timestamp column is the y axis.
defp maybe_downsample_to(values, count, :individual, %InterfaceValuesOptions{
       downsample_to: downsampled_size
     })
     when downsampled_size > 2 do
  avg_bucket_size = max(1, (count - 2) / (downsampled_size - 2))

  sample_to_x_fun = fn sample -> Keyword.get(sample, :value_timestamp) end

  sample_to_y_fun = fn sample ->
    # Whatever column is left after dropping the timestamp keys is the value.
    timestamp_keys = [:value_timestamp, :reception_timestamp, :reception_timestamp_submillis]
    [{_key, value}] = Keyword.drop(sample, timestamp_keys)
    value
  end

  xy_to_sample_fun = fn x, y -> [{:value_timestamp, x}, {:generic_key, y}] end

  ExLTTB.Stream.downsample(
    values,
    avg_bucket_size,
    sample_to_x_fun: sample_to_x_fun,
    sample_to_y_fun: sample_to_y_fun,
    xy_to_sample_fun: xy_to_sample_fun
  )
end
# Packs individual datastream samples in "structured" format: a list of
# %{"timestamp" => ..., "value" => ...} maps; empty means path not found.
defp pack_result(
       values,
       :individual,
       :datastream,
       endpoint_row,
       _path,
       %{format: "structured"} = opts
     ) do
  values_array =
    for value <- values do
      # Row shape: value_timestamp first, the value itself last.
      [{:value_timestamp, tstamp}, _, _, {_, v}] = value

      %{
        "timestamp" =>
          AstarteValue.to_json_friendly(
            tstamp,
            :datetime,
            keep_milliseconds: opts.keep_milliseconds
          ),
        "value" =>
          AstarteValue.to_json_friendly(v, ValueType.from_int(endpoint_row[:value_type]), [])
      }
    end

  if values_array != [] do
    {:ok,
     %InterfaceValues{
       data: values_array
     }}
  else
    {:error, :path_not_found}
  end
end
# Packs individual datastream samples in "table" format: header metadata
# plus rows of [timestamp, value]; empty means path not found.
defp pack_result(
       values,
       :individual,
       :datastream,
       endpoint_row,
       path,
       %{format: "table"} = opts
     ) do
  # The value column takes its name from the last path segment.
  value_name =
    path
    |> String.split("/")
    |> List.last()

  values_array =
    for value <- values do
      [{:value_timestamp, tstamp}, _, _, {_, v}] = value

      [
        AstarteValue.to_json_friendly(tstamp, :datetime, []),
        AstarteValue.to_json_friendly(
          v,
          ValueType.from_int(endpoint_row[:value_type]),
          keep_milliseconds: opts.keep_milliseconds
        )
      ]
    end

  if values_array != [] do
    {:ok,
     %InterfaceValues{
       metadata: %{
         "columns" => %{"timestamp" => 0, value_name => 1},
         "table_header" => ["timestamp", value_name]
       },
       data: values_array
     }}
  else
    {:error, :path_not_found}
  end
end
# Packs individual datastream samples in "disjoint_tables" format: one
# "value" series of [value, timestamp] pairs; empty means path not found.
defp pack_result(
       values,
       :individual,
       :datastream,
       endpoint_row,
       _path,
       %{format: "disjoint_tables"} = opts
     ) do
  values_array =
    for value <- values do
      [{:value_timestamp, tstamp}, _, _, {_, v}] = value

      # Note: unlike the "table" format, each entry is [value, timestamp].
      [
        AstarteValue.to_json_friendly(v, ValueType.from_int(endpoint_row[:value_type]), []),
        AstarteValue.to_json_friendly(
          tstamp,
          :datetime,
          keep_milliseconds: opts.keep_milliseconds
        )
      ]
    end

  if values_array != [] do
    {:ok,
     %InterfaceValues{
       data: %{"value" => values_array}
     }}
  else
    {:error, :path_not_found}
  end
end
# Packs object aggregated samples in "table" format: builds the header
# from the first row's columns, then emits one [timestamp | values] row
# per sample.
defp pack_result(
       values,
       :object,
       :datastream,
       column_metadata,
       %{format: "table"} = opts
     ) do
  timestamp_column =
    if opts.explicit_timestamp do
      :value_timestamp
    else
      :reception_timestamp
    end

  {_cols_count, columns, reverse_table_header} =
    Queries.first_result_row(values)
    |> List.foldl({1, %{"timestamp" => 0}, ["timestamp"]}, fn {column, _column_value},
                                                              {next_index, acc, list_acc} ->
      pretty_name = column_metadata[column][:pretty_name]

      # Columns with no metadata (e.g. timestamps) are skipped so only
      # data columns end up in the header.
      if pretty_name != nil and pretty_name != "timestamp" do
        {next_index + 1, Map.put(acc, pretty_name, next_index), [pretty_name | list_acc]}
      else
        {next_index, acc, list_acc}
      end
    end)

  table_header = Enum.reverse(reverse_table_header)

  values_array =
    for value <- values do
      # The row is built in reverse on top of the timestamp, then reversed
      # once, yielding [timestamp, v1, v2, ...].
      base_array_entry = [
        AstarteValue.to_json_friendly(
          value[timestamp_column],
          :datetime,
          keep_milliseconds: opts.keep_milliseconds
        )
      ]

      List.foldl(value, base_array_entry, fn {column, column_value}, acc ->
        case Map.fetch(column_metadata, column) do
          {:ok, metadata} ->
            %{
              value_type: value_type
            } = metadata

            json_friendly_value = AstarteValue.to_json_friendly(column_value, value_type, [])

            [json_friendly_value | acc]

          :error ->
            acc
        end
      end)
      |> Enum.reverse()
    end

  {:ok,
   %InterfaceValues{
     metadata: %{"columns" => columns, "table_header" => table_header},
     data: values_array
   }}
end
# Packs object aggregated samples in "disjoint_tables" format: one series
# per column, each a list of [value, timestamp] pairs.
defp pack_result(
       values,
       :object,
       :datastream,
       column_metadata,
       %{format: "disjoint_tables"} = opts
     ) do
  timestamp_column =
    if opts.explicit_timestamp do
      :value_timestamp
    else
      :reception_timestamp
    end

  # Each series is built in reverse while folding, then reversed once below.
  reversed_columns_map =
    Enum.reduce(values, %{}, fn value, columns_acc ->
      List.foldl(value, columns_acc, fn {column, column_value}, acc ->
        case Map.fetch(column_metadata, column) do
          {:ok, metadata} ->
            %{
              pretty_name: pretty_name,
              value_type: value_type
            } = metadata

            json_friendly_value = AstarteValue.to_json_friendly(column_value, value_type, [])

            # NOTE(review): the series tail is read from columns_acc (the
            # row-level accumulator) rather than acc; equivalent as long as
            # each pretty_name occurs once per row — confirm.
            column_list = [
              [
                json_friendly_value,
                AstarteValue.to_json_friendly(
                  value[timestamp_column],
                  :datetime,
                  keep_milliseconds: opts.keep_milliseconds
                )
              ]
              | Map.get(columns_acc, pretty_name, [])
            ]

            Map.put(acc, pretty_name, column_list)

          :error ->
            acc
        end
      end)
    end)

  columns =
    Enum.reduce(reversed_columns_map, %{}, fn {column_name, column_values}, acc ->
      Map.put(acc, column_name, Enum.reverse(column_values))
    end)

  {:ok,
   %InterfaceValues{
     data: columns
   }}
end
# Packs object aggregated samples in "structured" format: one map per
# sample, with "timestamp" plus a key for each column.
defp pack_result(
       values,
       :object,
       :datastream,
       column_metadata,
       %{format: "structured"} = opts
     ) do
  timestamp_column =
    if opts.explicit_timestamp do
      :value_timestamp
    else
      :reception_timestamp
    end

  values_list =
    for value <- values do
      base_array_entry = %{
        "timestamp" =>
          AstarteValue.to_json_friendly(
            value[timestamp_column],
            :datetime,
            keep_milliseconds: opts.keep_milliseconds
          )
      }

      List.foldl(value, base_array_entry, fn {column, column_value}, acc ->
        case Map.fetch(column_metadata, column) do
          {:ok, metadata} ->
            %{
              pretty_name: pretty_name,
              value_type: value_type
            } = metadata

            json_friendly_value = AstarteValue.to_json_friendly(column_value, value_type, [])

            Map.put(acc, pretty_name, json_friendly_value)

          :error ->
            # Columns without metadata (timestamps) are not emitted.
            acc
        end
      end)
    end

  {:ok, %InterfaceValues{data: values_list}}
end
def device_alias_to_device_id(realm_name, device_alias) do
with {:ok, client} <- Database.connect(realm: realm_name) do
Queries.device_alias_to_device_id(client, device_alias)
else
not_ok ->
_ = Logger.warn("Database error: #{inspect(not_ok)}.", tag: "db_error")
{:error, :database_error}
end
end
end
| 29.253913 | 105 | 0.603591 |
f749fa1ea0e618c3d2fcff86da65526c6589cb78 | 902 | ex | Elixir | clients/web_security_scanner/lib/google_api/web_security_scanner/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/web_security_scanner/lib/google_api/web_security_scanner/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/web_security_scanner/lib/google_api/web_security_scanner/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.WebSecurityScanner.V1 do
  @moduledoc """
  API client metadata for GoogleApi.WebSecurityScanner.V1.
  """

  # Revision of the discovery document this client was generated from.
  @discovery_revision "20220319"

  @doc false
  def discovery_revision, do: @discovery_revision
end
| 33.407407 | 74 | 0.763858 |
f74a5de9b113a1a8f8cd9873228a36e85fc77d91 | 3,375 | ex | Elixir | lib/appsignal/instrumentation/decorators.ex | cblavier/appsignal-elixir | 6e191a9e50acab3537a6331cd08c269b9e0fd7f4 | [
"MIT"
] | null | null | null | lib/appsignal/instrumentation/decorators.ex | cblavier/appsignal-elixir | 6e191a9e50acab3537a6331cd08c269b9e0fd7f4 | [
"MIT"
] | null | null | null | lib/appsignal/instrumentation/decorators.ex | cblavier/appsignal-elixir | 6e191a9e50acab3537a6331cd08c269b9e0fd7f4 | [
"MIT"
] | null | null | null | defmodule Appsignal.Instrumentation.Decorators do
  @moduledoc false

  require Appsignal.Utils

  # The span implementation is configurable so tests can substitute a fake.
  @span Appsignal.Utils.compile_env(:appsignal, :appsignal_span, Appsignal.Span)

  # Register the decorators this module provides (decorator package).
  use Decorator.Define,
    instrument: 0,
    instrument: 1,
    transaction: 0,
    transaction: 1,
    transaction_event: 0,
    transaction_event: 1,
    channel_action: 0

  import Appsignal.Utils, only: [module_name: 1]

  # `@decorate instrument(:namespace)` — normalize an atom namespace to a
  # string and delegate to the binary clause.
  def instrument(namespace, body, context) when is_atom(namespace) do
    namespace
    |> Atom.to_string()
    |> instrument(body, context)
  end

  # `@decorate instrument("namespace")` — instrument under the given namespace.
  def instrument(namespace, body, context) when is_binary(namespace) do
    do_instrument(body, Map.put(context, :namespace, namespace))
  end

  # `@decorate instrument()` — instrument without overriding the namespace.
  def instrument(body, context) do
    do_instrument(body, context)
  end

  # Wraps the decorated body in a span named "Module.function_arity" and
  # sets the requested namespace on the span.
  defp do_instrument(body, %{module: module, name: name, arity: arity, namespace: namespace}) do
    quote do
      Appsignal.Instrumentation.instrument(
        "#{module_name(unquote(module))}.#{unquote(name)}_#{unquote(arity)}",
        fn span ->
          _ = unquote(@span).set_namespace(span, unquote(namespace))
          unquote(body)
        end
      )
    end
  end

  # Same, but without the arity suffix (context carries no :arity key).
  defp do_instrument(body, %{module: module, name: name, namespace: namespace}) do
    quote do
      Appsignal.Instrumentation.instrument(
        "#{module_name(unquote(module))}.#{unquote(name)}",
        fn span ->
          _ = unquote(@span).set_namespace(span, unquote(namespace))
          unquote(body)
        end
      )
    end
  end

  # Span named "Module.function_arity" with an explicit event category.
  defp do_instrument(body, %{module: module, name: name, arity: arity, category: category}) do
    quote do
      Appsignal.Instrumentation.instrument(
        "#{module_name(unquote(module))}.#{unquote(name)}_#{unquote(arity)}",
        unquote(category),
        fn -> unquote(body) end
      )
    end
  end

  # Span named "Module.function_arity", default category/namespace.
  defp do_instrument(body, %{module: module, name: name, arity: arity}) do
    quote do
      Appsignal.Instrumentation.instrument(
        "#{module_name(unquote(module))}.#{unquote(name)}_#{unquote(arity)}",
        fn -> unquote(body) end
      )
    end
  end

  # Span named "Module.function" (no arity in the context).
  defp do_instrument(body, %{module: module, name: name}) do
    quote do
      Appsignal.Instrumentation.instrument(
        "#{module_name(unquote(module))}.#{unquote(name)}",
        fn -> unquote(body) end
      )
    end
  end

  # `@decorate transaction()` — root span in the "background_job" namespace.
  def transaction(body, context) do
    transaction("background_job", body, context)
  end

  # `@decorate transaction(:namespace)` — atom namespaces become strings.
  def transaction(namespace, body, context) when is_atom(namespace) do
    namespace
    |> Atom.to_string()
    |> transaction(body, context)
  end

  # Wraps the decorated body in a *root* span for the given namespace.
  def transaction(namespace, body, %{module: module, name: name, arity: arity})
      when is_binary(namespace) do
    quote do
      Appsignal.Instrumentation.instrument_root(
        unquote(namespace),
        "#{module_name(unquote(module))}.#{unquote(name)}_#{unquote(arity)}",
        fn -> unquote(body) end
      )
    end
  end

  # `@decorate transaction_event()` — alias for instrument/2.
  def transaction_event(body, context) do
    instrument(body, context)
  end

  # `@decorate transaction_event(category)` — instrument with a category.
  def transaction_event(category, body, context) do
    do_instrument(body, Map.put(context, :category, category))
  end

  # `@decorate channel_action()` — root "channel" span named after the
  # channel module and the handled action (first decorated-function arg).
  def channel_action(body, %{module: module, args: [action, _payload, _socket]}) do
    quote do
      Appsignal.Instrumentation.instrument_root(
        "channel",
        "#{module_name(unquote(module))}.#{unquote(action)}",
        fn -> unquote(body) end
      )
    end
  end
end
| 27.217742 | 96 | 0.652148 |
f74a6875863222359b246c3f501ac703bd65482e | 1,031 | ex | Elixir | lib/kovacs/watcher.ex | praveenperera/phoenix_kovacs | 1ccfa2fbbe77360f55006255f2e6dcd7e5a3548c | [
"Apache-2.0"
] | 2 | 2015-10-10T00:34:44.000Z | 2015-10-14T06:53:24.000Z | lib/kovacs/watcher.ex | praveenperera/phoenix_kovacs | 1ccfa2fbbe77360f55006255f2e6dcd7e5a3548c | [
"Apache-2.0"
] | null | null | null | lib/kovacs/watcher.ex | praveenperera/phoenix_kovacs | 1ccfa2fbbe77360f55006255f2e6dcd7e5a3548c | [
"Apache-2.0"
] | 2 | 2015-09-30T08:08:59.000Z | 2020-03-03T22:34:25.000Z | defmodule Kovacs.Watcher do
  # Name of the fswatch executable (resolved via $PATH).
  @fswatch_path "fswatch"
  # Maximum length of a single event line read from the port.
  @max_line_length_for_each_event 2048

  # Spawns fswatch on the given list of paths and returns the watcher state
  # (port, OS pid, paths) built by Kovacs.Watcher.Data.create/3.
  def initialise(path) do
    paths = Enum.join(path, " ")

    # Read fswatch output line by line; also capture stderr and exit status.
    port = Port.open({ :spawn, '#{@fswatch_path} #{paths}' },
      [:stderr_to_stdout, :in, :exit_status, :binary, :stream, { :line, @max_line_length_for_each_event }])

    {:os_pid, pid} = Port.info(port, :os_pid)
    Kovacs.Watcher.Data.create(port, pid, path)
  end

  # Stops the watcher: closes the port and kills the fswatch OS process.
  def unload(state) do
    port = Kovacs.Watcher.Data.port(state)
    pid = Kovacs.Watcher.Data.pid(state)
    close_port(port, pid)
  end

  # Calls `on_changed_fn` with `modified_file` when the event came from this
  # watcher's own port and the file is an Elixir source file (.ex/.exs).
  # Always returns `state` unchanged.
  def get_changed_files(modified_file, active_port, state, on_changed_fn) do
    port = Kovacs.Watcher.Data.port(state)

    if active_port == port do
      if String.ends_with?(modified_file, [".ex", ".exs"]) do
        on_changed_fn.(modified_file)
      end
    end

    state
  end

  # Closes the port (when present) and force-kills the OS process so no
  # orphan fswatch keeps running. Returns :ok when there is no port;
  # otherwise returns the System.cmd/3 result.
  defp close_port(port, pid) do
    case port do
      nil ->
        :ok

      port ->
        Port.close(port)
        System.cmd("kill", ["#{pid}"], [])
    end
  end
end
f74a6907594139a5ca80739d86d6ff34c162c3b0 | 89 | ex | Elixir | lib/balalaika_bear/status.ex | ayrat-playground/balalaika_bear | bcccdd4a0caf075e133ef4f162eb13e3d28b2d65 | [
"MIT"
] | 5 | 2018-04-30T09:48:17.000Z | 2020-05-01T10:20:15.000Z | lib/balalaika_bear/status.ex | BalalaikaIndustries/balalaika_bear | bcccdd4a0caf075e133ef4f162eb13e3d28b2d65 | [
"MIT"
] | 7 | 2018-04-28T06:39:10.000Z | 2018-04-28T07:31:59.000Z | lib/balalaika_bear/status.ex | ayrat-playground/balalaika_bear | bcccdd4a0caf075e133ef4f162eb13e3d28b2d65 | [
"MIT"
] | 2 | 2018-04-27T19:42:04.000Z | 2020-05-01T10:20:02.000Z | defmodule BalalaikaBear.Status do
use BalalaikaBear.Macro.API, namespace: "status"
end
| 22.25 | 50 | 0.808989 |
f74aa130c365037d8199f97a872740bec8ac79f1 | 1,144 | ex | Elixir | lib/offer_hunters/comments/create.ex | Ryandls/offer_hunters-backend | abedff162d8623e2fbaa4c5e4a518f1726bef436 | [
"MIT"
] | 3 | 2021-10-01T21:13:02.000Z | 2021-11-05T22:25:55.000Z | lib/offer_hunters/comments/create.ex | Ryandls/offer_hunters-backend | abedff162d8623e2fbaa4c5e4a518f1726bef436 | [
"MIT"
] | null | null | null | lib/offer_hunters/comments/create.ex | Ryandls/offer_hunters-backend | abedff162d8623e2fbaa4c5e4a518f1726bef436 | [
"MIT"
] | null | null | null | defmodule OfferHunters.Comments.Create do
@moduledoc """
Module for create comments
"""
alias OfferHunters.{Comment, Error, Offer, Repo, User}
def call(%{} = params) do
case validate_ids(params) do
{:ok, _message} ->
params
|> Comment.changeset()
|> Repo.insert()
|> handle_insert()
_ ->
{:error, Error.build(:bad_request, "Invalid Ids")}
end
end
defp handle_insert({:ok, %Comment{id: id}}),
do: {:ok, Repo.preload(Repo.get(Comment, id), [:user, :offer])}
defp handle_insert({:error, result}), do: {:error, Error.build(:bad_request, result)}
defp validate_ids(%{
"comment" => _comment,
"offer_id" => offer_id,
"user_id" => user_id,
"created_date" => _created_date
}) do
with {:ok, %Offer{}} <- OfferHunters.get_offer_by_id(offer_id),
{:ok, %User{}} <- OfferHunters.get_user_by_id(user_id),
do: {:ok, "Id's verified"}
end
end
# map = %{
# comment: "sla mano, banana pra vcs",
# offer_id: "1e5313c8-374a-4d1f-b539-d859eb219e43",
# user_id: "4c0147c4-4dd6-4217-ba1c-1b6ff65b173d"
# }
| 27.238095 | 87 | 0.596154 |
f74af3c37d0d338f78cd40fa18b59e7b3bb3b3af | 265 | ex | Elixir | examples/basic/NumberAdder.ex | fxn/nimler | 7038b2b6fcc41c013f835e929844ef648aa47d62 | [
"MIT"
] | 83 | 2019-10-20T12:04:33.000Z | 2022-03-17T01:04:26.000Z | examples/basic/NumberAdder.ex | fxn/nimler | 7038b2b6fcc41c013f835e929844ef648aa47d62 | [
"MIT"
] | 18 | 2019-10-12T17:56:25.000Z | 2022-03-21T03:05:02.000Z | examples/basic/NumberAdder.ex | fxn/nimler | 7038b2b6fcc41c013f835e929844ef648aa47d62 | [
"MIT"
] | 6 | 2020-02-21T14:00:30.000Z | 2021-11-06T04:18:38.000Z | defmodule NumberAdder do
@on_load :init
def init(), do: :erlang.load_nif(to_charlist(Path.join(Path.dirname(__ENV__.file), 'libnif')), 0)
def add_numbers(_, _), do: exit(:nif_library_not_loaded)
def sub_numbers(_, _), do: exit(:nif_library_not_loaded)
end
| 33.125 | 99 | 0.735849 |
f74b143c78d2e3484f443d1959055bc6ead0aba2 | 1,448 | ex | Elixir | lib/utils.ex | azohra/SlackDB | f59b5d96a27a7452b0e3285a067e8fe2c7e691e0 | [
"MIT"
] | 10 | 2019-04-25T23:33:19.000Z | 2020-12-08T20:33:59.000Z | lib/utils.ex | azohra/SlackDB | f59b5d96a27a7452b0e3285a067e8fe2c7e691e0 | [
"MIT"
] | 4 | 2019-05-03T06:36:12.000Z | 2019-12-18T21:43:32.000Z | lib/utils.ex | azohra/SlackDB | f59b5d96a27a7452b0e3285a067e8fe2c7e691e0 | [
"MIT"
] | null | null | null | defmodule SlackDB.Utils do
@moduledoc false
# @emoji_list_regex ~r/:[^:]+:/
@key_type_regex ":thumbsup:|:family:|:hear_no_evil:|:monkey:"
@key_schema ~r/(?<key_phrase>.+)\s(?<key_type>#{@key_type_regex})(?<more_metadata>.*)/
@emoji_to_metadata %{
":thumbsup:" => :voting,
":family:" => :multiple,
":hear_no_evil:" => :single_front,
":monkey:" => :single_back,
":do_not_litter:" => :constant,
":anchor:" => :undeletable
# ":octagonal_sign:" => :locked,
}
@metadata_to_emoji %{
voting: ":thumbsup:",
multiple: ":family:",
single_front: ":hear_no_evil:",
single_back: ":monkey:",
constant: ":do_not_litter:",
undeletable: ":anchor:"
# locked: ":octagonal_sign:",
}
@spec get_tokens(term(), list(atom())) :: list() | {:error, String.t()}
def get_tokens(server_name, key_list) do
try do
server =
Application.get_env(:slackdb, :servers)
|> Map.fetch!(server_name)
for key <- key_list, do: Map.fetch!(server, key)
rescue
e in KeyError -> {:error, "KeyError: couldn't find key `#{e.key}`"}
end
end
def check_schema(phrase) do
Regex.named_captures(@key_schema, phrase)
end
def metadata_to_emoji(metadata) when is_atom(metadata) do
Map.get(@metadata_to_emoji, metadata, ":question:")
end
def emoji_to_metadata(emoji) when is_binary(emoji) do
Map.get(@emoji_to_metadata, emoji, :unknown_emoji)
end
end
| 26.814815 | 88 | 0.631215 |
f74b1678e16cffc7b233528879de8f33fe89204f | 868 | exs | Elixir | mix.exs | linduxed/taskwarrior.ex | 30cec311e2b5373b5068ae37954230adceec5ed7 | [
"MIT"
] | null | null | null | mix.exs | linduxed/taskwarrior.ex | 30cec311e2b5373b5068ae37954230adceec5ed7 | [
"MIT"
] | null | null | null | mix.exs | linduxed/taskwarrior.ex | 30cec311e2b5373b5068ae37954230adceec5ed7 | [
"MIT"
] | null | null | null | defmodule Taskwarrior.MixProject do
use Mix.Project
def project do
[
app: :taskwarrior,
description: description(),
package: package(),
version: "0.4.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps(),
name: "Taskwarrior",
source_url: repo_url()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp description do
"""
Parser and manipulator of Taskwarrior data
"""
end
defp package do
[
licenses: ["MIT"],
links: %{"GitHub" => repo_url()}
]
end
defp repo_url do
"https://github.com/linduxed/taskwarrior.ex"
end
defp deps do
[
{:credo, "~> 1.2", only: [:dev, :test], runtime: false},
{:ex_doc, "~> 0.21", only: :dev, runtime: false},
{:jason, "~> 1.1"}
]
end
end
| 17.714286 | 62 | 0.544931 |
f74b1a379754c5102af53856987fbf75c74d970f | 898 | ex | Elixir | test/support/projection_assertions.ex | kianmeng/commanded-ecto-projections | aa5a99488e969fe338db780898cb8bd22599df15 | [
"MIT"
] | 57 | 2017-11-09T21:18:22.000Z | 2022-02-10T07:58:21.000Z | test/support/projection_assertions.ex | kianmeng/commanded-ecto-projections | aa5a99488e969fe338db780898cb8bd22599df15 | [
"MIT"
] | 26 | 2018-01-08T13:31:16.000Z | 2022-03-24T20:59:29.000Z | test/support/projection_assertions.ex | kianmeng/commanded-ecto-projections | aa5a99488e969fe338db780898cb8bd22599df15 | [
"MIT"
] | 25 | 2017-11-03T20:27:10.000Z | 2022-02-23T09:47:11.000Z | defmodule Commanded.Projections.ProjectionAssertions do
import ExUnit.Assertions
alias Commanded.Projections.Repo
def assert_projections(schema, expected) do
actual = Repo.all(schema) |> pluck(:name)
assert actual == expected
end
def assert_seen_event(projection_name, expected_last_seen)
when is_binary(projection_name) and is_integer(expected_last_seen) do
assert last_seen_event(projection_name) == expected_last_seen
end
def last_seen_event(projection_name) when is_binary(projection_name) do
sql = "SELECT last_seen_event_number from projection_versions where projection_name = $1"
case Ecto.Adapters.SQL.query(Repo, sql, [projection_name]) do
{:ok, %{num_rows: 0}} -> nil
{:ok, %{rows: [[last_seen]], num_rows: 1}} -> last_seen
end
end
defp pluck(enumerable, field) do
Enum.map(enumerable, &Map.get(&1, field))
end
end
| 29.933333 | 93 | 0.73608 |
f74b1b9f989168a47aa662fe16d5f2f2465b8e8c | 1,831 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/targeting_value_day_part_targeting.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/targeting_value_day_part_targeting.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/targeting_value_day_part_targeting.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Model.TargetingValueDayPartTargeting do
@moduledoc """
## Attributes
* `dayParts` (*type:* `list(GoogleApi.AdExchangeBuyer.V14.Model.TargetingValueDayPartTargetingDayPart.t)`, *default:* `nil`) -
* `timeZoneType` (*type:* `String.t`, *default:* `nil`) -
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dayParts =>
list(GoogleApi.AdExchangeBuyer.V14.Model.TargetingValueDayPartTargetingDayPart.t())
| nil,
:timeZoneType => String.t() | nil
}
field(:dayParts,
as: GoogleApi.AdExchangeBuyer.V14.Model.TargetingValueDayPartTargetingDayPart,
type: :list
)
field(:timeZoneType)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V14.Model.TargetingValueDayPartTargeting do
def decode(value, options) do
GoogleApi.AdExchangeBuyer.V14.Model.TargetingValueDayPartTargeting.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V14.Model.TargetingValueDayPartTargeting do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.696429 | 131 | 0.737848 |
f74b3caf64b0047c328042a35b4166953324c6ab | 1,602 | ex | Elixir | lib/tox/calendar.ex | hrzndhrn/tox | c7439a9eb83b9989b1866a23c8e6feedbb0b56b6 | [
"MIT"
] | 6 | 2020-07-18T06:16:10.000Z | 2021-11-12T14:12:20.000Z | lib/tox/calendar.ex | hrzndhrn/tox | c7439a9eb83b9989b1866a23c8e6feedbb0b56b6 | [
"MIT"
] | null | null | null | lib/tox/calendar.ex | hrzndhrn/tox | c7439a9eb83b9989b1866a23c8e6feedbb0b56b6 | [
"MIT"
] | null | null | null | defmodule Tox.Calendar do
@moduledoc false
# Since the first version of Tox, Date.beginning_of_week/1 is now available in
# Elixir. With this change was also Calendar.day_of_week/3 replaced by
# Calendar.day_of_week4. Tox implements beginning_of_week for Date, DateTime
# and NaiveDateTime. These implementations are using the functions of this
# module.
Code.ensure_loaded(Date)
if function_exported?(Date, :beginning_of_week, 2) do
@spec day_of_week(Date.t() | DateTime.t() | NaiveDateTime.t()) ::
{
day_of_week :: non_neg_integer(),
first_day_of_week :: non_neg_integer(),
last_day_of_week :: non_neg_integer()
}
def day_of_week(%{calendar: calendar, year: year, month: month, day: day}) do
calendar.day_of_week(year, month, day, :default)
end
else
@spec day_of_week(Date.t() | DateTime.t() | NaiveDateTime.t()) ::
{
day_of_week :: non_neg_integer(),
first_day_of_week :: non_neg_integer(),
last_day_of_week :: non_neg_integer()
}
def day_of_week(%{calendar: calendar, year: year, month: month, day: day}) do
{calendar.day_of_week(year, month, day), 1, 7}
end
end
@spec beginning_of_week(Date.t() | DateTime.t() | NaiveDateTime.t()) :: 0 | neg_integer()
def beginning_of_week(date) do
case day_of_week(date) do
{day_of_week, day_of_week, _last_day_of_week} ->
0
{day_of_week, first_day_of_week, _last_day_of_week} ->
-(day_of_week - first_day_of_week)
end
end
end
| 35.6 | 91 | 0.651061 |
f74b5180f1f58279a13397758fe122cd21e6beb1 | 219 | exs | Elixir | config/config.exs | elvanja/escript_testbed | eb92772b6223309d727716d8a6dc1c9d316bbbb4 | [
"MIT"
] | null | null | null | config/config.exs | elvanja/escript_testbed | eb92772b6223309d727716d8a6dc1c9d316bbbb4 | [
"MIT"
] | 2 | 2020-07-27T00:13:11.000Z | 2020-07-27T05:22:17.000Z | config/config.exs | elvanja/escript_testbed | eb92772b6223309d727716d8a6dc1c9d316bbbb4 | [
"MIT"
] | 2 | 2020-07-26T12:32:39.000Z | 2020-07-26T23:54:13.000Z | use Mix.Config
config :logger,
backends: [:console],
level: :warning,
utc_log: true
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:mfa]
import_config "#{Mix.env()}.exs"
| 16.846154 | 47 | 0.6621 |
f74b8be4fcb100c3e2725ae849da36ed981f1aae | 1,954 | ex | Elixir | test/support/test_schema.ex | noonie2k/ecto_adapters_dynamodb | 199c14348f5787b04b4eb997a1f00d13cc09948c | [
"Apache-2.0"
] | null | null | null | test/support/test_schema.ex | noonie2k/ecto_adapters_dynamodb | 199c14348f5787b04b4eb997a1f00d13cc09948c | [
"Apache-2.0"
] | null | null | null | test/support/test_schema.ex | noonie2k/ecto_adapters_dynamodb | 199c14348f5787b04b4eb997a1f00d13cc09948c | [
"Apache-2.0"
] | 1 | 2018-09-12T13:11:05.000Z | 2018-09-12T13:11:05.000Z | defmodule Ecto.Adapters.DynamoDB.TestSchema.Address do
use Ecto.Schema
embedded_schema do
field :street_number, :integer
field :street_name, :string
end
end
defmodule Ecto.Adapters.DynamoDB.TestSchema.Person do
use Ecto.Schema
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
alias Ecto.Adapters.DynamoDB.TestSchema.Address
schema "test_person" do
field :first_name, :string
field :last_name, :string
field :age, :integer
field :email, :string
field :password, :string
field :role, :string
field :circles, {:array, :string}
embeds_many :addresses, Address
end
def changeset(person, params \\ %{}) do
person
|> Ecto.Changeset.cast(params, [:first_name, :last_name, :age, :email, :password, :role, :circles])
|> Ecto.Changeset.validate_required([:first_name, :last_name])
|> Ecto.Changeset.unique_constraint(:id)
end
def get_fields() do
@changeset_fields
end
end
# This is used to test records that have a hash+range primary key
# However there's no way to specify this on the Ecto side: we just
# tell Ecto that the hash key (:id) is the primary key, and that the
# range key (:page_num) is a required field.
defmodule Ecto.Adapters.DynamoDB.TestSchema.BookPage do
use Ecto.Schema
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
schema "test_book_page" do
field :page_num, :integer
field :text, :string
end
def changeset(page, params \\ %{}) do
page
|> Ecto.Changeset.cast(params, [:page_num, :text])
|> Ecto.Changeset.validate_required([:page_num])
|> Ecto.Changeset.unique_constraint(:id)
# See this page for why we only put a constraint on :id even though
# the real constraint is on the full primary key of hash+range:
# https://hexdocs.pm/ecto/Ecto.Changeset.html#unique_constraint/3-complex-constraints
end
end
| 30.53125 | 103 | 0.702661 |
f74babf4b45d4907e0c3967814cbcc3ea7054a0f | 1,847 | ex | Elixir | lib/ash.ex | doawoo/ash | 9d59ae5611785bbd2668b0865c743116633afac1 | [
"MIT"
] | null | null | null | lib/ash.ex | doawoo/ash | 9d59ae5611785bbd2668b0865c743116633afac1 | [
"MIT"
] | null | null | null | lib/ash.ex | doawoo/ash | 9d59ae5611785bbd2668b0865c743116633afac1 | [
"MIT"
] | null | null | null | defmodule Ash do
@moduledoc " Types and simple helpers for Ash"
alias Ash.Resource.Actions.{Create, Destroy, Read, Update}
alias Ash.Resource.Relationships.{BelongsTo, HasMany, HasOne, ManyToMany}
@type action :: Create.t() | Read.t() | Update.t() | Destroy.t()
@type action_type :: :read | :create | :update | :destroy
@type actor :: Ash.record()
@type aggregate :: Ash.Query.Aggregate.t() | Ash.Resource.Aggregate.t()
@type aggregate_kind :: Ash.Query.Aggregate.kind()
@type api :: module
@type attribute :: Ash.Resource.Attribute.t()
@type calculation :: Ash.Resource.Calculation.t()
@type cardinality_many_relationship() :: HasMany.t() | ManyToMany.t()
@type cardinality_one_relationship() :: HasOne.t() | BelongsTo.t()
@type changeset :: Ash.Changeset.t()
@type data_layer :: module
@type data_layer_query :: struct
@type error :: struct
@type filter :: Ash.Filter.t()
@type params :: Keyword.t()
@type primary_key :: record() | map | term
@type query :: Ash.Query.t()
@type record :: struct
@type relationship :: cardinality_one_relationship() | cardinality_many_relationship()
@type relationship_cardinality :: :many | :one
@type resource :: module
@type side_loads :: term
@type sort :: list(atom | {atom, :asc} | {atom, :desc})
@type validation :: Ash.Resource.Validation.t()
require Ash.Dsl.Extension
def implements_behaviour?(module, behaviour) do
:attributes
|> module.module_info()
|> Enum.flat_map(fn
{:behaviour, value} -> List.wrap(value)
_ -> []
end)
|> Enum.any?(&(&1 == behaviour))
end
def uuid do
Ecto.UUID.generate()
end
@doc "Returns all extensions of a resource or api"
@spec extensions(resource() | api()) :: [module]
def extensions(resource) do
:persistent_term.get({resource, :extensions}, [])
end
end
| 34.203704 | 88 | 0.672442 |
f74bb2b616ba424bf8a8a91e27f436ad2ca7c3e3 | 1,117 | exs | Elixir | config/config.exs | ssbb/exshape | 7f0de9e7013cfc17d1e417bd2cf61b8be5259a63 | [
"MIT"
] | 18 | 2017-04-13T20:13:09.000Z | 2021-12-30T08:37:53.000Z | config/config.exs | ssbb/exshape | 7f0de9e7013cfc17d1e417bd2cf61b8be5259a63 | [
"MIT"
] | 16 | 2017-04-26T03:20:55.000Z | 2021-12-29T18:52:37.000Z | config/config.exs | ssbb/exshape | 7f0de9e7013cfc17d1e417bd2cf61b8be5259a63 | [
"MIT"
] | 9 | 2017-04-24T13:27:37.000Z | 2021-01-06T18:18:17.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :exshape, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:exshape, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.032258 | 73 | 0.751119 |
f74bfd8d770e4ae8a245cb24ca08d7cd69531ef4 | 535 | exs | Elixir | config/test.exs | cr0t/lexin | bff2997db52a00bf770614630b8684821ab72abc | [
"MIT"
] | null | null | null | config/test.exs | cr0t/lexin | bff2997db52a00bf770614630b8684821ab72abc | [
"MIT"
] | 6 | 2022-01-05T12:51:37.000Z | 2022-01-13T09:52:36.000Z | config/test.exs | cr0t/lexin | bff2997db52a00bf770614630b8684821ab72abc | [
"MIT"
] | null | null | null | import Config
config :lexin, :dictionaries_root, "test/fixtures/dictionaries"
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :lexin, LexinWeb.Endpoint,
http: [ip: {127, 0, 0, 1}, port: 4002],
secret_key_base: "8Q7QHw+qU9xnuRwI3P2MSGyDS6auQl/8Ki7GCB63toa6fS9kusXiFW9qExob1T/+",
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime
| 31.470588 | 86 | 0.764486 |
f74c071ed88d75de2bd5760e8d00b0cdcbc8b094 | 649 | ex | Elixir | lib/grizzly/zwave/commands/no_operation.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | 76 | 2019-09-04T16:56:58.000Z | 2022-03-29T06:54:36.000Z | lib/grizzly/zwave/commands/no_operation.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | 124 | 2019-09-05T14:01:24.000Z | 2022-02-28T22:58:14.000Z | lib/grizzly/zwave/commands/no_operation.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | 10 | 2019-10-23T19:25:45.000Z | 2021-11-17T13:21:20.000Z | defmodule Grizzly.ZWave.Commands.NoOperation do
@moduledoc """
This commands does nothing other than test if the node is responding
Params: - none -
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.Command
alias Grizzly.ZWave.CommandClasses.NoOperation
@impl true
def new(_opts \\ []) do
command = %Command{
name: :no_operation,
command_byte: nil,
command_class: NoOperation,
impl: __MODULE__
}
{:ok, command}
end
@impl true
@spec encode_params(Command.t()) :: binary()
def encode_params(_command) do
<<>>
end
@impl true
def decode_params(_), do: {:ok, []}
end
| 18.542857 | 70 | 0.659476 |
f74c07c79621caa50dcdb0d367311a83f1a1a395 | 455 | ex | Elixir | lib/bitpal_schemas/currency_settings.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 5 | 2021-05-04T21:28:00.000Z | 2021-12-01T11:19:48.000Z | lib/bitpal_schemas/currency_settings.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 71 | 2021-04-21T05:48:49.000Z | 2022-03-23T06:30:37.000Z | lib/bitpal_schemas/currency_settings.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-04-25T10:35:41.000Z | 2021-04-25T10:35:41.000Z | defmodule BitPalSchemas.CurrencySettings do
use TypedEctoSchema
alias BitPalSchemas.AddressKey
alias BitPalSchemas.Currency
alias BitPalSchemas.Store
typed_schema "currency_settings" do
field(:required_confirmations, :integer) :: non_neg_integer
field(:double_spend_timeout, :integer) :: non_neg_integer
has_one(:address_key, AddressKey)
belongs_to(:store, Store)
belongs_to(:currency, Currency, type: Ecto.Atom)
end
end
| 28.4375 | 63 | 0.778022 |
f74c4f2dc15523a7d49902b3fab033408fd983ea | 12,024 | exs | Elixir | test/phoenix_live_view/integrations/params_test.exs | snewcomer/phoenix_live_view | 477e9e23e9eec9163599649dab0b3ea48d54300e | [
"MIT"
] | 1 | 2019-12-07T19:28:57.000Z | 2019-12-07T19:28:57.000Z | test/phoenix_live_view/integrations/params_test.exs | snewcomer/phoenix_live_view | 477e9e23e9eec9163599649dab0b3ea48d54300e | [
"MIT"
] | 1 | 2019-03-17T23:44:09.000Z | 2019-03-19T21:52:06.000Z | test/phoenix_live_view/integrations/params_test.exs | snewcomer/phoenix_live_view | 477e9e23e9eec9163599649dab0b3ea48d54300e | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.ParamsTest do
use ExUnit.Case, async: false
use Phoenix.ConnTest
import Phoenix.LiveViewTest
alias Phoenix.LiveView
alias Phoenix.LiveViewTest.Endpoint
@endpoint Endpoint
@moduletag :capture_log
setup do
conn =
Phoenix.ConnTest.build_conn()
|> Plug.Test.init_test_session(%{})
|> put_session(:test_pid, self())
{:ok, conn: conn}
end
defp put_serialized_session(conn, key, value) do
put_session(conn, key, :erlang.term_to_binary(value))
end
describe "handle_params on disconnected mount" do
test "is called with named and query string params", %{conn: conn} do
conn = get(conn, "/counter/123", query1: "query1", query2: "query2")
response = html_response(conn, 200)
assert response =~
escape(~s|params: %{"id" => "123", "query1" => "query1", "query2" => "query2"}|)
assert response =~
escape(~s|mount: %{"id" => "123", "query1" => "query1", "query2" => "query2"}|)
end
test "hard redirects", %{conn: conn} do
assert conn
|> put_serialized_session(
:on_handle_params,
&{:noreply, LiveView.redirect(&1, to: "/")}
)
|> get("/counter/123?from=handle_params")
|> redirected_to() == "/"
end
test "hard redirect with flash message", %{conn: conn} do
conn =
put_serialized_session(conn, :on_handle_params, fn socket ->
{:noreply, socket |> LiveView.put_flash(:info, "msg") |> LiveView.redirect(to: "/")}
end)
|> fetch_flash()
|> get("/counter/123?from=handle_params")
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) == "msg"
end
test "push_patch", %{conn: conn} do
assert conn
|> put_serialized_session(:on_handle_params, fn socket ->
{:noreply, LiveView.push_patch(socket, to: "/counter/123?from=rehandled_params")}
end)
|> get("/counter/123?from=handle_params")
|> redirected_to() == "/counter/123?from=rehandled_params"
end
test "push_redirect", %{conn: conn} do
assert conn
|> put_serialized_session(:on_handle_params, fn socket ->
{:noreply, LiveView.push_redirect(socket, to: "/thermo/456")}
end)
|> get("/counter/123?from=handle_params")
|> redirected_to() == "/thermo/456"
end
test "with encoded URL", %{conn: conn} do
assert get(conn, "/counter/Wm9uZTozNzYxOA%3D%3D?foo=bar+15%26")
assert_receive {:handle_params, _uri, _assigns,
%{"id" => "Wm9uZTozNzYxOA==", "foo" => "bar 15&"}}
end
end
describe "handle_params on connected mount" do
test "is called on connected mount with query string params from get", %{conn: conn} do
{:ok, _, html} =
conn
|> get("/counter/123?q1=1", q2: "2")
|> live()
assert html =~ escape(~s|params: %{"id" => "123", "q1" => "1"}|)
assert html =~ escape(~s|mount: %{"id" => "123", "q1" => "1"}|)
end
test "is called on connected mount with query string params from live", %{conn: conn} do
{:ok, _, html} =
conn
|> live("/counter/123?q1=1")
assert html =~ escape(~s|%{"id" => "123", "q1" => "1"}|)
end
test "hard redirects", %{conn: conn} do
{:error, %{redirect: %{to: "/thermo/456"}}} =
conn
|> put_serialized_session(:on_handle_params, fn socket ->
if LiveView.connected?(socket) do
{:noreply, LiveView.redirect(socket, to: "/thermo/456")}
else
{:noreply, socket}
end
end)
|> get("/counter/123?from=handle_params")
|> live()
end
test "push_patch", %{conn: conn} do
{:ok, counter_live, _html} =
conn
|> put_serialized_session(:on_handle_params, fn socket ->
if LiveView.connected?(socket) do
{:noreply, LiveView.push_patch(socket, to: "/counter/123?from=rehandled_params")}
else
{:noreply, socket}
end
end)
|> get("/counter/123?from=handle_params")
|> live()
response = render(counter_live)
assert response =~ escape(~s|params: %{"from" => "rehandled_params", "id" => "123"}|)
assert response =~ escape(~s|mount: %{"from" => "handle_params", "id" => "123"}|)
end
test "push_redirect", %{conn: conn} do
{:error, %{live_redirect: %{to: "/thermo/456"}}} =
conn
|> put_serialized_session(:on_handle_params, fn socket ->
if LiveView.connected?(socket) do
{:noreply, LiveView.push_redirect(socket, to: "/thermo/456")}
else
{:noreply, socket}
end
end)
|> get("/counter/123?from=handle_params")
|> live()
end
test "with encoded URL", %{conn: conn} do
{:ok, _counter_live, _html} = live(conn, "/counter/Wm9uZTozNzYxOA%3D%3D?foo=bar+15%26")
assert_receive {:handle_params, _uri, %{connected?: true},
%{"id" => "Wm9uZTozNzYxOA==", "foo" => "bar 15&"}}
end
end
describe "live_link" do
test "renders static container", %{conn: conn} do
assert conn
|> put_req_header("x-requested-with", "live-link")
|> get("/counter/123", query1: "query1", query2: "query2")
|> html_response(200) =~
~r(<div data-phx-session="[^"]+" data-phx-view="[^"]+" id="[^"]+"></div>)
end
test "invokes handle_params", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
assert render_patch(counter_live, "/counter/123?filter=true") =~
escape(~s|%{"filter" => "true", "id" => "123"}|)
end
test "with encoded URL", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
assert render_patch(counter_live, "/counter/Wm9uZTozNzYxOa%3d%3d?foo=bar+15%26") =~
escape(~s|%{"foo" => "bar 15&", "id" => "Wm9uZTozNzYxOa=="}|)
end
end
describe "push_patch" do
test "from event callback ack", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
assert render_click(counter_live, :push_patch, "/counter/123?from=event_ack") =~
escape(~s|%{"from" => "event_ack", "id" => "123"}|)
assert_redirect(counter_live, "/counter/123?from=event_ack")
end
test "from handle_info", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
send(counter_live.pid, {:push_patch, "/counter/123?from=handle_info"})
assert render(counter_live) =~ escape(~s|%{"from" => "handle_info", "id" => "123"}|)
end
test "from handle_cast", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
:ok = GenServer.cast(counter_live.pid, {:push_patch, "/counter/123?from=handle_cast"})
assert render(counter_live) =~ escape(~s|%{"from" => "handle_cast", "id" => "123"}|)
end
test "from handle_call", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
next = fn socket ->
{:reply, :ok, LiveView.push_patch(socket, to: "/counter/123?from=handle_call")}
end
:ok = GenServer.call(counter_live.pid, {:push_patch, next})
assert render(counter_live) =~ escape(~s|%{"from" => "handle_call", "id" => "123"}|)
end
test "from handle_params", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
next = fn socket ->
send(self(), {:set, :val, 1000})
new_socket =
LiveView.assign(socket, :on_handle_params, fn socket ->
{:noreply, LiveView.push_patch(socket, to: "/counter/123?from=rehandled_params")}
end)
{:reply, :ok, LiveView.push_patch(new_socket, to: "/counter/123?from=handle_params")}
end
:ok = GenServer.call(counter_live.pid, {:push_patch, next})
html = render(counter_live)
assert html =~ escape(~s|%{"from" => "rehandled_params", "id" => "123"}|)
assert html =~ "The value is: 1000"
assert_receive {:handle_params, "http://localhost:4000/counter/123?from=rehandled_params",
%{val: 1}, %{"from" => "rehandled_params", "id" => "123"}}
end
end
describe "push_redirect" do
test "from event callback", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
assert {:error, {:live_redirect, %{to: "/thermo/123"}}} =
render_click(counter_live, :push_redirect, "/thermo/123")
assert_redirect(counter_live, "/thermo/123")
assert_remove(counter_live, {:redirect, "/thermo/123"})
end
test "from handle_params", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
next = fn socket ->
new_socket =
LiveView.assign(socket, :on_handle_params, fn socket ->
{:noreply, LiveView.push_redirect(socket, to: "/thermo/123")}
end)
{:reply, :ok, LiveView.push_patch(new_socket, to: "/counter/123?from=handle_params")}
end
:ok = GenServer.call(counter_live.pid, {:push_patch, next})
assert_receive {:handle_params, "http://localhost:4000/counter/123?from=handle_params",
%{val: 1}, %{"from" => "handle_params", "id" => "123"}}
assert_remove(counter_live, {:redirect, "/thermo/123"})
end
test "shuts down with push_redirect", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
next = fn socket ->
{:noreply, LiveView.push_redirect(socket, to: "/thermo/123")}
end
assert {{:shutdown, {:redirect, "/thermo/123"}}, _} =
catch_exit(GenServer.call(counter_live.pid, {:push_redirect, next}))
end
end
describe "connect_params" do
test "connect_params can be read on mount", %{conn: conn} do
{:ok, counter_live, _html} =
live(conn, "/counter/123", connect_params: %{"connect1" => "1"})
assert render(counter_live) =~ escape(~s|connect: %{"connect1" => "1"}|)
end
end
describe "@live_view_action" do
test "when initially set to nil", %{conn: conn} do
{:ok, live, html} = live(conn, "/action")
assert html =~ "LiveView module: Phoenix.LiveViewTest.ActionLive"
assert html =~ "LiveView action: nil"
assert html =~ "Mount action: nil"
assert html =~ "Params: %{}"
html = render_patch(live, "/action/index")
assert html =~ "LiveView module: Phoenix.LiveViewTest.ActionLive"
assert html =~ "LiveView action: :index"
assert html =~ "Mount action: nil"
assert html =~ "Params: %{}"
html = render_patch(live, "/action/1/edit")
assert html =~ "LiveView module: Phoenix.LiveViewTest.ActionLive"
assert html =~ "LiveView action: :edit"
assert html =~ "Mount action: nil"
assert html =~ "Params: %{"id" => "1"}"
end
test "when initially set to action", %{conn: conn} do
{:ok, live, html} = live(conn, "/action/index")
assert html =~ "LiveView module: Phoenix.LiveViewTest.ActionLive"
assert html =~ "LiveView action: :index"
assert html =~ "Mount action: :index"
assert html =~ "Params: %{}"
html = render_patch(live, "/action")
assert html =~ "LiveView module: Phoenix.LiveViewTest.ActionLive"
assert html =~ "LiveView action: nil"
assert html =~ "Mount action: :index"
assert html =~ "Params: %{}"
html = render_patch(live, "/action/1/edit")
assert html =~ "LiveView module: Phoenix.LiveViewTest.ActionLive"
assert html =~ "LiveView action: :edit"
assert html =~ "Mount action: :index"
assert html =~ "Params: %{"id" => "1"}"
end
end
defp escape(str) do
str
|> Phoenix.HTML.html_escape()
|> Phoenix.HTML.safe_to_string()
end
end
| 34.852174 | 96 | 0.582834 |
f74c56b97bed7857d11d2b6873d7b67993b5af83 | 2,010 | exs | Elixir | phx-board-app/mix.exs | dannyh79/k8s-challenge-2021 | b48f8dc2d7a55b1f8fa91a13c54867197ef8e270 | [
"MIT"
] | null | null | null | phx-board-app/mix.exs | dannyh79/k8s-challenge-2021 | b48f8dc2d7a55b1f8fa91a13c54867197ef8e270 | [
"MIT"
] | null | null | null | phx-board-app/mix.exs | dannyh79/k8s-challenge-2021 | b48f8dc2d7a55b1f8fa91a13c54867197ef8e270 | [
"MIT"
] | null | null | null | defmodule Board.MixProject do
use Mix.Project
def project do
[
app: :board,
version: "0.1.0",
elixir: "~> 1.12",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Board.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.6.5"},
{:phoenix_ecto, "~> 4.4"},
{:ecto_sql, "~> 3.6"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 3.0"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_view, "~> 0.17.5"},
{:floki, ">= 0.30.0", only: :test},
{:phoenix_live_dashboard, "~> 0.6"},
{:esbuild, "~> 0.3", runtime: Mix.env() == :dev},
{:swoosh, "~> 1.3"},
{:telemetry_metrics, "~> 0.6"},
{:telemetry_poller, "~> 1.0"},
{:gettext, "~> 0.18"},
{:jason, "~> 1.2"},
{:plug_cowboy, "~> 2.5"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"],
"assets.deploy": ["esbuild default --minify", "phx.digest"]
]
end
end
| 28.309859 | 84 | 0.564179 |
f74c641850ff97f5ad1995572f05791370e8c3aa | 3,110 | ex | Elixir | lib/mix/lib/mix/cli.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/cli.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/cli.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.CLI do
@moduledoc false
@doc """
Runs Mix according to the command line arguments.
"""
def main(args \\ System.argv) do
Mix.Local.append_archives
Mix.Local.append_paths
if env_variable_activated?("MIX_QUIET"), do: Mix.shell(Mix.Shell.Quiet)
if env_variable_activated?("MIX_DEBUG"), do: Mix.debug(true)
case check_for_shortcuts(args) do
:help ->
proceed(["help"])
:version ->
display_version()
nil ->
proceed(args)
end
end
defp proceed(args) do
load_dot_config()
load_mixfile()
{task, args} = get_task(args)
ensure_hex(task)
change_env(task)
run_task(task, args)
end
defp load_mixfile() do
file = System.get_env("MIX_EXS") || "mix.exs"
_ = if File.regular?(file) do
Code.load_file(file)
end
end
defp get_task(["-" <> _|_]) do
Mix.shell.error "** (Mix) Cannot implicitly pass flags to default Mix task, " <>
"please invoke instead \"mix #{Mix.Project.config[:default_task]}\""
exit({:shutdown, 1})
end
defp get_task([h|t]) do
{h, t}
end
defp get_task([]) do
{Mix.Project.config[:default_task], []}
end
defp run_task(name, args) do
try do
ensure_no_slashes(name)
Mix.Task.run "loadconfig"
Mix.Task.run name, args
rescue
# We only rescue exceptions in the Mix namespace, all
# others pass through and will explode on the users face
exception ->
stacktrace = System.stacktrace
if Map.get(exception, :mix) do
mod = exception.__struct__ |> Module.split() |> Enum.at(0, "Mix")
Mix.shell.error "** (#{mod}) #{Exception.message(exception)}"
exit({:shutdown, 1})
else
reraise exception, stacktrace
end
end
end
defp env_variable_activated?(name) do
System.get_env(name) in ~w(1 true)
end
defp ensure_hex("local.hex"),
do: :ok
defp ensure_hex(_task),
do: Mix.Hex.ensure_updated?()
defp ensure_no_slashes(task) do
if String.contains?(task, "/") do
Mix.raise Mix.NoTaskError, task: task
end
end
defp change_env(task) do
if is_nil(System.get_env("MIX_ENV")) &&
(env = preferred_cli_env(task)) do
Mix.env(env)
if project = Mix.Project.pop do
%{name: name, file: file} = project
Mix.Project.push name, file
end
end
end
defp preferred_cli_env(task) do
task = String.to_atom(task)
Mix.Project.config[:preferred_cli_env][task] || Mix.Task.preferred_cli_env(task)
end
defp load_dot_config do
path = Path.join(Mix.Utils.mix_home, "config.exs")
if File.regular?(path) do
Mix.Task.run "loadconfig", [path]
end
end
defp display_version() do
IO.puts "Mix #{System.version}"
end
# Check for --help or --version in the args
defp check_for_shortcuts([first_arg|_]) when first_arg in
["--help", "-h", "-help"], do: :help
defp check_for_shortcuts([first_arg|_]) when first_arg in
["--version", "-v"], do: :version
defp check_for_shortcuts(_), do: nil
end
| 24.68254 | 88 | 0.624116 |
f74c6647c107bd1632e4bec79b51e35c2b04be40 | 182 | ex | Elixir | lib/wallaby/driver/external_command.ex | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
] | 1 | 2019-01-30T12:08:17.000Z | 2019-01-30T12:08:17.000Z | lib/wallaby/driver/external_command.ex | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
] | 2 | 2019-04-10T08:23:27.000Z | 2021-01-25T16:37:23.000Z | lib/wallaby/driver/external_command.ex | carl-al/wallaby | a4c9811902370b15db17fc62d451035ee7464eea | [
"MIT"
] | 1 | 2019-01-29T16:03:55.000Z | 2019-01-29T16:03:55.000Z | defmodule Wallaby.Driver.ExternalCommand do
@moduledoc false
@type t :: %__MODULE__{
executable: String.t,
args: [String.t]
}
defstruct [:executable, args: []]
end
| 16.545455 | 43 | 0.67033 |
f74c7c0f5eea8d1804aa9d88fb8afbc82119ef39 | 2,176 | ex | Elixir | lib/console/labels/label_resolver.ex | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 83 | 2018-05-31T14:49:10.000Z | 2022-03-27T16:49:49.000Z | lib/console/labels/label_resolver.ex | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 267 | 2018-05-22T23:19:02.000Z | 2022-03-31T04:31:06.000Z | lib/console/labels/label_resolver.ex | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 18 | 2018-11-20T05:15:54.000Z | 2022-03-28T08:20:13.000Z | defmodule Console.Labels.LabelResolver do
alias Console.Repo
alias Console.Labels.Label
alias Console.Labels.DevicesLabels
import Ecto.Query
alias Console.Alerts
def paginate_by_device(%{page: page, page_size: page_size, device_id: device_id, column: column, order: order}, %{context: %{current_organization: current_organization}}) do
order_by = {String.to_existing_atom(order), String.to_existing_atom(column)}
query = from l in Label,
join: dl in DevicesLabels,
on: dl.label_id == l.id,
where: l.organization_id == ^current_organization.id and dl.device_id == ^device_id,
order_by: ^order_by
labels = query |> Repo.paginate(page: page, page_size: page_size)
{:ok, labels}
end
def find(%{id: id}, %{context: %{current_organization: current_organization}}) do
label = Ecto.assoc(current_organization, :labels) |> preload([:devices]) |> Repo.get!(id)
devices = label.devices
|> Enum.map(fn d ->
Map.drop(d, [:app_key])
end)
{:ok, label |> Map.put(:devices, devices)}
end
def all(_, %{context: %{current_organization: current_organization}}) do
device_count_query = from l in Label,
left_join: d in assoc(l, :devices),
where: l.organization_id == ^current_organization.id,
group_by: l.id,
select: %{label_id: l.id, device_count: count(d.id)}
device_counts = Repo.all(device_count_query)
labels = Label
|> where([l], l.organization_id == ^current_organization.id)
|> preload([:devices])
|> Repo.all()
|> Enum.map(
fn label ->
%{device_count: device_count} = device_counts
|> Enum.find(
fn rec ->
rec.label_id == label.id
end)
label |> Map.put(:device_count, device_count) |> Map.put(:alerts, Alerts.get_alerts_by_node(label.id, "label"))
end)
{:ok, labels}
end
def get_names(%{label_ids: label_ids}, %{context: %{current_organization: current_organization}}) do
query = from l in Label,
where: l.organization_id == ^current_organization.id and l.id in ^label_ids
{:ok, query |> Repo.all()}
end
end
| 32.477612 | 175 | 0.647059 |
f74c94f66d8b6d69d5557b2cbeeeb5cf5be993d6 | 3,730 | exs | Elixir | test/grpc/integration/service_test.exs | clearjs/grpc-elixir | 3dfa4648711ec0a59bb3bba2766de9ce770a0789 | [
"Apache-2.0"
] | 2 | 2019-06-18T18:31:24.000Z | 2020-12-31T07:24:12.000Z | test/grpc/integration/service_test.exs | clearjs/grpc-elixir | 3dfa4648711ec0a59bb3bba2766de9ce770a0789 | [
"Apache-2.0"
] | 4 | 2020-02-10T23:40:06.000Z | 2021-03-23T18:33:11.000Z | test/grpc/integration/service_test.exs | clearjs/grpc-elixir | 3dfa4648711ec0a59bb3bba2766de9ce770a0789 | [
"Apache-2.0"
] | 7 | 2019-08-14T21:46:56.000Z | 2021-03-23T18:09:21.000Z | defmodule GRPC.Integration.ServiceTest do
use GRPC.Integration.TestCase
defmodule FeatureServer do
use GRPC.Server, service: Routeguide.RouteGuide.Service
alias GRPC.Server
def get_feature(point, _stream) do
simple_feature(point)
end
def list_features(rectangle, stream) do
Enum.each([rectangle.lo, rectangle.hi], fn point ->
feature = simple_feature(point)
Server.send_reply(stream, feature)
end)
end
def record_route(req_enum, _stream) do
points =
Enum.reduce(req_enum, [], fn point, acc ->
[point | acc]
end)
fake_num = length(points)
Routeguide.RouteSummary.new(
point_count: fake_num,
feature_count: fake_num,
distance: fake_num,
elapsed_time: fake_num
)
end
def route_chat(req_enum, stream) do
Enum.each(req_enum, fn note ->
note = %{note | message: "Reply: #{note.message}"}
Server.send_reply(stream, note)
end)
end
defp simple_feature(point) do
Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}")
end
end
test "Unary RPC works" do
run_server(FeatureServer, fn port ->
{:ok, channel} = GRPC.Stub.connect("localhost:#{port}")
point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906)
{:ok, feature} = channel |> Routeguide.RouteGuide.Stub.get_feature(point)
assert feature == Routeguide.Feature.new(location: point, name: "409146138,-746188906")
end)
end
test "Server streaming RPC works" do
run_server(FeatureServer, fn port ->
{:ok, channel} = GRPC.Stub.connect("localhost:#{port}")
low = Routeguide.Point.new(latitude: 400_000_000, longitude: -750_000_000)
high = Routeguide.Point.new(latitude: 420_000_000, longitude: -730_000_000)
rect = Routeguide.Rectangle.new(lo: low, hi: high)
{:ok, stream} = channel |> Routeguide.RouteGuide.Stub.list_features(rect)
assert Enum.to_list(stream) == [
{:ok, Routeguide.Feature.new(location: low, name: "400000000,-750000000")},
{:ok, Routeguide.Feature.new(location: high, name: "420000000,-730000000")}
]
end)
end
test "Client streaming RPC works" do
run_server(FeatureServer, fn port ->
{:ok, channel} = GRPC.Stub.connect("localhost:#{port}")
point1 = Routeguide.Point.new(latitude: 400_000_000, longitude: -750_000_000)
point2 = Routeguide.Point.new(latitude: 420_000_000, longitude: -730_000_000)
stream = channel |> Routeguide.RouteGuide.Stub.record_route()
GRPC.Stub.send_request(stream, point1)
GRPC.Stub.send_request(stream, point2, end_stream: true)
{:ok, res} = GRPC.Stub.recv(stream)
assert %Routeguide.RouteSummary{point_count: 2} = res
end)
end
test "Bidirectional streaming RPC works" do
run_server(FeatureServer, fn port ->
{:ok, channel} = GRPC.Stub.connect("localhost:#{port}")
stream = channel |> Routeguide.RouteGuide.Stub.route_chat()
task =
Task.async(fn ->
Enum.each(1..6, fn i ->
point = Routeguide.Point.new(latitude: 0, longitude: rem(i, 3) + 1)
note = Routeguide.RouteNote.new(location: point, message: "Message #{i}")
opts = if i == 6, do: [end_stream: true], else: []
GRPC.Stub.send_request(stream, note, opts)
end)
end)
{:ok, result_enum} = GRPC.Stub.recv(stream)
Task.await(task)
notes =
Enum.map(result_enum, fn {:ok, note} ->
assert "Reply: " <> _msg = note.message
note
end)
assert length(notes) == 6
end)
end
end
| 33.303571 | 93 | 0.634584 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.