hexsha (stringlengths 40-40) | size (int64 2-991k) | ext (stringclasses 2 values) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 4-208) | max_stars_repo_name (stringlengths 6-106) | max_stars_repo_head_hexsha (stringlengths 40-40) | max_stars_repo_licenses (sequence) | max_stars_count (int64 1-33.5k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24-24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24-24 ⌀) | max_issues_repo_path (stringlengths 4-208) | max_issues_repo_name (stringlengths 6-106) | max_issues_repo_head_hexsha (stringlengths 40-40) | max_issues_repo_licenses (sequence) | max_issues_count (int64 1-16.3k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24-24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24-24 ⌀) | max_forks_repo_path (stringlengths 4-208) | max_forks_repo_name (stringlengths 6-106) | max_forks_repo_head_hexsha (stringlengths 40-40) | max_forks_repo_licenses (sequence) | max_forks_count (int64 1-6.91k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24-24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24-24 ⌀) | content (stringlengths 2-991k) | avg_line_length (float64 1-36k) | max_line_length (int64 1-977k) | alphanum_fraction (float64 0-1) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7d18aa00437c0fb032736a1b1c62998e6fc2c35 | 309 | ex | Elixir | test/support/case.ex | marick/flow_assertions | 50936d03c7e0d482f13f94c96438627d344592c0 | [
"Unlicense"
] | 8 | 2020-09-04T19:38:41.000Z | 2021-06-05T18:59:49.000Z | test/support/case.ex | marick/flow_assertions | 50936d03c7e0d482f13f94c96438627d344592c0 | [
"Unlicense"
] | null | null | null | test/support/case.ex | marick/flow_assertions | 50936d03c7e0d482f13f94c96438627d344592c0 | [
"Unlicense"
] | null | null | null |
defmodule FlowAssertions.Case do
defmacro __using__(_) do
quote do
use ExUnit.Case, async: true
alias ExUnit.AssertionError
use FlowAssertions
import FlowAssertions.AssertionA
alias FlowAssertions.Messages
import FlowAssertions.Define.Tabular
end
end
end
| 20.6 | 42 | 0.708738 |
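The `__using__/1` macro in the FlowAssertions.Case row above bundles the usual test-support setup; a minimal sketch of a consumer (the test module below is illustrative, not part of the dataset row):

```elixir
# One `use` line pulls in ExUnit.Case (async), FlowAssertions, the AssertionA
# imports and the Tabular helpers listed in the macro above.
defmodule SomeModuleTest do
  use FlowAssertions.Case

  test "plain ExUnit assertions are available" do
    assert 1 + 1 == 2
  end
end
```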
f7d18fced42f90595bd2f3d86140da53d31ad0bc | 2,339 | ex | Elixir | lib/codes/codes_z51.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_z51.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_z51.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null |
defmodule IcdCode.ICDCode.Codes_Z51 do
alias IcdCode.ICDCode
def _Z510 do
%ICDCode{full_code: "Z510",
category_code: "Z51",
short_code: "0",
full_name: "Encounter for antineoplastic radiation therapy",
short_name: "Encounter for antineoplastic radiation therapy",
category_name: "Encounter for antineoplastic radiation therapy"
}
end
def _Z5111 do
%ICDCode{full_code: "Z5111",
category_code: "Z51",
short_code: "11",
full_name: "Encounter for antineoplastic chemotherapy",
short_name: "Encounter for antineoplastic chemotherapy",
category_name: "Encounter for antineoplastic chemotherapy"
}
end
def _Z5112 do
%ICDCode{full_code: "Z5112",
category_code: "Z51",
short_code: "12",
full_name: "Encounter for antineoplastic immunotherapy",
short_name: "Encounter for antineoplastic immunotherapy",
category_name: "Encounter for antineoplastic immunotherapy"
}
end
def _Z515 do
%ICDCode{full_code: "Z515",
category_code: "Z51",
short_code: "5",
full_name: "Encounter for palliative care",
short_name: "Encounter for palliative care",
category_name: "Encounter for palliative care"
}
end
def _Z516 do
%ICDCode{full_code: "Z516",
category_code: "Z51",
short_code: "6",
full_name: "Encounter for desensitization to allergens",
short_name: "Encounter for desensitization to allergens",
category_name: "Encounter for desensitization to allergens"
}
end
def _Z5181 do
%ICDCode{full_code: "Z5181",
category_code: "Z51",
short_code: "81",
full_name: "Encounter for therapeutic drug level monitoring",
short_name: "Encounter for therapeutic drug level monitoring",
category_name: "Encounter for therapeutic drug level monitoring"
}
end
def _Z5189 do
%ICDCode{full_code: "Z5189",
category_code: "Z51",
short_code: "89",
full_name: "Encounter for other specified aftercare",
short_name: "Encounter for other specified aftercare",
category_name: "Encounter for other specified aftercare"
}
end
end
| 33.414286 | 74 | 0.637879 |
f7d1a4ab7626be46ea865439415be3e298e2b313 | 4,060 | exs | Elixir | .workshop/exercises/my_enum/test/check.exs | silesian-beamers/elixir-from-the-ground-up | 1ad8c2a4d429175461dc45e218849eb6a212c776 | [
"MIT"
] | 10 | 2015-12-13T07:29:08.000Z | 2016-09-22T03:47:35.000Z | .workshop/exercises/my_enum/test/check.exs | silesian-beamers/elixir-from-the-ground-up | 1ad8c2a4d429175461dc45e218849eb6a212c776 | [
"MIT"
] | 4 | 2015-12-02T12:12:14.000Z | 2016-01-11T07:33:24.000Z | .workshop/exercises/my_enum/test/check.exs | silesian-beamers/elixir-from-the-ground-up | 1ad8c2a4d429175461dc45e218849eb6a212c776 | [
"MIT"
] | null | null | null |
defmodule Tracer do
use GenServer
def start(), do: GenServer.start(__MODULE__, [])
def trace(who, modules, functions), do: GenServer.call(who, {:trace, modules, functions})
def clear(who), do: GenServer.call(who, :clear)
def calls(who), do: GenServer.call(who, :get_calls)
def init([]) do
:erlang.trace(:all, true, [:call])
{:ok, %{ :modules => [], :functions => [], :calls => [] }}
end
def handle_call(:get_calls, _from, state) do
{:reply, state[:calls], state}
end
def handle_call({:trace, modules, functions}, _from, state) do
for module <- modules do
for function <- functions do
:erlang.trace_pattern({module, function, :_}, [{:_, [], [{:return_trace}]}])
end
end
{:reply, :traced, %{ state | :functions => functions, :modules => modules }}
end
def handle_call(:clear, _from, state) do
for module <- state[:modules] do
for function <- state[:functions] do
:erlang.trace_pattern({module, function, :_}, false)
end
end
{:reply, :cleared, %{ state | :functions => [], :modules => [], :calls => [] }}
end
def handle_info({:trace, _, :call, {_mod, call, _args}}, state) do
{:noreply, %{ state | :calls => state[:calls] ++ [ call ] }}
end
def handle_info(_, state) do
{:noreply, state}
end
end
defmodule Workshop.Exercise.MyEnumCheck do
use Workshop.Validator
defmacrop check_the_same(expected, given) do
quote do
if unquote(expected) == unquote(given) do
:ok
else
{:error,
"Expected `#{unquote(Macro.to_string(given))}`to have the same result " <>
"as `#{unquote(Macro.to_string(expected))}` - " <>
"#{inspect unquote(expected)}, instead got: #{inspect unquote(given)}."}
end
end
end
defmacrop should_not_use(module, function, call) do
quote do
{:ok, pid} = Tracer.start()
Tracer.trace(pid, [ unquote(module) ], [ unquote(function) ])
unquote(call)
:timer.sleep(100)
calls = Tracer.calls(pid)
result = if length(calls) > 0 do
{:error,
"Call to `#{unquote(Macro.to_string(module))}.#{unquote(Macro.to_string(function))}` detected. " <>
"You cannot use those function in your `#{unquote(Macro.to_string(call))}` implementation."}
else
:ok
end
Tracer.clear(pid)
Process.exit(pid, :normal)
result
end
end
verify "map with integers" do
check_the_same(Enum.map([1, 2, 3], &(&1+1)),
MyEnum.map([1, 2, 3], &(&1+1)))
end
verify "map with strings" do
check_the_same(Enum.map(~w(a b c), &String.upcase/1),
MyEnum.map(~w(a b c), &String.upcase/1))
end
verify "append" do
check_the_same([1, 2, 3] ++ [4, 5, 6],
MyEnum.append([1, 2, 3], [4, 5, 6]))
end
verify "length" do
check_the_same(length(:lists.seq(1, 10000)),
MyEnum.length(:lists.seq(1, 10000)))
end
verify "reverse" do
check_the_same(Enum.reverse(:lists.seq(1, 10000)),
MyEnum.reverse(:lists.seq(1, 10000)))
end
verify "do not use :lists.append/2 in MyEnum.append/2" do
should_not_use(:lists, :append, MyEnum.append([1], [2]))
end
verify "do not use :Kernel.++/2 in MyEnum.append/2" do
should_not_use(Kernel, :'++', MyEnum.append([1], [2]))
end
verify "do not use :erlang.++/2 in MyEnum.append/2" do
should_not_use(:erlang, :'++', MyEnum.append([1], [2]))
end
verify "do not use Kernel.length/1 function in MyEnum.length/1" do
should_not_use(Kernel, :length, MyEnum.length([1, 2, 3]))
end
verify "do not use :erlang.length/1 function in MyEnum.length/1" do
should_not_use(:erlang, :length, MyEnum.length([1, 2, 3]))
end
verify "do not use Enum.map/2 function in MyEnum.map/2" do
should_not_use(Enum, :map, MyEnum.map(["a"], &String.upcase/1))
end
verify "do not use Enum.reverse/2 function in MyEnum.reverse/2" do
should_not_use(Enum, :reverse, MyEnum.reverse([1, 2, 3]))
end
end
| 28.194444 | 108 | 0.601478 |
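The checks in the row above trace `Enum`, `:lists`, `Kernel` and `:erlang` calls to make sure a student-written `MyEnum` module is hand-rolled. A minimal sketch of an implementation that would satisfy both the equality checks and the tracing checks (assumed; `MyEnum` itself is not included in the dataset row):

```elixir
defmodule MyEnum do
  # Allow a local length/1 by excluding the auto-imported Kernel version.
  import Kernel, except: [length: 1]

  # map/2 by plain recursion instead of Enum.map/2.
  def map([], _fun), do: []
  def map([head | tail], fun), do: [fun.(head) | map(tail, fun)]

  # append/2 without Kernel.++/2 or :lists.append/2.
  def append([], right), do: right
  def append([head | tail], right), do: [head | append(tail, right)]

  # length/1 without Kernel.length/1, using a tail-recursive counter.
  def length(list), do: count(list, 0)
  defp count([], n), do: n
  defp count([_ | tail], n), do: count(tail, n + 1)

  # reverse/1 without Enum.reverse/1, accumulating onto a new list.
  def reverse(list), do: do_reverse(list, [])
  defp do_reverse([], acc), do: acc
  defp do_reverse([head | tail], acc), do: do_reverse(tail, [head | acc])
end
```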
f7d1ba4aef389143673a8bf9a08186c28095677a | 716 | exs | Elixir | mix.exs | reergymerej/Hello-World-Elixir | dcdc1437821042225ebb2987dc015e356b0f87f5 | [
"MIT"
] | null | null | null | mix.exs | reergymerej/Hello-World-Elixir | dcdc1437821042225ebb2987dc015e356b0f87f5 | [
"MIT"
] | null | null | null | mix.exs | reergymerej/Hello-World-Elixir | dcdc1437821042225ebb2987dc015e356b0f87f5 | [
"MIT"
] | null | null | null |
defmodule InchTest.Mixfile do
use Mix.Project
def project do
[app: :inch_test,
version: "0.0.1",
elixir: "~> 1.0-dev",
docs: [main: "README", readme: true],
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type `mix help deps` for more examples and options
defp deps do
[{:ex_doc, "~> 0.6"},
{:earmark, ">= 0.0.0"},
{:inch_ex, only: :docs}]
end
end
| 21.058824 | 77 | 0.586592 |
f7d23f75730b00e2d87e80f018fec9a542acc27e | 9,349 | exs | Elixir | test/ex_pix_brcode/dynamic_pix_loader_test.exs | joaogularte/ex-pix-brcode | 2cd0d67db06cb5a250fa42cf5b4921ed84559b3e | [
"Apache-2.0"
] | null | null | null | test/ex_pix_brcode/dynamic_pix_loader_test.exs | joaogularte/ex-pix-brcode | 2cd0d67db06cb5a250fa42cf5b4921ed84559b3e | [
"Apache-2.0"
] | null | null | null | test/ex_pix_brcode/dynamic_pix_loader_test.exs | joaogularte/ex-pix-brcode | 2cd0d67db06cb5a250fa42cf5b4921ed84559b3e | [
"Apache-2.0"
] | null | null | null |
defmodule ExPixBRCode.Payments.DynamicPixLoaderTest do
use ExUnit.Case, async: true
alias ExPixBRCode.Payments.DynamicPixLoader
alias ExPixBRCode.Payments.Models.DynamicImmediatePixPayment
alias ExPixBRCode.Payments.Models.DynamicImmediatePixPayment.{Calendario, Valor}
@client Tesla.client([], Tesla.Mock)
setup_all do
ca_key = X509.PrivateKey.new_rsa(1024)
ca =
X509.Certificate.self_signed(
ca_key,
"/C=BR/ST=SP/L=Sao Paulo/O=Acme/CN=RSA Pix Root CA",
template: :root_ca
)
my_key = X509.PrivateKey.new_rsa(1024)
my_cert =
my_key
|> X509.PublicKey.derive()
|> X509.Certificate.new(
"/C=BR/ST=RJ/L=Rio de Janeiro/O=PSP Bank/CN=PSP",
ca,
ca_key,
extensions: [
subject_alt_name:
X509.Certificate.Extension.subject_alt_name(["somepixpsp.br", "www.somepixpsp.br"])
]
)
raw_cert = X509.Certificate.to_der(my_cert)
x5c = [
Base.encode64(raw_cert),
ca |> X509.Certificate.to_der() |> Base.encode64()
]
{_, pubkey_map} =
my_cert
|> X509.Certificate.public_key()
|> JOSE.JWK.from_key()
|> JOSE.JWK.to_map()
thumbprint =
:sha
|> :crypto.hash(raw_cert)
|> Base.url_encode64(padding: false)
thumbprintS256 =
:sha256
|> :crypto.hash(raw_cert)
|> Base.url_encode64(padding: false)
kid = Ecto.UUID.generate()
pem = my_key |> JOSE.JWK.from_key() |> JOSE.JWK.to_pem()
jku = "https://somepixpsp.br/pix/v2/certs"
signer =
Joken.Signer.create("RS256", %{"pem" => pem}, %{
"x5t" => thumbprint,
"kid" => kid,
"jku" => jku
})
signerS256 =
Joken.Signer.create("RS256", %{"pem" => pem}, %{
"x5t#S256" => thumbprintS256,
"kid" => kid,
"jku" => jku
})
jwks = %{
"keys" => [
Map.merge(
pubkey_map,
%{
"kid" => kid,
"x5c" => x5c,
"x5t" => thumbprint,
"x5t#S256" => thumbprintS256,
"kty" => "RSA",
"key_ops" => ["verify"]
}
)
]
}
{:ok, jku: jku, signer: signer, signerS256: signerS256, jwks: jwks}
end
describe "load_pix/2" do
for key_type <- [
:cpf,
:cnpj,
:phone,
:email,
:random_key
] do
test "succeeds for payment with #{key_type} key", %{jku: jku} = ctx do
payment = build_pix_payment() |> with_key(unquote(key_type))
pix_url = "https://somepixpsp.br/pix/v2/#{Ecto.UUID.generate()}"
Tesla.Mock.mock(fn
%{url: ^pix_url} ->
%{}
|> Joken.generate_and_sign!(payment, ctx.signer)
|> Tesla.Mock.text(headers: [{"content-type", "application/jose"}])
%{url: ^jku} ->
Tesla.Mock.json(ctx.jwks)
end)
assert {:ok,
%DynamicImmediatePixPayment{
calendario: %Calendario{
apresentacao: ~U[2020-11-28 03:15:39Z],
criacao: ~U[2020-11-13 23:59:49Z],
expiracao: 86400
},
chave: payment.chave,
devedor: nil,
infoAdicionais: [],
revisao: 0,
solicitacaoPagador: nil,
status: :ATIVA,
txid: "4DE46328260C11EB91C04049FC2CA371",
valor: %Valor{original: Decimal.new("1.00")}
}} == DynamicPixLoader.load_pix(@client, pix_url)
x5t = ctx.jwks["keys"] |> hd() |> Map.get("x5t")
kid = ctx.jwks["keys"] |> hd() |> Map.get("kid")
key = {x5t, kid}
assert %{^key => _} = :persistent_term.get(ctx.jku)
end
end
test "can skip certifica validations", %{jku: jku} = ctx do
payment = build_pix_payment()
pix_url = "https://somepixpsp.br/pix/v2/#{Ecto.UUID.generate()}"
Tesla.Mock.mock(fn
%{url: ^pix_url} ->
%{}
|> Joken.generate_and_sign!(payment, ctx.signer)
|> Tesla.Mock.text(headers: [{"content-type", "application/jose"}])
%{url: ^jku} ->
key = ctx.jwks["keys"] |> hd()
key = %{key | "x5c" => Enum.reverse(key["x5c"])}
Tesla.Mock.json(%{keys: [key]})
end)
assert {:ok,
%DynamicImmediatePixPayment{
calendario: %Calendario{
apresentacao: ~U[2020-11-28 03:15:39Z],
criacao: ~U[2020-11-13 23:59:49Z],
expiracao: 86400
},
chave: "14413050762",
devedor: nil,
infoAdicionais: [],
revisao: 0,
solicitacaoPagador: nil,
status: :ATIVA,
txid: "4DE46328260C11EB91C04049FC2CA371",
valor: %Valor{original: Decimal.new("1.00")}
}} ==
DynamicPixLoader.load_pix(@client, pix_url,
leaf_certificate_should_fail: false,
x5c_should_fail: false
)
x5t = ctx.jwks["keys"] |> hd() |> Map.get("x5t")
kid = ctx.jwks["keys"] |> hd() |> Map.get("kid")
key = {x5t, kid}
assert %{^key => _} = :persistent_term.get(ctx.jku)
end
end
describe "load_pix/2 - x5t#S256" do
for key_type <- [
:cpf,
:cnpj,
:phone,
:email,
:random_key
] do
test "succeeds for payment with #{key_type} key", %{jku: jku} = ctx do
payment = build_pix_payment() |> with_key(unquote(key_type))
pix_url = "https://somepixpsp.br/pix/v2/#{Ecto.UUID.generate()}"
Tesla.Mock.mock(fn
%{url: ^pix_url} ->
%{}
|> Joken.generate_and_sign!(payment, ctx.signerS256)
|> Tesla.Mock.text(headers: [{"content-type", "application/jose"}])
%{url: ^jku} ->
Tesla.Mock.json(ctx.jwks)
end)
assert {:ok,
%DynamicImmediatePixPayment{
calendario: %Calendario{
apresentacao: ~U[2020-11-28 03:15:39Z],
criacao: ~U[2020-11-13 23:59:49Z],
expiracao: 86400
},
chave: payment.chave,
devedor: nil,
infoAdicionais: [],
revisao: 0,
solicitacaoPagador: nil,
status: :ATIVA,
txid: "4DE46328260C11EB91C04049FC2CA371",
valor: %Valor{original: Decimal.new("1.00")}
}} == DynamicPixLoader.load_pix(@client, pix_url)
x5t = ctx.jwks["keys"] |> hd() |> Map.get("x5t")
kid = ctx.jwks["keys"] |> hd() |> Map.get("kid")
key = {x5t, kid}
assert %{^key => _} = :persistent_term.get(ctx.jku)
end
end
test "can skip certifica validations", %{jku: jku} = ctx do
payment = build_pix_payment()
pix_url = "https://somepixpsp.br/pix/v2/#{Ecto.UUID.generate()}"
Tesla.Mock.mock(fn
%{url: ^pix_url} ->
%{}
|> Joken.generate_and_sign!(payment, ctx.signerS256)
|> Tesla.Mock.text(headers: [{"content-type", "application/jose"}])
%{url: ^jku} ->
key = ctx.jwks["keys"] |> hd()
key = %{key | "x5c" => Enum.reverse(key["x5c"])}
Tesla.Mock.json(%{keys: [key]})
end)
assert {:ok,
%DynamicImmediatePixPayment{
calendario: %Calendario{
apresentacao: ~U[2020-11-28 03:15:39Z],
criacao: ~U[2020-11-13 23:59:49Z],
expiracao: 86400
},
chave: "14413050762",
devedor: nil,
infoAdicionais: [],
revisao: 0,
solicitacaoPagador: nil,
status: :ATIVA,
txid: "4DE46328260C11EB91C04049FC2CA371",
valor: %Valor{original: Decimal.new("1.00")}
}} ==
DynamicPixLoader.load_pix(@client, pix_url,
leaf_certificate_should_fail: false,
x5c_should_fail: false
)
x5t = ctx.jwks["keys"] |> hd() |> Map.get("x5t")
kid = ctx.jwks["keys"] |> hd() |> Map.get("kid")
key = {x5t, kid}
assert %{^key => _} = :persistent_term.get(ctx.jku)
end
end
defp build_pix_payment do
%{
calendario: %{
apresentacao: "2020-11-28 03:15:39Z",
criacao: "2020-11-13 23:59:49Z",
expiracao: 86400
},
chave: "14413050762",
devedor: nil,
infoAdicionais: [],
revisao: 0,
solicitacaoPagador: nil,
status: :ATIVA,
txid: "4DE46328260C11EB91C04049FC2CA371",
valor: %{original: "1.00"}
}
end
defp with_key(payment, :cpf), do: %{payment | chave: Brcpfcnpj.cpf_generate()}
defp with_key(payment, :cnpj), do: %{payment | chave: Brcpfcnpj.cnpj_generate()}
defp with_key(payment, :phone), do: %{payment | chave: "+5521987676565"}
defp with_key(payment, :email), do: %{payment | chave: "[email protected]"}
defp with_key(payment, :random_key), do: %{payment | chave: Ecto.UUID.generate()}
end
| 30.652459 | 95 | 0.508183 |
f7d24e2710b6be86b403a2ead0a43039ef725a81 | 66 | exs | Elixir | test/earmark_test.exs | raygesualdo/earmark | 8c63366140d81205530cf68960f3d91ae85e63b7 | [
"Apache-1.1"
] | null | null | null | test/earmark_test.exs | raygesualdo/earmark | 8c63366140d81205530cf68960f3d91ae85e63b7 | [
"Apache-1.1"
] | null | null | null | test/earmark_test.exs | raygesualdo/earmark | 8c63366140d81205530cf68960f3d91ae85e63b7 | [
"Apache-1.1"
] | 1 | 2020-03-31T19:53:15.000Z | 2020-03-31T19:53:15.000Z |
defmodule EarmarkTest do
use ExUnit.Case
doctest Earmark
end
| 11 | 24 | 0.787879 |
f7d25575b7f14c321c818f22ab2cb3acee668fe9 | 1,796 | exs | Elixir | mix.exs | douglastofoli/inmana | 0c2bd661f5a3c83557dc3fd1d094a195dae3a839 | [
"MIT"
] | null | null | null | mix.exs | douglastofoli/inmana | 0c2bd661f5a3c83557dc3fd1d094a195dae3a839 | [
"MIT"
] | null | null | null | mix.exs | douglastofoli/inmana | 0c2bd661f5a3c83557dc3fd1d094a195dae3a839 | [
"MIT"
] | null | null | null |
defmodule Inmana.MixProject do
use Mix.Project
def project do
[
app: :inmana,
version: "0.1.0",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Inmana.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.5.8"},
{:phoenix_ecto, "~> 4.1"},
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:phoenix_live_dashboard, "~> 0.4"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:dotenv, "~> 3.0.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
]
end
end
| 27.212121 | 84 | 0.572383 |
f7d269afacbe26308f6a2ec8ef66571d68545df1 | 1,060 | ex | Elixir | lib/timber/logfmt_encoder.ex | girishramnani/timber-elixir | 7fda5c3cb5e765a34524d2ec21cfbd5b30240bd5 | [
"ISC"
] | null | null | null | lib/timber/logfmt_encoder.ex | girishramnani/timber-elixir | 7fda5c3cb5e765a34524d2ec21cfbd5b30240bd5 | [
"ISC"
] | null | null | null | lib/timber/logfmt_encoder.ex | girishramnani/timber-elixir | 7fda5c3cb5e765a34524d2ec21cfbd5b30240bd5 | [
"ISC"
] | null | null | null |
defmodule Timber.LogfmtEncoder do
@moduledoc false
# Internal module for encoding maps to the logfmt standard
@spec encode!(map) :: IO.chardata
def encode!(value) when is_map(value) do
Enum.reduce(value, [], &encode_pair/2)
end
defp encode_pair({k1, value_map}, acc) when is_map(value_map) do
Enum.reduce(value_map, acc, fn ({k2, v}, a) ->
encode_pair({[to_string(k1), ?., to_string(k2)], v}, a)
end)
end
defp encode_pair({key, values}, acc) when is_list(values) do
Enum.reduce(values, acc, fn (value, a)->
encode_pair({key, value}, a)
end)
end
defp encode_pair({key, value}, acc) do
add_key_value(key, value, acc)
end
defp add_key_value(key, value, []) do
[to_bin(key), ?=, to_bin(value)]
end
defp add_key_value(key, value, acc) do
[to_bin(key), ?=, to_bin(value), ?\s | acc]
end
defp to_bin(value) when is_binary(value) do
if String.contains?(value, " ") do
[?", value, ?"]
else
value
end
end
defp to_bin(value) do
to_string(value)
end
end
| 23.043478 | 66 | 0.633019 |
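A rough usage sketch for the encoder in the row above (map iteration order is not guaranteed, so treat the exact output as illustrative):

```elixir
# Nested maps become dotted keys; values containing spaces are quoted.
# Roughly: user.id=42 user.name="Jane Doe"
%{user: %{id: 42, name: "Jane Doe"}}
|> Timber.LogfmtEncoder.encode!()
|> IO.iodata_to_binary()
```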
f7d27c5db200a510f7347a44f63853a8b0271d1c | 49 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_relational_operation_parsing_test_case/CaptureNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_relational_operation_parsing_test_case/CaptureNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_relational_operation_parsing_test_case/CaptureNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z |
&one < &two
&one > &two
&one <= &two
&one >= &two
 | 12.25 | 12 | 0.489796 |
f7d2939ed62e9dcb2122fb6333fda1b8810871fd | 1,003 | ex | Elixir | examples/basic/lib/basic.ex | gliush/ex_ray | bfbf83d99626cfe4551efccb0b38291797f1e6ba | [
"Apache-2.0"
] | 60 | 2017-10-27T22:24:47.000Z | 2021-07-15T23:58:46.000Z | examples/basic/lib/basic.ex | gliush/ex_ray | bfbf83d99626cfe4551efccb0b38291797f1e6ba | [
"Apache-2.0"
] | 2 | 2018-05-29T01:29:20.000Z | 2020-04-14T09:05:56.000Z | examples/basic/lib/basic.ex | gliush/ex_ray | bfbf83d99626cfe4551efccb0b38291797f1e6ba | [
"Apache-2.0"
] | 9 | 2017-10-27T18:52:45.000Z | 2020-11-13T19:47:27.000Z |
defmodule Basic do
use ExRay, pre: :before_fun, post: :after_fun
require Logger
alias ExRay.Span
# Generates a request id
@req_id :os.system_time(:milli_seconds) |> Integer.to_string
@trace kind: :critical
@spec fred(integer, integer) :: integer
def fred(a, b), do: a+b
# Called before the annotated function fred is called. Allows to start
# a span and decorate it with tags and log information
defp before_fun(ctx) do
Logger.debug(">>> Starting span for `#{ctx.target}...")
ctx.target
|> Span.open(@req_id)
|> :otter.tag(:kind, ctx.meta[:kind])
|> :otter.log(">>> #{ctx.target} with #{ctx.args |> inspect}")
end
# Called once the annotated function is called. In this hook you can
# add additional span info and close the span as we are all done here.
defp after_fun(ctx, span, res) do
Logger.debug("<<< Closing span for `#{ctx.target}...")
span
|> :otter.log("<<< #{ctx.target} returned #{res}")
|> Span.close(@req_id)
end
end
| 29.5 | 72 | 0.658026 |
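As the comments in the row above explain, every call to the annotated function runs the `before_fun`/`after_fun` hooks; a hedged usage sketch (assumes the ExRay/`:otter` applications are started and configured):

```elixir
# Opens a span tagged kind: :critical, logs the arguments, runs fred/2,
# then logs the result and closes the span keyed by @req_id.
3 = Basic.fred(1, 2)
```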
f7d2acf0a1cd433a3f977e20b1586e57f644d8fd | 1,806 | ex | Elixir | clients/cloud_build/lib/google_api/cloud_build/v1/model/list_build_triggers_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/lib/google_api/cloud_build/v1/model/list_build_triggers_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/cloud_build/lib/google_api/cloud_build/v1/model/list_build_triggers_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudBuild.V1.Model.ListBuildTriggersResponse do
@moduledoc """
Response containing existing `BuildTriggers`.
## Attributes
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token to receive the next page of results.
* `triggers` (*type:* `list(GoogleApi.CloudBuild.V1.Model.BuildTrigger.t)`, *default:* `nil`) - `BuildTriggers` for the project, sorted by `create_time` descending.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:nextPageToken => String.t(),
:triggers => list(GoogleApi.CloudBuild.V1.Model.BuildTrigger.t())
}
field(:nextPageToken)
field(:triggers, as: GoogleApi.CloudBuild.V1.Model.BuildTrigger, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CloudBuild.V1.Model.ListBuildTriggersResponse do
def decode(value, options) do
GoogleApi.CloudBuild.V1.Model.ListBuildTriggersResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudBuild.V1.Model.ListBuildTriggersResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.12 | 168 | 0.741417 |
f7d2d3846488f5045c2683217959a3cc2b12fecd | 3,717 | exs | Elixir | .credo.exs | sineed/veritaserum | 40ce475baaf2f8f95a8e51007242f9e0d23caf22 | [
"Apache-2.0"
] | 81 | 2017-05-17T20:29:29.000Z | 2022-03-06T21:11:30.000Z | .credo.exs | chingan-tsc/veritaserum | 724d4c49e188f284c9acc90ea9743bfaac6a8b55 | [
"Apache-2.0"
] | 5 | 2017-09-14T13:05:05.000Z | 2022-01-05T11:40:57.000Z | .credo.exs | chingan-tsc/veritaserum | 724d4c49e188f284c9acc90ea9743bfaac6a8b55 | [
"Apache-2.0"
] | 9 | 2017-07-21T20:05:11.000Z | 2021-10-12T15:14:57.000Z |
%{
configs: [
%{
name: "default",
files: %{
included: ["lib/"],
excluded: [~r"/_build/", ~r"/deps/"]
},
check_for_updates: true,
strict: true,
color: true,
checks: [
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.LineEndings},
{Credo.Check.Consistency.MultiAliasImportRequireUse},
{Credo.Check.Consistency.ParameterPatternMatching},
{Credo.Check.Consistency.SpaceAroundOperators},
{Credo.Check.Consistency.SpaceInParentheses},
{Credo.Check.Consistency.TabsOrSpaces},
{Credo.Check.Design.AliasUsage, priority: :low},
{Credo.Check.Design.DuplicatedCode, excluded_macros: []},
{Credo.Check.Design.TagTODO, exit_status: 2},
{Credo.Check.Design.TagFIXME},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 80},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc},
{Credo.Check.Readability.ModuleNames},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.ParenthesesInCondition},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine},
{Credo.Check.Readability.TrailingWhiteSpace},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Readability.Semicolons},
{Credo.Check.Readability.SpaceAfterCommas},
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart, false},
{Credo.Check.Refactor.UnlessWithElse},
{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.LazyLogging},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
# Controversial and experimental checks (opt-in, just remove `, false`)
#
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
# Deprecated checks (these will be deleted after a grace period)
{Credo.Check.Readability.Specs, false},
{Credo.Check.Warning.NameRedeclarationByAssignment, false},
{Credo.Check.Warning.NameRedeclarationByCase, false},
{Credo.Check.Warning.NameRedeclarationByDef, false},
{Credo.Check.Warning.NameRedeclarationByFn, false},
# Custom checks can be created using `mix credo.gen.check`.
]
}
]
}
| 43.729412 | 80 | 0.679042 |
f7d2eef8aa5da831b7251d80680c88521185ab5e | 1,065 | exs | Elixir | test/radiator_web/graphql/admin/schema/query/podcasts_test.exs | bhtabor/radiator | 39c137a18d36d6f418f9d1ffb7aa2c99011d66cf | [
"MIT"
] | 92 | 2019-01-03T11:46:23.000Z | 2022-02-19T21:28:44.000Z | test/radiator_web/graphql/admin/schema/query/podcasts_test.exs | bhtabor/radiator | 39c137a18d36d6f418f9d1ffb7aa2c99011d66cf | [
"MIT"
] | 350 | 2019-04-11T07:55:51.000Z | 2021-08-03T11:19:05.000Z | test/radiator_web/graphql/admin/schema/query/podcasts_test.exs | bhtabor/radiator | 39c137a18d36d6f418f9d1ffb7aa2c99011d66cf | [
"MIT"
] | 10 | 2019-04-18T12:47:27.000Z | 2022-01-25T20:49:15.000Z |
defmodule RadiatorWeb.GraphQL.Admin.Schema.Query.PodcastsTest do
use RadiatorWeb.ConnCase, async: true
import Radiator.Factory
@doc """
Generate user and add auth token to connection.
"""
def setup_user_and_conn(%{conn: conn}) do
user = Radiator.TestEntries.user()
[
conn: Radiator.TestEntries.put_current_user(conn, user),
user: user
]
end
setup :setup_user_and_conn
@single_query """
query ($id: ID!) {
podcast(id: $id) {
id
title
publishState
}
}
"""
test "podcast returns a podcast", %{conn: conn, user: user} do
podcast = insert(:podcast, title: "Lorem") |> publish() |> owned_by(user)
conn = get conn, "/api/graphql", query: @single_query, variables: %{"id" => podcast.id}
assert json_response(conn, 200) == %{
"data" => %{
"podcast" => %{
"id" => Integer.to_string(podcast.id),
"title" => "Lorem",
"publishState" => "published"
}
}
}
end
end
| 23.152174 | 91 | 0.558685 |
f7d2f0d24368bac59eee1390df1699a74c31e419 | 1,039 | exs | Elixir | config/config.exs | Aridjar/demo_fixtures | 3f427c3bd9aa6debab21b75122e0ba164f7fc217 | [
"MIT"
] | null | null | null | config/config.exs | Aridjar/demo_fixtures | 3f427c3bd9aa6debab21b75122e0ba164f7fc217 | [
"MIT"
] | 2 | 2021-03-09T19:18:16.000Z | 2021-05-10T16:10:11.000Z | config/config.exs | Aridjar/demo_fixtures | 3f427c3bd9aa6debab21b75122e0ba164f7fc217 | [
"MIT"
] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :demo_fixtures,
ecto_repos: [DemoFixtures.Repo]
# Configures the endpoint
config :demo_fixtures, DemoFixturesWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "5QeICDoJwXvEQuwcjPXKLOZIe8T/SmLHVoAff1VtaVWlK/kZ3rCy3SwE2j48ej3v",
render_errors: [view: DemoFixturesWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: DemoFixtures.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 33.516129 | 86 | 0.776708 |
f7d3020ba2e0cc38df2c543f2652571c3865b5af | 1,683 | ex | Elixir | lib/sobelow/config/csrf.ex | tmecklem/sobelow | 76b441da408b0156a05fa208a8426c63f3536fe5 | [
"Apache-2.0"
] | null | null | null | lib/sobelow/config/csrf.ex | tmecklem/sobelow | 76b441da408b0156a05fa208a8426c63f3536fe5 | [
"Apache-2.0"
] | null | null | null | lib/sobelow/config/csrf.ex | tmecklem/sobelow | 76b441da408b0156a05fa208a8426c63f3536fe5 | [
"Apache-2.0"
] | null | null | null |
defmodule Sobelow.Config.CSRF do
@moduledoc """
# Cross-Site Request Forgery
In a Cross-Site Request Forgery (CSRF) attack, an untrusted
application can cause a user's browser to submit requests or perform
actions on the user's behalf.
Read more about CSRF here:
https://www.owasp.org/index.php/Cross-Site_Request_Forgery_(CSRF)
Cross-Site Request Forgery is flagged by `sobelow` when
a pipeline accepts "html" requests, but does not implement
the `:protect_from_forgery` plug.
CSRF checks can be ignored with the following command:
$ mix sobelow -i Config.CSRF
"""
alias Sobelow.Utils
use Sobelow.Finding
def run(router, _) do
Utils.get_pipelines(router)
|> Enum.each(&is_vuln_pipeline?/1)
end
defp is_vuln_pipeline?(pipeline) do
if Utils.is_vuln_pipeline?(pipeline, :csrf) do
add_finding(pipeline)
end
end
defp add_finding({:pipeline, [line: line_no], [pipeline_name, _]} = pipeline) do
type = "Missing CSRF Protections"
case Sobelow.format() do
"json" ->
finding = [
type: type,
pipeline: "#{pipeline_name}:#{line_no}"
]
Sobelow.log_finding(finding, :high)
"txt" ->
Sobelow.log_finding(type, :high)
IO.puts(IO.ANSI.red() <> type <> " - High Confidence" <> IO.ANSI.reset())
IO.puts("Pipeline: #{pipeline_name}:#{line_no}")
if Sobelow.get_env(:verbose), do: Utils.print_code(pipeline, pipeline_name)
IO.puts("\n-----------------------------------------------\n")
"compact" ->
Utils.log_compact_finding(type, :high)
_ ->
Sobelow.log_finding(type, :high)
end
end
end
| 27.590164 | 83 | 0.635769 |
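To make the flagging rule from the Sobelow moduledoc concrete, a hedged sketch of a Phoenix router pipeline that would be reported versus one that would not (illustrative, not part of the dataset row):

```elixir
# Flagged: accepts "html" requests but never runs the CSRF plug.
pipeline :browser_unsafe do
  plug :accepts, ["html"]
  plug :fetch_session
end

# Not flagged: :protect_from_forgery is present.
pipeline :browser do
  plug :accepts, ["html"]
  plug :fetch_session
  plug :protect_from_forgery
end
```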
f7d31dd026c8a3e1a1ac2a3e8a6eb9bc207f7bea | 465 | ex | Elixir | src/Control/Bind.ex | sriduth/purescript-prelude | 018250e052ec11dc6faae61ea41c1a11fdf64e0e | [
"MIT"
] | null | null | null | src/Control/Bind.ex | sriduth/purescript-prelude | 018250e052ec11dc6faae61ea41c1a11fdf64e0e | [
"MIT"
] | null | null | null | src/Control/Bind.ex | sriduth/purescript-prelude | 018250e052ec11dc6faae61ea41c1a11fdf64e0e | [
"MIT"
] | null | null | null |
defmodule Bind.Foreign do
  def arrayBind do
    fn arr ->
      fn f ->
        # Flat-map: apply f to every element and concatenate the resulting
        # lists, mirroring the JavaScript loop kept in the comments below.
        Enum.flat_map(arr, f)
      end
    end
  end
end
# "use strict";
# exports.arrayBind = function (arr) {
# return function (f) {
# var result = [];
# for (var i = 0, l = arr.length; i < l; i++) {
# Array.prototype.push.apply(result, f(arr[i]));
# }
# return result;
# };
# };
| 16.607143 | 54 | 0.52043 |
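A quick usage sketch of the curried `arrayBind` above, matching the JavaScript semantics kept in the comments:

```elixir
# Array bind is concatMap: each element maps to a list and the lists are
# concatenated in order.
bind = Bind.Foreign.arrayBind()
[1, 10, 2, 20, 3, 30] = bind.([1, 2, 3]).(fn x -> [x, x * 10] end)
```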
f7d339cc0869f0eb55b5f2706161fc737cee7f89 | 176 | ex | Elixir | lib/requestbox_web/views/session_view.ex | kevinastone/phoenixbin | 8b7326b5de1fe9961c1a2d7971a3d4abe7178829 | [
"MIT"
] | 18 | 2015-11-18T09:52:34.000Z | 2021-04-27T19:38:08.000Z | lib/requestbox_web/views/session_view.ex | kevinastone/phoenixbin | 8b7326b5de1fe9961c1a2d7971a3d4abe7178829 | [
"MIT"
] | 3 | 2017-01-11T18:55:39.000Z | 2021-06-15T05:46:34.000Z | lib/requestbox_web/views/session_view.ex | kevinastone/phoenixbin | 8b7326b5de1fe9961c1a2d7971a3d4abe7178829 | [
"MIT"
] | 7 | 2016-08-17T10:24:20.000Z | 2020-07-10T13:00:36.000Z |
defmodule RequestboxWeb.SessionView do
use Requestbox.Web, :view
use RequestboxWeb.Helpers.HTTP
use RequestboxWeb.Helpers.Date
use RequestboxWeb.Helpers.Pagination
end
| 25.142857 | 38 | 0.823864 |
f7d341cff3306efa139301d0b9a110aafb85c7e7 | 1,759 | ex | Elixir | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/list_agent_pools_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/list_agent_pools_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/list_agent_pools_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.StorageTransfer.V1.Model.ListAgentPoolsResponse do
@moduledoc """
Response from ListAgentPools.
## Attributes
* `agentPools` (*type:* `list(GoogleApi.StorageTransfer.V1.Model.AgentPool.t)`, *default:* `nil`) - A list of agent pools.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The list next page token.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:agentPools => list(GoogleApi.StorageTransfer.V1.Model.AgentPool.t()) | nil,
:nextPageToken => String.t() | nil
}
field(:agentPools, as: GoogleApi.StorageTransfer.V1.Model.AgentPool, type: :list)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.StorageTransfer.V1.Model.ListAgentPoolsResponse do
def decode(value, options) do
GoogleApi.StorageTransfer.V1.Model.ListAgentPoolsResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.StorageTransfer.V1.Model.ListAgentPoolsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.18 | 126 | 0.739056 |
f7d35cdf8d7ac10e3ab3eb4f11ce6f34ee03b47e | 724 | ex | Elixir | web/gettext.ex | sotte/python_channels | dcec42d4a787cbb52c8be13a576e5dcadc1fda00 | [
"MIT"
] | null | null | null | web/gettext.ex | sotte/python_channels | dcec42d4a787cbb52c8be13a576e5dcadc1fda00 | [
"MIT"
] | null | null | null | web/gettext.ex | sotte/python_channels | dcec42d4a787cbb52c8be13a576e5dcadc1fda00 | [
"MIT"
] | null | null | null |
defmodule PythonChannels.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import PythonChannels.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :python_channels
end
| 28.96 | 72 | 0.687845 |
f7d37128b6b224f64429b12e933b0a575cb99aee | 73 | exs | Elixir | test/views/layout_view_test.exs | Cobenian/rackit | e886ebbf1a590d4031d5f79f868ce2d725e9910a | [
"Apache-2.0"
] | null | null | null | test/views/layout_view_test.exs | Cobenian/rackit | e886ebbf1a590d4031d5f79f868ce2d725e9910a | [
"Apache-2.0"
] | 6 | 2015-06-10T13:33:57.000Z | 2015-06-10T14:10:06.000Z | test/views/layout_view_test.exs | Cobenian/rackit | e886ebbf1a590d4031d5f79f868ce2d725e9910a | [
"Apache-2.0"
] | null | null | null |
defmodule Rackit.LayoutViewTest do
use Rackit.ConnCase, async: true
end
 | 24.333333 | 34 | 0.821918 |
f7d38849d30663d96e8ca941457b20f276a0d40c | 1,051 | exs | Elixir | mix.exs | membraneframework/membrane_h264_format | 5483c497a6f88c250c81ac969bc77a615cdd294d | [
"Apache-2.0"
] | null | null | null | mix.exs | membraneframework/membrane_h264_format | 5483c497a6f88c250c81ac969bc77a615cdd294d | [
"Apache-2.0"
] | 1 | 2022-01-10T09:15:49.000Z | 2022-01-10T09:15:49.000Z | mix.exs | membraneframework/membrane-caps-video-h264 | 5483c497a6f88c250c81ac969bc77a615cdd294d | [
"Apache-2.0"
] | null | null | null |
defmodule Membrane.H264.Mixfile do
use Mix.Project
@version "0.3.0"
@github_url "https://github.com/membraneframework/membrane_h264_format"
def project do
[
app: :membrane_h264_format,
version: @version,
elixir: "~> 1.7",
description: "Membrane Multimedia Framework (H264 video format definition)",
package: package(),
name: "Membrane H264 Format",
source_url: @github_url,
docs: docs(),
deps: deps()
]
end
defp docs do
[
main: "readme",
extras: ["README.md", "LICENSE"],
source_ref: "v#{@version}"
]
end
defp package do
[
maintainers: ["Membrane Team"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => @github_url,
"Membrane Framework Homepage" => "https://membraneframework.org"
}
]
end
defp deps do
[
{:ex_doc, "~> 0.24", only: :dev, runtime: false},
{:dialyxir, "~> 1.1", only: :dev, runtime: false},
{:credo, "~> 1.5", only: :dev, runtime: false}
]
end
end
| 21.895833 | 82 | 0.567079 |
f7d3ced18992ae9ad2cc7283434770a1014797e0 | 476 | ex | Elixir | lib/langue/formatter/strings/parser.ex | suryatmodulus/accent | 6aaf34075c33f3d9d84d38237af4a39b594eb808 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | lib/langue/formatter/strings/parser.ex | suryatmodulus/accent | 6aaf34075c33f3d9d84d38237af4a39b594eb808 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | lib/langue/formatter/strings/parser.ex | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z |
defmodule Langue.Formatter.Strings.Parser do
@behaviour Langue.Formatter.Parser
alias Langue.Utils.{LineByLineHelper, Placeholders}
@prop_line_regex ~r/^(?<comment>.+)?"(?<key>.+)" ?= ?"(?<value>.*)"$/sm
def parse(%{render: render}) do
entries =
render
|> LineByLineHelper.Parser.lines(@prop_line_regex, ";\n")
|> Placeholders.parse(Langue.Formatter.Strings.placeholder_regex())
%Langue.Formatter.ParserResult{entries: entries}
end
end
| 28 | 73 | 0.686975 |
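For reference, the `@prop_line_regex` in the parser row above targets `"key" = "value"` lines (Apple `.strings` style, split on `";\n"` by the line helper); a hedged illustration of its named captures:

```elixir
# The optional comment group does not participate here, so it comes back as "".
Regex.named_captures(
  ~r/^(?<comment>.+)?"(?<key>.+)" ?= ?"(?<value>.*)"$/sm,
  ~s("greeting" = "Hello")
)
#=> %{"comment" => "", "key" => "greeting", "value" => "Hello"}
```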
f7d40b0ba80b4b32f4773b6ae33fa5238ecc42e1 | 1,116 | exs | Elixir | priv/repo/migrations/20201016124426_migrate_daa_to_metric_signal.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 1 | 2022-01-30T19:51:39.000Z | 2022-01-30T19:51:39.000Z | priv/repo/migrations/20201016124426_migrate_daa_to_metric_signal.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | priv/repo/migrations/20201016124426_migrate_daa_to_metric_signal.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | defmodule Sanbase.Repo.Migrations.MigrateDaaToMetricAlert do
use Ecto.Migration
alias Sanbase.Alert.Trigger.{
DailyActiveAddressesSettings,
MetricTriggerSettings
}
alias Sanbase.Alert.UserTrigger
def up do
setup()
migrate_daa_signals()
end
def down do
:ok
end
defp migrate_daa_signals() do
DailyActiveAddressesSettings.type()
|> UserTrigger.get_all_triggers_by_type()
|> Enum.each(fn user_trigger ->
%{trigger: %{settings: settings}} = user_trigger
{:ok, _} =
UserTrigger.update_user_trigger(user_trigger.user.id, %{
id: user_trigger.id,
settings: %{
type: MetricTriggerSettings.type(),
metric: "active_addresses_24h",
target: settings.target,
channel: settings.channel,
time_window: settings.time_window,
operation: settings.operation
}
})
end)
end
defp setup() do
Application.ensure_all_started(:tzdata)
Application.ensure_all_started(:prometheus_ecto)
Sanbase.Prometheus.EctoInstrumenter.setup()
end
end
| 23.744681 | 64 | 0.660394 |
f7d412e7bda6e56255a43068e956a2802fdba3f5 | 5,400 | ex | Elixir | lib/amqp/application/connection.ex | alappe/amqp | 1beda620362ba75b3cedea1c48fffd19be026042 | [
"MIT"
] | null | null | null | lib/amqp/application/connection.ex | alappe/amqp | 1beda620362ba75b3cedea1c48fffd19be026042 | [
"MIT"
] | null | null | null | lib/amqp/application/connection.ex | alappe/amqp | 1beda620362ba75b3cedea1c48fffd19be026042 | [
"MIT"
] | null | null | null |
defmodule AMQP.Application.Connection do
@moduledoc false
# This module will stay as a private module at least during 2.0.x.
# There might be non backward compatible changes on this module on 2.1.x.
use GenServer
require Logger
alias AMQP.Connection
@default_interval 5_000
@doc """
Starts a GenServer process linked to the current process.
It expects options to be a combination of connection args, proc_name and retry_interval.
## Examples
Combines name and retry interval with the connection options.
iex> opts = [proc_name: :my_conn, retry_interval: 10_000, host: "localhost"]
iex> {:ok, pid} = AMQP.Application.Connection.start_link(opts)
iex> {:ok, conn} = AMQP.Application.Connection.get_connection(:my_conn)
Passes URL instead of options and use a default proc name when you need only a single connection.
iex> opts = [url: "amqp://guest:guest@localhost"]
iex> :ok = AMQP.Application.Connection.start_link(opts)
iex> {:ok, conn} = AMQP.Application.Connection.get_connection()
iex> {:ok, conn} = AMQP.Application.Connection.get_connection(:default)
"""
@spec start_link(keyword) :: GenServer.on_start()
def start_link(opts) do
{name, init_arg} = link_opts_to_init_arg(opts)
GenServer.start_link(__MODULE__, init_arg, name: name)
end
@doc false
def start(opts) do
{name, init_arg} = link_opts_to_init_arg(opts)
GenServer.start(__MODULE__, init_arg, name: name)
end
defp link_opts_to_init_arg(opts) do
proc_name = Keyword.get(opts, :proc_name, :default)
server_name = get_server_name(proc_name)
retry_interval = Keyword.get(opts, :retry_interval, @default_interval)
open_arg = Keyword.drop(opts, [:proc_name, :retry_interval])
init_arg = %{
retry_interval: retry_interval,
open_arg: open_arg,
name: proc_name,
connection: nil,
monitor_ref: nil
}
{server_name, init_arg}
end
@doc """
Returns a GenServer reference for the connection name.
"""
@spec get_server_name(binary | atom) :: binary
def get_server_name(name) do
:"#{__MODULE__}::#{name}"
end
@doc false
def get_state(name \\ :default) do
GenServer.call(get_server_name(name), :get_state)
catch
:exit, {:timeout, _} -> %{}
end
@doc """
Returns pid for the server referred by the name.
It is a wrapper of `GenServer.whereis/1`.
"""
@spec whereis(binary() | atom()) :: pid() | {atom(), node()} | nil
def whereis(name) do
name
|> get_server_name()
|> GenServer.whereis()
end
@doc """
Returns a connection referred by the name.
"""
@spec get_connection(binary | atom) :: {:ok, Connection.t()} | {:error, any}
def get_connection(name \\ :default) do
with false <- name |> whereis() |> is_nil(),
conn = %{} <- GenServer.call(get_server_name(name), :get_connection) do
{:ok, conn}
else
true -> {:error, :connection_not_found}
nil -> {:error, :not_connected}
end
catch
:exit, {:timeout, _} ->
# This would happen when the connection is stuck when opening.
# See do_open/1 to understand - it can block the GenServer.
{:error, :timeout}
end
@impl true
def init(state) do
send(self(), :connect)
Process.flag(:trap_exit, true)
{:ok, state}
end
@impl true
def handle_call(:get_state, _, state) do
{:reply, state, state}
end
def handle_call(:get_connection, _, state) do
if state[:connection] && Process.alive?(state[:connection].pid) do
{:reply, state[:connection], state}
else
{:reply, nil, state}
end
end
@impl true
def handle_info(:connect, state) do
case do_open(state[:open_arg]) do
{:ok, conn} ->
# Get notifications when the connection goes down
ref = Process.monitor(conn.pid)
{:noreply, %{state | connection: conn, monitor_ref: ref}}
{:error, _} ->
Logger.error("Failed to open AMQP connection (#{state[:name]}). Retrying later...")
# Retry later
Process.send_after(self(), :connect, state[:retry_interval])
{:noreply, state}
end
end
def handle_info({:DOWN, _, :process, pid, _reason}, %{connection: %{pid: pid}} = state)
when is_pid(pid) do
Logger.info("AMQP connection is gone (#{state[:name]}). Reconnecting...")
send(self(), :connect)
{:noreply, %{state | connection: nil, monitor_ref: nil}}
end
def handle_info({:EXIT, _from, reason}, state) do
close(state)
{:stop, reason, %{state | connection: nil, monitor_ref: nil}}
end
# When GenServer call gets timeout and the message arrives later,
# it attempts to deliver the message to the server inbox.
# AMQP handles the message but simply ignores it.
#
# See `GenServer.call/3` for more details.
def handle_info({ref, _res}, state) when is_reference(ref) do
{:noreply, state}
end
@impl true
def terminate(_reason, state) do
close(state)
%{state | connection: nil, monitor_ref: nil}
end
defp close(%{connection: %Connection{} = conn, monitor_ref: ref}) do
if Process.alive?(conn.pid) do
Process.demonitor(ref)
Connection.close(conn)
end
end
defp close(_), do: :ok
defp do_open(options) do
if url = options[:url] do
Connection.open(url, Keyword.delete(options, :url))
else
Connection.open(options)
end
end
end
| 28.272251 | 99 | 0.657593 |
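Because the module above is a plain GenServer whose `start_link/1` takes a keyword list, it can also sit in a supervision tree; a hedged sketch building on the moduledoc examples (process name and URL are placeholders):

```elixir
# `use GenServer` provides a default child_spec/1, so the options list is
# handed straight to start_link/1. get_connection/1 may still return
# {:error, :not_connected} until the first connect attempt succeeds.
children = [
  {AMQP.Application.Connection,
   proc_name: :my_conn, retry_interval: 10_000, url: "amqp://guest:guest@localhost"}
]

{:ok, _sup} = Supervisor.start_link(children, strategy: :one_for_one)
{:ok, conn} = AMQP.Application.Connection.get_connection(:my_conn)
```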
f7d41729bc5518761e7a7954a13c1b8a71ac7441 | 1,252 | exs | Elixir | test/level_web/graphql/subscriptions/post_closed_test.exs | denismitr/level | 0b01b0e46c252510dd50b3558d987c6f1e0b1da9 | [
"Apache-2.0"
] | 1 | 2019-06-11T20:20:32.000Z | 2019-06-11T20:20:32.000Z | test/level_web/graphql/subscriptions/post_closed_test.exs | denismitr/level | 0b01b0e46c252510dd50b3558d987c6f1e0b1da9 | [
"Apache-2.0"
] | null | null | null | test/level_web/graphql/subscriptions/post_closed_test.exs | denismitr/level | 0b01b0e46c252510dd50b3558d987c6f1e0b1da9 | [
"Apache-2.0"
] | null | null | null |
defmodule LevelWeb.GraphQL.PostClosedTest do
use LevelWeb.ChannelCase
alias Level.Posts
@operation """
subscription PostSubscription(
$id: ID!
) {
postSubscription(postId: $id) {
__typename
... on PostClosedPayload {
post {
id
state
}
}
}
}
"""
setup do
{:ok, result} = create_user_and_space()
{:ok, Map.put(result, :socket, build_socket(result.user))}
end
test "receives an event when a user closes a post", %{socket: socket, space_user: space_user} do
{:ok, %{group: group}} = create_group(space_user)
{:ok, %{post: post}} = create_post(space_user, group)
ref = push_subscription(socket, @operation, %{"id" => post.id})
assert_reply(ref, :ok, %{subscriptionId: subscription_id}, 1000)
{:ok, _} = Posts.close_post(space_user, post)
payload = %{
result: %{
data: %{
"postSubscription" => %{
"__typename" => "PostClosedPayload",
"post" => %{
"id" => post.id,
"state" => "CLOSED"
}
}
}
},
subscriptionId: subscription_id
}
assert_push("subscription:data", ^payload)
end
end
| 23.185185 | 98 | 0.545527 |
f7d45a92a555ce1c81afe631895ed05a29a7a908 | 482 | ex | Elixir | lib/cloister/listener.ex | am-kantox/cloister | b5e661fab5dc2eeb27b175bf0069bf963b1afce6 | [
"MIT"
] | 12 | 2020-05-07T08:57:06.000Z | 2020-08-19T01:32:03.000Z | lib/cloister/listener.ex | am-kantox/cloister | b5e661fab5dc2eeb27b175bf0069bf963b1afce6 | [
"MIT"
] | 21 | 2020-07-12T14:17:22.000Z | 2021-08-03T04:42:30.000Z | lib/cloister/listener.ex | am-kantox/cloister | b5e661fab5dc2eeb27b175bf0069bf963b1afce6 | [
"MIT"
] | null | null | null |
defmodule Cloister.Listener do
@moduledoc """
The behavior to be implemented by `Cloister.Monitor` listeners.
"""
@doc """
Passed to `Cloister.Monitor.start_link/1` and called
on each subsequent monitored node state change.
Listeners are obliged to handle `:up`, `:rehashing` and `:stopping` events.
"""
@callback on_state_change(
from :: Cloister.Monitor.status(),
state :: Cloister.Monitor.t()
) :: :ok
end
| 28.352941 | 77 | 0.649378 |
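A hedged sketch of a module implementing the `Cloister.Listener` behaviour above (the module name and the reaction to each transition are illustrative):

```elixir
defmodule MyApp.ClusterListener do
  @behaviour Cloister.Listener

  require Logger

  @impl Cloister.Listener
  def on_state_change(from, state) do
    # Handle the documented transitions (:up, :rehashing, :stopping) as needed;
    # here we just log the previous status and the monitor state.
    Logger.info("cloister status changed from #{inspect(from)}: #{inspect(state)}")
    :ok
  end
end
```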
f7d46c2f8feba57459f545071ba589ba0595f3a7 | 17,074 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/placement_groups.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/placement_groups.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/placement_groups.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V33.Api.PlacementGroups do
@moduledoc """
API calls for all endpoints tagged `PlacementGroups`.
"""
alias GoogleApi.DFAReporting.V33.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Gets one placement group by ID.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `id` (*type:* `String.t`) - Placement group ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.PlacementGroup{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_placement_groups_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) :: {:ok, GoogleApi.DFAReporting.V33.Model.PlacementGroup.t()} | {:error, Tesla.Env.t()}
def dfareporting_placement_groups_get(
connection,
profile_id,
id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/placementGroups/{id}", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"id" => URI.encode(id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.PlacementGroup{}])
end
@doc """
Inserts a new placement group.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.PlacementGroup.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.PlacementGroup{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_placement_groups_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.DFAReporting.V33.Model.PlacementGroup.t()} | {:error, Tesla.Env.t()}
def dfareporting_placement_groups_insert(
connection,
profile_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/placementGroups", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.PlacementGroup{}])
end
@doc """
Retrieves a list of placement groups, possibly filtered. This method supports paging.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:advertiserIds` (*type:* `list(String.t)`) - Select only placement groups that belong to these advertisers.
* `:archived` (*type:* `boolean()`) - Select only archived placements. Don't set this field to select both archived and non-archived placements.
* `:campaignIds` (*type:* `list(String.t)`) - Select only placement groups that belong to these campaigns.
* `:contentCategoryIds` (*type:* `list(String.t)`) - Select only placement groups that are associated with these content categories.
* `:directorySiteIds` (*type:* `list(String.t)`) - Select only placement groups that are associated with these directory sites.
* `:ids` (*type:* `list(String.t)`) - Select only placement groups with these IDs.
* `:maxEndDate` (*type:* `String.t`) - Select only placements or placement groups whose end date is on or before the specified maxEndDate. The date should be formatted as "yyyy-MM-dd".
* `:maxResults` (*type:* `integer()`) - Maximum number of results to return.
* `:maxStartDate` (*type:* `String.t`) - Select only placements or placement groups whose start date is on or before the specified maxStartDate. The date should be formatted as "yyyy-MM-dd".
* `:minEndDate` (*type:* `String.t`) - Select only placements or placement groups whose end date is on or after the specified minEndDate. The date should be formatted as "yyyy-MM-dd".
* `:minStartDate` (*type:* `String.t`) - Select only placements or placement groups whose start date is on or after the specified minStartDate. The date should be formatted as "yyyy-MM-dd".
* `:pageToken` (*type:* `String.t`) - Value of the nextPageToken from the previous result page.
* `:placementGroupType` (*type:* `String.t`) - Select only placement groups belonging with this group type. A package is a simple group of placements that acts as a single pricing point for a group of tags. A roadblock is a group of placements that not only acts as a single pricing point but also assumes that all the tags in it will be served at the same time. A roadblock requires one of its assigned placements to be marked as primary for reporting.
* `:placementStrategyIds` (*type:* `list(String.t)`) - Select only placement groups that are associated with these placement strategies.
* `:pricingTypes` (*type:* `list(String.t)`) - Select only placement groups with these pricing types.
* `:searchString` (*type:* `String.t`) - Allows searching for placement groups by name or ID. Wildcards (*) are allowed. For example, "placement*2015" will return placement groups with names like "placement group June 2015", "placement group May 2015", or simply "placements 2015". Most of the searches also add wildcards implicitly at the start and the end of the search string. For example, a search string of "placementgroup" will match placement groups with name "my placementgroup", "placementgroup 2015", or simply "placementgroup".
* `:siteIds` (*type:* `list(String.t)`) - Select only placement groups that are associated with these sites.
* `:sortField` (*type:* `String.t`) - Field by which to sort the list.
* `:sortOrder` (*type:* `String.t`) - Order of sorted results.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.PlacementGroupsListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_placement_groups_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.DFAReporting.V33.Model.PlacementGroupsListResponse.t()}
| {:error, Tesla.Env.t()}
def dfareporting_placement_groups_list(
connection,
profile_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:advertiserIds => :query,
:archived => :query,
:campaignIds => :query,
:contentCategoryIds => :query,
:directorySiteIds => :query,
:ids => :query,
:maxEndDate => :query,
:maxResults => :query,
:maxStartDate => :query,
:minEndDate => :query,
:minStartDate => :query,
:pageToken => :query,
:placementGroupType => :query,
:placementStrategyIds => :query,
:pricingTypes => :query,
:searchString => :query,
:siteIds => :query,
:sortField => :query,
:sortOrder => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/placementGroups", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.PlacementGroupsListResponse{}]
)
end
@doc """
Updates an existing placement group. This method supports patch semantics.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `id` (*type:* `String.t`) - Placement group ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.PlacementGroup.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.PlacementGroup{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_placement_groups_patch(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) :: {:ok, GoogleApi.DFAReporting.V33.Model.PlacementGroup.t()} | {:error, Tesla.Env.t()}
def dfareporting_placement_groups_patch(
connection,
profile_id,
id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/placementGroups", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_param(:query, :id, id)
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.PlacementGroup{}])
end
@doc """
Updates an existing placement group.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.PlacementGroup.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.PlacementGroup{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_placement_groups_update(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.DFAReporting.V33.Model.PlacementGroup.t()} | {:error, Tesla.Env.t()}
def dfareporting_placement_groups_update(
connection,
profile_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/placementGroups", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.PlacementGroup{}])
end
end
| 48.231638 | 546 | 0.645074 |
f7d472f3936559a0635dc34ed298c0c55bc60e7b | 589 | ex | Elixir | lib/exkontist/application.ex | lorenzosinisi/exkontist | 723ac521ae32e76ef190c476a4c5116fa40b880a | [
"Apache-2.0"
] | 1 | 2020-01-14T05:03:07.000Z | 2020-01-14T05:03:07.000Z | lib/exkontist/application.ex | lorenzosinisi/exkontist | 723ac521ae32e76ef190c476a4c5116fa40b880a | [
"Apache-2.0"
] | null | null | null | lib/exkontist/application.ex | lorenzosinisi/exkontist | 723ac521ae32e76ef190c476a4c5116fa40b880a | [
"Apache-2.0"
] | null | null | null | defmodule Exkontist.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
# List all child processes to be supervised
children = [
# Starts a worker by calling: Exkontist.Worker.start_link(arg)
# {Exkontist.Worker, arg}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Exkontist.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 28.047619 | 68 | 0.714771 |
f7d4890c29da2bf215c0b4740c8c75639fd36f62 | 537 | ex | Elixir | lib/extenant_web/views/error_view.ex | ThinkForge/extenant | 70a73aed9a5db42a178919562d60861f9aa09bcb | [
"Apache-2.0"
] | null | null | null | lib/extenant_web/views/error_view.ex | ThinkForge/extenant | 70a73aed9a5db42a178919562d60861f9aa09bcb | [
"Apache-2.0"
] | 1 | 2018-11-24T15:09:30.000Z | 2018-11-24T15:09:30.000Z | lib/extenant_web/views/error_view.ex | ThinkForge/extenant | 70a73aed9a5db42a178919562d60861f9aa09bcb | [
"Apache-2.0"
] | null | null | null | defmodule ExtenantWeb.ErrorView do
use ExtenantWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.json", _assigns) do
# %{errors: %{detail: "Internal Server Error"}}
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.json" becomes
# "Not Found".
def template_not_found(template, _assigns) do
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
end
end
| 31.588235 | 83 | 0.72067 |
f7d4906472bf0591ce9ce3a17f511d3b3e117b59 | 441 | ex | Elixir | lib/majudge/elections/vote.ex | coltonw/majudge | 4f81a66abe6a2e82f42131982e7a9b26951b9124 | [
"MIT"
] | null | null | null | lib/majudge/elections/vote.ex | coltonw/majudge | 4f81a66abe6a2e82f42131982e7a9b26951b9124 | [
"MIT"
] | 1 | 2021-05-10T04:23:56.000Z | 2021-05-10T04:23:56.000Z | lib/majudge/elections/vote.ex | coltonw/majudge | 4f81a66abe6a2e82f42131982e7a9b26951b9124 | [
"MIT"
] | null | null | null | defmodule Majudge.Elections.Vote do
use Ecto.Schema
import Ecto.Changeset
alias Majudge.Elections.Ballot
schema "votes" do
field :email, :string
field :name, :string
field :vote, :map
belongs_to :ballot, Ballot
timestamps()
end
@doc false
def changeset(vote, attrs) do
vote
|> cast(attrs, [:name, :email, :vote, :ballot_id])
|> validate_required([:name, :email, :vote, :ballot_id])
end
end
| 20.045455 | 60 | 0.662132 |
f7d4a9cc0bfe3326e404ce415a2ca62891883f7d | 5,764 | ex | Elixir | lib/coherence/redirects.ex | harmon25/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | 1 | 2022-03-06T16:30:21.000Z | 2022-03-06T16:30:21.000Z | lib/coherence/redirects.ex | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | null | null | null | lib/coherence/redirects.ex | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | 2 | 2017-09-22T16:54:36.000Z | 2021-11-09T20:55:58.000Z | defmodule Redirects do
@moduledoc """
Define controller action redirection behaviour.
Defines the default redirect functions for each of the controller
actions that perform redirects. By using this Module you get the following
functions:
* session_create/2
* session_delete/2
* password_create/2
* password_update/2,
* unlock_create_not_locked/2
* unlock_create_invalid/2
* unlock_create/2
* unlock_edit_not_locked/2
* unlock_edit/2
* unlock_edit_invalid/2
* registration_create/2
* registration_delete/2
* invitation_create/2
* confirmation_create/2
* confirmation_edit_invalid/2
* confirmation_edit_expired/2
* confirmation_edit/2
* confirmation_edit_error/2
You can override any of the functions to customize the redirect path. Each
function is passed the `conn` and `params` arguments from the controller.
## Examples
use Redirects
import MyProject.Router.Helpers
# override the log out action back to the log in page
def session_delete(conn, _), do: redirect(conn, session_path(conn, :new))
# redirect the user to the login page after registering
def registration_create(conn, _), do: redirect(conn, session_path(conn, :new))
# disable the user_return_to feature on login
def session_create(conn, _), do: redirect(conn, landing_path(conn, :index))
"""
@callback session_create(conn :: term, params :: term) :: term
@callback session_delete(conn :: term, params :: term) :: term
@callback password_create(conn :: term, params :: term) :: term
@callback password_update(conn :: term, params :: term) :: term
@callback unlock_create(conn :: term, params :: term) :: term
@callback unlock_create_not_locked(conn :: term, params :: term) :: term
@callback unlock_create_invalid(conn :: term, params :: term) :: term
@callback unlock_edit(conn :: term, params :: term) :: term
@callback unlock_edit_not_locked(conn :: term, params :: term) :: term
@callback unlock_edit_invalid(conn :: term, params :: term) :: term
@callback registration_create(conn :: term, params :: term) :: term
@callback registration_update(conn :: term, params :: term, user :: term) :: term
@callback registration_delete(conn :: term, params :: term) :: term
@callback invitation_create(conn :: term, params :: term) :: term
@callback confirmation_create(conn :: term, params :: term) :: term
@callback confirmation_edit_invalid(conn :: term, params :: term) :: term
@callback confirmation_edit_expired(conn :: term, params :: term) :: term
@callback confirmation_edit(conn :: term, params :: term) :: term
@callback confirmation_edit_error(conn :: term, params :: term) :: term
defmacro __using__(_) do
quote location: :keep do
@behaviour Redirects
import Phoenix.Controller, only: [redirect: 2]
import Coherence.ControllerHelpers
import Plug.Conn, only: [get_session: 2, put_session: 3]
@doc false
def session_delete(conn, _), do: redirect(conn, to: logged_out_url(conn))
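      # Successful logins honor the "user_return_to" value stored in the session
      # (falling back to "/") and clear it before redirecting.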
@doc false
def session_create(conn, _) do
url = case get_session(conn, "user_return_to") do
nil -> "/"
value -> value
end
conn
|> put_session("user_return_to", nil)
|> redirect(to: url)
end
@doc false
def password_create(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def password_update(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def unlock_create(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def unlock_create_not_locked(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def unlock_create_invalid(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def unlock_edit(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def unlock_edit_not_locked(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def unlock_edit_invalid(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def registration_create(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
      def registration_update(conn, _, _user) do
        path =
          Application.get_env(:coherence, :module)
          |> Module.concat(Router.Helpers)
          |> apply(:registration_path, [conn, :show])
        redirect(conn, to: path)
      end
@doc false
def registration_delete(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def invitation_create(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def confirmation_create(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def confirmation_edit_invalid(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def confirmation_edit_expired(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def confirmation_edit(conn, _), do: redirect(conn, to: logged_out_url(conn))
@doc false
def confirmation_edit_error(conn, _), do: redirect(conn, to: logged_out_url(conn))
defoverridable [
session_create: 2, session_delete: 2, password_create: 2, password_update: 2,
unlock_create_not_locked: 2, unlock_create_invalid: 2, unlock_create: 2,
unlock_edit_not_locked: 2, unlock_edit: 2, unlock_edit_invalid: 2,
registration_create: 2, registration_update: 3, registration_delete: 2,
invitation_create: 2,
confirmation_create: 2, confirmation_edit_invalid: 2, confirmation_edit_expired: 2,
confirmation_edit: 2, confirmation_edit_error: 2
]
end
end
end
| 37.921053 | 91 | 0.686329 |
f7d5293b93cb505c70afa1f595514cfd8d7c7189 | 3,215 | ex | Elixir | lib/auto_api/states/lights_state.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | lib/auto_api/states/lights_state.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | lib/auto_api/states/lights_state.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | # AutoAPI
# The MIT License
#
# Copyright (c) 2018- High-Mobility GmbH (https://high-mobility.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
defmodule AutoApi.LightsState do
@moduledoc """
Lights state
"""
alias AutoApi.{CommonData, State}
use AutoApi.State, spec_file: "lights.json"
@type front_exterior_light :: :inactive | :active | :active_with_full_beam | :dlr | :automatic
@type ambient_light :: %{
red: integer,
green: integer,
blue: integer
}
@type light :: %{
location: CommonData.location_longitudinal(),
state: CommonData.activity()
}
@type reading_lamp :: %{
location: CommonData.location(),
state: CommonData.activity()
}
@type switch_position ::
:automatic
| :dipped_headlights
| :parking_light_right
| :parking_light_left
| :sidelights
@type t :: %__MODULE__{
front_exterior_light: State.property(front_exterior_light),
rear_exterior_light: State.property(CommonData.activity()),
ambient_light_colour: State.property(ambient_light),
reverse_light: State.property(CommonData.activity()),
emergency_brake_light: State.property(CommonData.activity()),
fog_lights: State.multiple_property(light),
reading_lamps: State.multiple_property(reading_lamp),
interior_lights: State.multiple_property(light),
switch_position: State.property(switch_position())
}
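  # Illustrative sketch (an assumption, not part of the generated spec): a state is
  # an ordinary struct whose values are wrapped in `AutoApi.PropertyComponent`, e.g.
  # `%AutoApi.LightsState{reverse_light: %AutoApi.PropertyComponent{data: :active}}`,
  # and it round-trips through `to_bin/1` / `from_bin/1` as the doctests below show.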
@doc """
Build state based on binary value
iex> bin = <<5, 0, 4, 1, 0, 1, 1>>
iex> AutoApi.LightsState.from_bin(bin)
%AutoApi.LightsState{reverse_light: %AutoApi.PropertyComponent{data: :active}}
"""
@spec from_bin(binary) :: __MODULE__.t()
def from_bin(bin) do
parse_bin_properties(bin, %__MODULE__{})
end
@doc """
Parse state to bin
iex> state = %AutoApi.LightsState{reverse_light: %AutoApi.PropertyComponent{data: :active}}
iex> AutoApi.LightsState.to_bin(state)
<<5, 0, 4, 1, 0, 1, 1>>
"""
@spec to_bin(__MODULE__.t()) :: binary
def to_bin(%__MODULE__{} = state) do
parse_state_properties(state)
end
end
| 36.123596 | 96 | 0.691135 |
f7d5335c02ddf3d1b358327f1c50ef9cefecf8ae | 8,232 | ex | Elixir | lib/kaffy/resource_schema.ex | clszzyh/kaffy | 411766de3bf1b5b88ca0cb5078b7421154c70db5 | [
"MIT"
] | 3 | 2020-08-18T15:04:10.000Z | 2021-05-10T11:10:44.000Z | lib/kaffy/resource_schema.ex | clszzyh/kaffy | 411766de3bf1b5b88ca0cb5078b7421154c70db5 | [
"MIT"
] | 2 | 2020-05-31T18:19:05.000Z | 2020-06-15T13:43:12.000Z | lib/kaffy/resource_schema.ex | clszzyh/kaffy | 411766de3bf1b5b88ca0cb5078b7421154c70db5 | [
"MIT"
] | 1 | 2020-09-18T14:25:00.000Z | 2020-09-18T14:25:00.000Z | defmodule Kaffy.ResourceSchema do
@moduledoc false
def primary_key(schema) do
schema.__schema__(:primary_key)
end
def excluded_fields(schema) do
{pk, _, _} = schema.__schema__(:autogenerate_id)
autogenerated = schema.__schema__(:autogenerate)
case length(autogenerated) do
1 ->
[{auto_fields, _}] = autogenerated
[pk] ++ auto_fields
_ ->
[pk]
end
end
def index_fields(schema) do
Keyword.drop(fields(schema), fields_to_be_removed(schema))
end
def form_fields(schema) do
to_be_removed = fields_to_be_removed(schema) ++ [:id, :inserted_at, :updated_at]
Keyword.drop(fields(schema), to_be_removed)
end
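  # cast_fields/1 additionally drops has_one/has_many/many_to_many associations so
  # only plain, castable columns remain.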
def cast_fields(schema) do
to_be_removed =
fields_to_be_removed(schema) ++
get_has_many_associations(schema) ++
get_has_one_assocations(schema) ++
get_many_to_many_associations(schema) ++ [:id, :inserted_at, :updated_at]
Keyword.drop(fields(schema), to_be_removed)
end
def fields(schema) do
schema
|> get_all_fields()
|> reorder_fields(schema)
end
defp get_all_fields(schema) do
schema.__changeset__()
|> Enum.map(fn {k, _} -> {k, default_field_options(schema, k)} end)
end
def default_field_options(schema, field) do
type = field_type(schema, field)
label = Kaffy.ResourceForm.form_label_string(field)
merge_field_options(%{label: label, type: type})
end
def merge_field_options(options) do
default = %{
create: :editable,
update: :editable,
label: nil,
type: nil,
choices: nil
}
Map.merge(default, options || %{})
end
defp fields_to_be_removed(schema) do
    # if schema defines belongs_to associations, remove assoc fields and keep their actual *_id fields.
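    # e.g. for a schema with `belongs_to :user`, the `:user` assoc entry is dropped
    # while the underlying `:user_id` column is kept.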
schema.__changeset__()
|> Enum.reduce([], fn {field, type}, all ->
case type do
{:assoc, %Ecto.Association.BelongsTo{}} ->
[field | all]
{:assoc, %Ecto.Association.Has{cardinality: :many}} ->
[field | all]
{:assoc, %Ecto.Association.Has{cardinality: :one}} ->
[field | all]
_ ->
all
end
end)
end
defp reorder_fields(fields_list, schema) do
[_id, first_field | _fields] = schema.__schema__(:fields)
# this is a "nice" feature to re-order the default fields to put the specified fields at the top/bottom of the form
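    # e.g. the result starts with :id, :title, :name, :email and the schema's first
    # declared field (whichever of these exist), and ends with :inserted_at and
    # :updated_at.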
fields_list
|> reorder_field(first_field, :first)
|> reorder_field(:email, :first)
|> reorder_field(:name, :first)
|> reorder_field(:title, :first)
|> reorder_field(:id, :first)
|> reorder_field(:inserted_at, :last)
|> reorder_field(:updated_at, :last)
# |> reorder_field(Kaffy.ResourceSchema.embeds(schema), :last)
end
defp reorder_field(fields_list, [], _), do: fields_list
defp reorder_field(fields_list, [field | rest], position) do
fields_list = reorder_field(fields_list, field, position)
reorder_field(fields_list, rest, position)
end
defp reorder_field(fields_list, field_name, position) do
if field_name in Keyword.keys(fields_list) do
{field_options, fields_list} = Keyword.pop(fields_list, field_name)
case position do
:first -> [{field_name, field_options}] ++ fields_list
:last -> fields_list ++ [{field_name, field_options}]
end
else
fields_list
end
end
def has_field_filters?(resource) do
admin_fields = Kaffy.ResourceAdmin.index(resource)
fields_with_filters =
Enum.map(admin_fields, fn f -> kaffy_field_filters(resource[:schema], f) end)
Enum.any?(fields_with_filters, fn
{_, filters} -> filters
_ -> false
end)
end
def kaffy_field_filters(_schema, {field, options}) do
{field, Map.get(options || %{}, :filters, false)}
end
def kaffy_field_filters(_, _), do: false
def kaffy_field_name(schema, {field, options}) do
default_name = kaffy_field_name(schema, field)
name = Map.get(options || %{}, :name)
cond do
is_binary(name) -> name
is_function(name) -> name.(schema)
true -> default_name
end
end
def kaffy_field_name(_schema, field) when is_atom(field) do
Kaffy.ResourceAdmin.humanize_term(field)
end
def kaffy_field_value(conn, schema, {field, options}) do
default_value = kaffy_field_value(schema, field)
ft = Kaffy.ResourceSchema.field_type(schema.__struct__, field)
value = Map.get(options || %{}, :value)
cond do
is_function(value) ->
value.(schema)
is_map(value) && Map.has_key?(value, :__struct__) ->
if value.__struct__ in [NaiveDateTime, DateTime, Date, Time] do
value
else
Map.from_struct(value)
|> Map.drop([:__meta__])
|> Kaffy.Utils.json().encode!(escape: :html_safe, pretty: true)
end
Kaffy.Utils.is_module(ft) && Keyword.has_key?(ft.__info__(:functions), :render_index) ->
ft.render_index(conn, schema, field, options)
is_map(value) ->
Kaffy.Utils.json().encode!(value, escape: :html_safe, pretty: true)
is_binary(value) ->
value
true ->
default_value
end
end
def kaffy_field_value(schema, field) when is_atom(field) do
value = Map.get(schema, field, "")
cond do
is_map(value) && Map.has_key?(value, :__struct__) && value.__struct__ == Decimal ->
value
is_map(value) && Map.has_key?(value, :__struct__) ->
if value.__struct__ in [NaiveDateTime, DateTime, Date, Time] do
value
else
Map.from_struct(value)
|> Map.drop([:__meta__])
|> Kaffy.Utils.json().encode!(escape: :html_safe, pretty: true)
end
is_map(value) ->
Kaffy.Utils.json().encode!(value, escape: :html_safe, pretty: true)
is_binary(value) ->
String.slice(value, 0, 140)
true ->
value
end
end
def display_string_fields([], all), do: Enum.reverse(all) |> Enum.join(",")
def display_string_fields([{field, _} | rest], all) do
display_string_fields(rest, [field | all])
end
def display_string_fields([field | rest], all) do
display_string_fields(rest, [field | all])
end
def associations(schema) do
schema.__schema__(:associations)
end
def get_has_many_associations(schema) do
associations(schema)
|> Enum.filter(fn a ->
case association(schema, a) do
%Ecto.Association.Has{cardinality: :many} -> true
_ -> false
end
end)
end
def get_has_one_assocations(schema) do
associations(schema)
|> Enum.filter(fn a ->
case association(schema, a) do
%Ecto.Association.Has{cardinality: :one} -> true
_ -> false
end
end)
end
def get_many_to_many_associations(schema) do
associations(schema)
|> Enum.filter(fn a ->
case association(schema, a) do
%Ecto.Association.ManyToMany{cardinality: :many} -> true
_ -> false
end
end)
end
def association(schema, name) do
schema.__schema__(:association, name)
end
def association_schema(schema, assoc) do
association(schema, assoc).queryable
end
def embeds(schema) do
schema.__schema__(:embeds)
end
def embed(schema, name) do
schema.__schema__(:embed, name)
end
def embed_struct(schema, name) do
embed(schema, name).related
end
def search_fields(resource) do
schema = resource[:schema]
persisted_fields = schema.__schema__(:fields)
Enum.filter(fields(schema), fn f ->
field_name = elem(f, 0)
field_type(schema, f).type in [:string, :textarea, :richtext] &&
field_name in persisted_fields
end)
|> Enum.map(fn {f, _} -> f end)
end
def filter_fields(_), do: nil
def field_type(_schema, {_, type}), do: type
def field_type(schema, field), do: schema.__changeset__() |> Map.get(field, :string)
# def field_type(schema, field), do: schema.__schema__(:type, field)
def get_map_fields(schema) do
get_all_fields(schema)
|> Enum.filter(fn
{_f, options} ->
options.type == :map
f when is_atom(f) ->
f == :map
end)
end
def widgets(_resource) do
[]
end
end
| 26.050633 | 119 | 0.642493 |
f7d57265deab5189306e69e3738e52fc01ae9bb1 | 505 | exs | Elixir | test/unit/hologram/compiler/transformers/unary_positive_operator_transformer_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | test/unit/hologram/compiler/transformers/unary_positive_operator_transformer_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | test/unit/hologram/compiler/transformers/unary_positive_operator_transformer_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | defmodule Hologram.Compiler.UnaryPositiveOperatorTransformerTest do
use Hologram.Test.UnitCase, async: true
alias Hologram.Compiler.{Context, UnaryPositiveOperatorTransformer}
alias Hologram.Compiler.IR.{IntegerType, UnaryPositiveOperator}
test "transform/3" do
code = "+2"
ast = ast(code)
result = UnaryPositiveOperatorTransformer.transform(ast, %Context{})
expected = %UnaryPositiveOperator{
value: %IntegerType{value: 2}
}
assert result == expected
end
end
| 25.25 | 72 | 0.742574 |
f7d583bf42463c38a2a8295fd48f31db69b15609 | 1,889 | exs | Elixir | mix.exs | membraneframework/elixir_dtls | b4954fc1dacec6df6df043fcd7c600d931bf6a7c | [
"Apache-2.0"
] | null | null | null | mix.exs | membraneframework/elixir_dtls | b4954fc1dacec6df6df043fcd7c600d931bf6a7c | [
"Apache-2.0"
] | null | null | null | mix.exs | membraneframework/elixir_dtls | b4954fc1dacec6df6df043fcd7c600d931bf6a7c | [
"Apache-2.0"
] | null | null | null | defmodule ExDTLS.Mixfile do
use Mix.Project
@version "0.11.0"
@github_url "https://github.com/membraneframework/ex_dtls"
def project do
[
app: :ex_dtls,
version: @version,
elixir: "~> 1.12",
compilers: [:unifex, :bundlex] ++ Mix.compilers(),
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
deps: deps(),
dialyzer: dialyzer(),
# hex
description: "Elixir wrapper over OpenSSL for performing DTLS handshake",
package: package(),
# docs
name: "ExDTLS",
source_url: @github_url,
homepage_url: "https://membraneframework.org",
docs: docs()
]
end
def application do
[
extra_applications: []
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_env), do: ["lib"]
defp deps do
[
{:unifex, "~> 1.0"},
{:ex_doc, "~> 0.22", only: :dev, runtime: false},
{:dialyxir, "~> 1.0.0", only: :dev, runtime: false},
{:credo, "~> 1.4", only: :dev, runtime: false}
]
end
defp dialyzer() do
opts = [
flags: [:error_handling]
]
if System.get_env("CI") == "true" do
# Store PLTs in cacheable directory for CI
[plt_local_path: "priv/plts", plt_core_path: "priv/plts"] ++ opts
else
opts
end
end
defp package do
[
maintainers: ["Membrane Team"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => @github_url,
"Membrane Framework Homepage" => "https://membraneframework.org"
},
files: ["lib", "mix.exs", "README*", "LICENSE*", ".formatter.exs", "bundlex.exs", "c_src"]
]
end
defp docs do
[
main: "readme",
extras: ["README.md", "LICENSE"],
formatters: ["html"],
source_ref: "v#{@version}",
nest_modules_by_prefix: [ExDTLS]
]
end
end
| 22.759036 | 96 | 0.562732 |
f7d59fb25be388f8df9cf23a8bb0ddda206dac6f | 17,778 | exs | Elixir | lib/elixir/test/elixir/module/types/types_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/test/elixir/module/types/types_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/test/elixir/module/types/types_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | Code.require_file("type_helper.exs", __DIR__)
defmodule Module.Types.TypesTest do
use ExUnit.Case, async: true
alias Module.Types
alias Module.Types.{Pattern, Expr}
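  # Expands the given patterns/guards/body like a function head and body, runs the
  # type checker on them via `__expr__/1`, and returns the formatted warning or
  # error message (or `:none` when nothing is reported).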
defmacro warning(patterns \\ [], guards \\ [], body) do
min_line = min_line(patterns ++ guards ++ [body])
patterns = reset_line(patterns, min_line)
guards = reset_line(guards, min_line)
body = reset_line(body, min_line)
expr = TypeHelper.expand_expr(patterns, guards, body, __CALLER__)
quote do
Module.Types.TypesTest.__expr__(unquote(Macro.escape(expr)))
end
end
defmacro generated(ast) do
Macro.prewalk(ast, fn node -> Macro.update_meta(node, &([generated: true] ++ &1)) end)
end
def __expr__({patterns, guards, body}) do
with {:ok, _types, context} <-
Pattern.of_head(patterns, guards, TypeHelper.new_stack(), TypeHelper.new_context()),
{:ok, _type, context} <- Expr.of_expr(body, :dynamic, TypeHelper.new_stack(), context) do
case context.warnings do
[warning] -> to_message(:warning, warning)
_ -> :none
end
else
{:error, {type, reason, context}} ->
to_message(:error, {type, reason, context})
end
end
defp reset_line(ast, min_line) do
Macro.prewalk(ast, fn ast ->
Macro.update_meta(ast, fn meta ->
Keyword.update!(meta, :line, &(&1 - min_line + 1))
end)
end)
end
defp min_line(ast) do
{_ast, min} =
Macro.prewalk(ast, :infinity, fn
{_fun, meta, _args} = ast, min -> {ast, min(min, Keyword.get(meta, :line, 1))}
other, min -> {other, min}
end)
min
end
defp to_message(:warning, {module, warning, _location}) do
warning
|> module.format_warning()
|> IO.iodata_to_binary()
end
defp to_message(:error, {type, reason, context}) do
{Module.Types, error, _location} = Module.Types.error_to_warning(type, reason, context)
error
|> Module.Types.format_warning()
|> IO.iodata_to_binary()
|> String.trim_trailing("\nConflict found at")
end
test "expr_to_string/1" do
assert Types.expr_to_string({1, 2}) == "{1, 2}"
assert Types.expr_to_string(quote(do: Foo.bar(arg))) == "Foo.bar(arg)"
assert Types.expr_to_string(quote(do: :erlang.band(a, b))) == "Bitwise.band(a, b)"
assert Types.expr_to_string(quote(do: :erlang.orelse(a, b))) == "a or b"
assert Types.expr_to_string(quote(do: :erlang."=:="(a, b))) == "a === b"
assert Types.expr_to_string(quote(do: :erlang.list_to_atom(a))) == "List.to_atom(a)"
assert Types.expr_to_string(quote(do: :maps.remove(a, b))) == "Map.delete(b, a)"
assert Types.expr_to_string(quote(do: :erlang.element(1, a))) == "elem(a, 0)"
assert Types.expr_to_string(quote(do: :erlang.element(:erlang.+(a, 1), b))) == "elem(b, a)"
end
test "undefined function warnings" do
assert warning([], URI.unknown("foo")) ==
"URI.unknown/1 is undefined or private"
assert warning([], if(true, do: URI.unknown("foo"))) ==
"URI.unknown/1 is undefined or private"
assert warning([], try(do: :ok, after: URI.unknown("foo"))) ==
"URI.unknown/1 is undefined or private"
end
describe "function head warnings" do
test "warns on literals" do
string = warning([var = 123, var = "abc"], var)
assert string == """
incompatible types:
integer() !~ binary()
in expression:
# types_test.ex:1
var = "abc"
where "var" was given the type integer() in:
# types_test.ex:1
var = 123
where "var" was given the type binary() in:
# types_test.ex:1
var = "abc"
"""
end
test "warns on binary patterns" do
string = warning([<<var::integer, var::binary>>], var)
assert string == """
incompatible types:
integer() !~ binary()
in expression:
# types_test.ex:1
<<..., var::binary()>>
where "var" was given the type integer() in:
# types_test.ex:1
<<var::integer(), ...>>
where "var" was given the type binary() in:
# types_test.ex:1
<<..., var::binary()>>
"""
end
test "warns on recursive patterns" do
string = warning([{var} = var], var)
assert string == """
incompatible types:
{var1} !~ var1
in expression:
# types_test.ex:1
{var} = var
where "var" was given the type {var1} in:
# types_test.ex:1
{var} = var
"""
end
test "warns on guards" do
string = warning([var], [is_integer(var) and is_binary(var)], var)
assert string == """
incompatible types:
integer() !~ binary()
in expression:
# types_test.ex:1
is_binary(var)
where "var" was given the type integer() in:
# types_test.ex:1
is_integer(var)
where "var" was given the type binary() in:
# types_test.ex:1
is_binary(var)
"""
end
test "warns on guards with multiple variables" do
string = warning([x = y], [is_integer(x) and is_binary(y)], {x, y})
assert string == """
incompatible types:
integer() !~ binary()
in expression:
# types_test.ex:1
is_binary(y)
where "y" was given the same type as "x" in:
# types_test.ex:1
x = y
where "y" was given the type binary() in:
# types_test.ex:1
is_binary(y)
where "x" was given the type integer() in:
# types_test.ex:1
is_integer(x)
"""
end
test "warns on guards from cases unless generated" do
string =
warning(
[var],
[is_integer(var)],
case var do
_ when is_binary(var) -> :ok
end
)
assert is_binary(string)
string =
generated(
warning(
[var],
[is_integer(var)],
case var do
_ when is_binary(var) -> :ok
end
)
)
assert string == :none
end
test "only show relevant traces in warning" do
string = warning([x = y, z], [is_integer(x) and is_binary(y) and is_boolean(z)], {x, y, z})
assert string == """
incompatible types:
integer() !~ binary()
in expression:
# types_test.ex:1
is_binary(y)
where "y" was given the same type as "x" in:
# types_test.ex:1
x = y
where "y" was given the type binary() in:
# types_test.ex:1
is_binary(y)
where "x" was given the type integer() in:
# types_test.ex:1
is_integer(x)
"""
end
test "check body" do
string = warning([x], [is_integer(x)], :foo = x)
assert string == """
incompatible types:
integer() !~ :foo
in expression:
# types_test.ex:1
:foo = x
where "x" was given the type integer() in:
# types_test.ex:1
is_integer(x)
where "x" was given the type :foo in:
# types_test.ex:1
:foo = x
"""
end
test "check binary" do
string = warning([foo], [is_binary(foo)], <<foo>>)
assert string == """
incompatible types:
binary() !~ integer()
in expression:
# types_test.ex:1
<<foo>>
where "foo" was given the type binary() in:
# types_test.ex:1
is_binary(foo)
where "foo" was given the type integer() in:
# types_test.ex:1
<<foo>>
HINT: all expressions given to binaries are assumed to be of type \
integer() unless said otherwise. For example, <<expr>> assumes "expr" \
is an integer. Pass a modifier, such as <<expr::float>> or <<expr::binary>>, \
to change the default behaviour.
"""
string = warning([foo], [is_binary(foo)], <<foo::integer>>)
assert string == """
incompatible types:
binary() !~ integer()
in expression:
# types_test.ex:1
<<foo::integer()>>
where "foo" was given the type binary() in:
# types_test.ex:1
is_binary(foo)
where "foo" was given the type integer() in:
# types_test.ex:1
<<foo::integer()>>
"""
end
test "is_tuple warning" do
string = warning([foo], [is_tuple(foo)], {_} = foo)
assert string == """
incompatible types:
tuple() !~ {dynamic()}
in expression:
# types_test.ex:1
{_} = foo
where "foo" was given the type tuple() in:
# types_test.ex:1
is_tuple(foo)
where "foo" was given the type {dynamic()} in:
# types_test.ex:1
{_} = foo
HINT: use pattern matching or "is_tuple(foo) and tuple_size(foo) == 1" to guard a sized tuple.
"""
end
test "function call" do
string = warning([foo], [rem(foo, 2.0) == 0], foo)
assert string == """
expected Kernel.rem/2 to have signature:
var1, float() -> dynamic()
but it has signature:
integer(), integer() -> integer()
in expression:
# types_test.ex:1
rem(foo, 2.0)
"""
end
test "operator call" do
string = warning([foo], [foo - :bar == 0], foo)
assert string == """
expected Kernel.-/2 to have signature:
var1, :bar -> dynamic()
but it has signature:
integer(), integer() -> integer()
float(), integer() | float() -> float()
integer() | float(), float() -> float()
in expression:
# types_test.ex:1
foo - :bar
"""
end
end
describe "map warnings" do
test "handling of non-singleton types in maps" do
string =
warning(
[],
(
event = %{"type" => "order"}
%{"amount" => amount} = event
%{"user" => user} = event
%{"id" => user_id} = user
{:order, user_id, amount}
)
)
assert string == """
incompatible types:
binary() !~ map()
in expression:
# types_test.ex:5
%{"id" => user_id} = user
where "user" was given the same type as "amount" in:
# types_test.ex:4
%{"user" => user} = event
where "user" was given the type map() in:
# types_test.ex:5
%{"id" => user_id} = user
where "amount" was given the type binary() in:
# types_test.ex:3
%{"amount" => amount} = event
"""
end
test "show map() when comparing against non-map" do
string =
warning(
[foo],
(
foo.bar
:atom = foo
)
)
assert string == """
incompatible types:
map() !~ :atom
in expression:
# types_test.ex:4
:atom = foo
where "foo" was given the type map() (due to calling var.field) in:
# types_test.ex:3
foo.bar
where "foo" was given the type :atom in:
# types_test.ex:4
:atom = foo
HINT: "var.field" (without parentheses) implies "var" is a map() while \
"var.fun()" (with parentheses) implies "var" is an atom()
"""
end
test "use module as map (without parentheses)" do
string =
warning(
[foo],
(
%module{} = foo
module.__struct__
)
)
assert string == """
incompatible types:
map() !~ atom()
in expression:
# types_test.ex:4
module.__struct__
where "module" was given the type atom() in:
# types_test.ex:3
%module{}
where "module" was given the type map() (due to calling var.field) in:
# types_test.ex:4
module.__struct__
HINT: "var.field" (without parentheses) implies "var" is a map() while \
"var.fun()" (with parentheses) implies "var" is an atom()
"""
end
test "use map as module (with parentheses)" do
string = warning([foo], [is_map(foo)], foo.__struct__())
assert string == """
incompatible types:
map() !~ atom()
in expression:
# types_test.ex:1
foo.__struct__()
where "foo" was given the type map() in:
# types_test.ex:1
is_map(foo)
where "foo" was given the type atom() (due to calling var.fun()) in:
# types_test.ex:1
foo.__struct__()
HINT: "var.field" (without parentheses) implies "var" is a map() while \
"var.fun()" (with parentheses) implies "var" is an atom()
"""
end
test "non-existent map field warning" do
string =
warning(
(
map = %{foo: 1}
map.bar
)
)
assert string == """
undefined field "bar" in expression:
# types_test.ex:3
map.bar
expected one of the following fields: foo
where "map" was given the type map() in:
# types_test.ex:2
map = %{foo: 1}
"""
end
test "non-existent struct field warning" do
string =
warning(
[foo],
(
%URI{} = foo
foo.bar
)
)
assert string == """
undefined field "bar" in expression:
# types_test.ex:4
foo.bar
expected one of the following fields: __struct__, authority, fragment, host, path, port, query, scheme, userinfo
where "foo" was given the type %URI{} in:
# types_test.ex:3
%URI{} = foo
"""
end
test "expands type variables" do
string =
warning(
[%{foo: key} = event, other_key],
[is_integer(key) and is_atom(other_key)],
%{foo: ^other_key} = event
)
assert string == """
incompatible types:
%{foo: integer()} !~ %{foo: atom()}
in expression:
# types_test.ex:3
%{foo: ^other_key} = event
where "event" was given the type %{foo: integer(), optional(dynamic()) => dynamic()} in:
# types_test.ex:1
%{foo: key} = event
where "event" was given the type %{foo: atom(), optional(dynamic()) => dynamic()} in:
# types_test.ex:3
%{foo: ^other_key} = event
"""
end
test "expands map when maps are nested" do
string =
warning(
[map1, map2],
(
[_var1, _var2] = [map1, map2]
%{} = map1
%{} = map2.subkey
)
)
assert string == """
incompatible types:
%{subkey: var1, optional(dynamic()) => dynamic()} !~ %{optional(dynamic()) => dynamic()} | %{optional(dynamic()) => dynamic()}
in expression:
# types_test.ex:5
map2.subkey
where "map2" was given the type %{optional(dynamic()) => dynamic()} | %{optional(dynamic()) => dynamic()} in:
# types_test.ex:3
[_var1, _var2] = [map1, map2]
where "map2" was given the type %{subkey: var1, optional(dynamic()) => dynamic()} (due to calling var.field) in:
# types_test.ex:5
map2.subkey
HINT: "var.field" (without parentheses) implies "var" is a map() while "var.fun()" (with parentheses) implies "var" is an atom()
"""
end
end
describe "regressions" do
test "recursive map fields" do
assert warning(
[queried],
with(
true <- is_nil(queried.foo.bar),
_ = queried.foo
) do
%{foo: %{other_id: _other_id} = foo} = queried
%{other_id: id} = foo
%{id: id}
end
) == :none
end
end
end
| 25.690751 | 143 | 0.4712 |
f7d5a69c8a0302b5b5f6ac822c2b9ecef23ca166 | 7,447 | ex | Elixir | plugins/ucc_chat/lib/ucc_chat_web/channels/client.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/channels/client.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/channels/client.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UccChatWeb.Client do
use UccChatWeb.RoomChannel.Constants
import UcxUccWeb.Utils
import Rebel.Query, warn: false
import Rebel.Core, except: [broadcast_js: 2, async_js: 2]
alias Rebel.Element
alias UccChatWeb.RebelChannel.Client, as: RebelClient
alias UcxUccWeb.Query
require Logger
# alias Rebel.Element
@wrapper ".messages-box .wrapper"
@wrapper_list @wrapper <> " > ul"
# defmacro __using__(_) do
# quote do
# import UcxUccWeb.Utils
# defdelegate send_js(socket, js), to: unquote(__MODULE__)
# defdelegate send_js!(socket, js), to: unquote(__MODULE__)
# defdelegate closest(socket, selector, class, attr), to: unquote(__MODULE__)
# defdelegate append(socket, selector, html), to: unquote(__MODULE__)
# defdelegate broadcast!(socket, event, bindings), to: Phoenix.Channel
# defdelegate render_to_string(view, templ, bindings), to: Phoenix.View
# defdelegate insert_html(socket, selector, position, html), to: Rebel.Element
# defdelegate query_one(socket, selector, prop), to: Rebel.Element
# defdelegate toastr!(socket, which, message), to: UccChatWeb.RebelChannel.Client
# defdelegate toastr(socket, which, message), to: UccChatWeb.RebelChannel.Client
# end
# end
def send_js(socket, js) do
exec_js socket, strip_nl(js)
end
def send_js!(socket, js) do
exec_js! socket, strip_nl(js)
end
# not sure how to do this
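  # (runs JS in the client: finds the nearest ancestor of `selector` matching
  # `class` and returns its `attr` attribute, or null when there is no match)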
def closest(socket, selector, class, attr) do
exec_js! socket, """
var el = document.querySelector('#{selector}');
el = el.closest('#{class}');
if (el) {
el.getAttribute('#{attr}');
} else {
null;
}
"""
end
def append(socket, selector, html) do
Rebel.Query.insert socket, html, append: selector
end
def replace_with(socket, selector, html) do
Query.update socket, :replaceWith, set: html, on: selector
end
def html(socket, selector, html) do
Query.update socket, :html, set: html, on: selector
end
def remove_closest(socket, selector, parent, children) do
js =
~s/$('#{selector}').closest('#{parent}').find('#{children}').remove()/
# Logger.warn "remove closest js: #{inspect js}"
async_js socket, js
socket
end
def close_popup(socket) do
Query.update socket, :html, set: "", on: ".message-popup-results"
end
def has_class?(socket, selector, class) do
exec_js! socket,
"document.querySelector('#{selector}').classList.contains('#{class}')"
end
def editing_message?(socket) do
has_class?(socket, @message_box, "editing")
end
def get_message_box_value(socket) do
exec_js! socket, "document.querySelector('#{@message_box}').value;"
end
def set_message_box_focus(socket) do
async_js socket, set_message_box_focus_js()
end
def set_message_box_focus_js,
do: "var elem = document.querySelector('#{@message_box}'); elem.focus();"
def clear_message_box(socket) do
assigns = socket.assigns
socket
|> UccChatWeb.RebelChannel.Client.push_message_box(assigns.channel_id, assigns.user_id)
|> set_inputbox_buttons(false)
end
def clear_message_box_js,
do: set_message_box_focus_js() <> ~s(elem.value = "";)
def render_popup_results(html, socket) do
Query.update socket, :html, set: html, on: ".message-popup-results"
end
def get_selected_item(socket) do
case Element.query_one socket, ".popup-item.selected", :dataset do
{:ok, %{"dataset" => %{"name" => name}}} -> name
_other -> nil
end
end
def push_message({message, html}, socket) do
async_js socket, push_message_js(html, message) <>
RebelClient.scroll_bottom_js('#{@wrapper}')
end
def push_update_message({message, html}, socket) do
socket
|> Query.update(:replaceWith, set: html,
on: ~s/#{@wrapper_list} li[id="#{message.id}"]/)
|> async_js("UccChat.roomManager.updateMentionsMarksOfRoom()")
end
def push_update_reactions({message, html}, socket) do
socket
|> Query.update(:replaceWith, set: html,
on: ~s/#{@wrapper_list} li[id="#{message.id}"] ul.reactions/)
|> async_js("if (UccUtils.is_scroll_bottom(50)) { UccUtils.scroll_bottom(); }")
end
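  # Builds the JS that appends the rendered message node to the room's message list,
  # rebinds Rebel event handlers for it, scrolls to the bottom when the viewer is
  # already near the bottom (or is the author), and refreshes mention marks.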
def push_message_js(html, message) do
encoded = Poison.encode! html
"""
var node = document.createRange().createContextualFragment(#{encoded});
var elem = document.querySelector('#{@wrapper_list}');
var at_bottom = UccUtils.is_scroll_bottom(30);
var user_id = '#{message.user_id}';
var id = '#{message.id}';
elem.append(node);
Rebel.set_event_handlers('[id="#{message.id}"]');
UccChat.normalize_message(id);
if (at_bottom || user_id == ucxchat.user_id) {
UccUtils.scroll_bottom();
}
UccChat.roomManager.updateMentionsMarksOfRoom();
UccChat.roomManager.new_message(id, user_id);
"""
end
def broadcast_message({message, html}, socket) do
js = push_message_js(html, message)
broadcast_js socket, js
end
def broadcast_update_message({message, html}, socket) do
broadcast_js socket, update_message_js(html, message)
end
def update_message_js(html, message) do
encoded = Poison.encode! html
"""
$('[id="#{message.id}"]').replaceWith(#{encoded});
Rebel.set_event_handlers('[id="#{message.id}"]');
UccChat.normalize_message('#{message.id}');
UccChat.roomManager.updateMentionsMarksOfRoom();
"""
end
def delete_message(message_id, socket) do
delete socket, "li.message#" <> message_id
end
def set_inputbox_buttons(socket, mode) when mode in [true, :active] do
async_js socket, """
$('.message-buttons').hide();
$('.message-buttons.send-button').show();
$('#{@message_box}').addClass('dirty');
"""
end
def set_inputbox_buttons(socket, mode) when mode in [false, nil, :empty] do
async_js socket, """
$('.message-buttons').show();
$('.message-buttons.send-button').hide();
$('#{@message_box}').removeClass('dirty');
"""
end
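  # Triggers a browser desktop notification; clicking it pushes a "notification:click"
  # event back over the user channel with the message and channel details.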
def desktop_notify(socket, name, body, message, duration) do
title = ~s/"Message from @#{name}"/
body = Poison.encode! body
id = inspect message.id
channel_id = inspect message.channel_id
channel_name = inspect message.channel.name
async_js socket, """
UccChat.notifier.desktop(#{title}, #{body}, {
duration: #{duration},
onclick: function(event) {
UccChat.userchan.push("notification:click",
{message_id: #{id}, name: #{name}, channel_id: #{channel_id}, channel_name: #{channel_name}});
}
});
"""
|> String.replace("\n", "")
socket
end
def notify_audio(socket, sound) do
async_js socket, ~s/UccChat.notifier.audio('#{sound}')/
socket
end
def close_flex_bar(socket) do
Query.delete socket, class: "opened", from: "#flex-tabs.opened"
# async_js socket, "$('#flex-tabs"
end
defdelegate broadcast!(socket, event, bindings), to: Phoenix.Channel
defdelegate render_to_string(view, templ, bindings), to: Phoenix.View
defdelegate insert_html(socket, selector, position, html), to: Rebel.Element
defdelegate toastr!(socket, which, message), to: UccChatWeb.RebelChannel.Client
defdelegate toastr(socket, which, message), to: UccChatWeb.RebelChannel.Client
defdelegate broadcast_js(socket, js), to: Rebel.Core
defdelegate async_js(socket, js), to: Rebel.Core
end
| 31.689362 | 106 | 0.669397 |
f7d5d9519f969ca080e7ba284579f231187eb2b9 | 12,660 | exs | Elixir | test/elixir/test/attachments_multipart_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | test/elixir/test/attachments_multipart_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | test/elixir/test/attachments_multipart_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | defmodule AttachmentMultipartTest do
use CouchTestCase
@moduletag :attachments
@moduledoc """
Test CouchDB attachment multipart requests
This is a port of the attachments_multipart.js suite
"""
@tag :with_db
test "manages attachments multipart requests successfully", context do
db_name = context[:db_name]
document = """
{
"body": "This is a body.",
"_attachments": {
"foo.txt": {
"follows": true,
"content_type": "application/test",
"length": 21
},
"bar.txt": {
"follows": true,
"content_type": "application/test",
"length": 20
},
"baz.txt": {
"follows": true,
"content_type": "text/plain",
"length": 19
}
}
}
"""
multipart_data =
"--abc123\r\n" <>
"content-type: application/json\r\n" <>
"\r\n" <>
document <>
"\r\n--abc123\r\n" <>
"\r\n" <>
"this is 21 chars long" <>
"\r\n--abc123\r\n" <>
"\r\n" <>
"this is 20 chars lon" <>
"\r\n--abc123\r\n" <> "\r\n" <> "this is 19 chars lo" <> "\r\n--abc123--epilogue"
resp =
Couch.put(
"/#{db_name}/multipart",
body: multipart_data,
headers: ["Content-Type": "multipart/related;boundary=\"abc123\""]
)
assert resp.status_code in [201, 202]
assert resp.body["ok"] == true
resp = Couch.get("/#{db_name}/multipart/foo.txt")
assert resp.body == "this is 21 chars long"
resp = Couch.get("/#{db_name}/multipart/bar.txt")
assert resp.body == "this is 20 chars lon"
resp = Couch.get("/#{db_name}/multipart/baz.txt")
assert resp.body == "this is 19 chars lo"
doc = Couch.get("/#{db_name}/multipart", query: %{att_encoding_info: true})
first_rev = doc.body["_rev"]
assert doc.body["_attachments"]["foo.txt"]["stub"] == true
assert doc.body["_attachments"]["bar.txt"]["stub"] == true
assert doc.body["_attachments"]["baz.txt"]["stub"] == true
assert Map.has_key?(doc.body["_attachments"]["foo.txt"], "encoding") == false
assert Map.has_key?(doc.body["_attachments"]["bar.txt"], "encoding") == false
assert doc.body["_attachments"]["baz.txt"]["encoding"] == "gzip"
document_updated = """
{
"_rev": "#{first_rev}",
"body": "This is a body.",
"_attachments": {
"foo.txt": {
"stub": true,
"content_type": "application/test"
},
"bar.txt": {
"follows": true,
"content_type": "application/test",
"length": 18
}
}
}
"""
multipart_data_updated =
"--abc123\r\n" <>
"content-type: application/json\r\n" <>
"\r\n" <>
document_updated <>
"\r\n--abc123\r\n" <> "\r\n" <> "this is 18 chars l" <> "\r\n--abc123--"
resp =
Couch.put(
"/#{db_name}/multipart",
body: multipart_data_updated,
headers: ["Content-Type": "multipart/related;boundary=\"abc123\""]
)
assert resp.status_code in [201, 202]
resp = Couch.get("/#{db_name}/multipart/bar.txt")
assert resp.body == "this is 18 chars l"
resp = Couch.get("/#{db_name}/multipart/baz.txt")
assert resp.status_code == 404
resp =
Couch.get(
"/#{db_name}/multipart",
query: %{:attachments => true},
headers: [accept: "multipart/related,*/*;"]
)
assert resp.status_code == 200
assert resp.headers["Content-length"] == "790"
# parse out the multipart
sections = parse_multipart(resp)
assert length(sections) == 3
    # The first section is the JSON doc. Check its content-type.
    # Each part carries its own metadata.
assert Enum.at(sections, 0).headers["Content-Type"] == "application/json"
assert Enum.at(sections, 1).headers["Content-Type"] == "application/test"
assert Enum.at(sections, 2).headers["Content-Type"] == "application/test"
assert Enum.at(sections, 1).headers["Content-Length"] == "21"
assert Enum.at(sections, 2).headers["Content-Length"] == "18"
assert Enum.at(sections, 1).headers["Content-Disposition"] ==
~s(attachment; filename="foo.txt")
assert Enum.at(sections, 2).headers["Content-Disposition"] ==
~s(attachment; filename="bar.txt")
doc = :jiffy.decode(Enum.at(sections, 0).body, [:return_maps])
assert doc["_attachments"]["foo.txt"]["follows"] == true
assert doc["_attachments"]["bar.txt"]["follows"] == true
assert Enum.at(sections, 1).body == "this is 21 chars long"
assert Enum.at(sections, 2).body == "this is 18 chars l"
    # now get attachments incrementally (only the attachments changed since
    # a certain rev).
resp =
Couch.get(
"/#{db_name}/multipart",
query: %{:atts_since => ~s(["#{first_rev}"])},
headers: [accept: "multipart/related,*/*;"]
)
assert resp.status_code == 200
sections = parse_multipart(resp)
assert length(sections) == 2
doc = :jiffy.decode(Enum.at(sections, 0).body, [:return_maps])
assert doc["_attachments"]["foo.txt"]["stub"] == true
assert doc["_attachments"]["bar.txt"]["follows"] == true
assert Enum.at(sections, 1).body == "this is 18 chars l"
# try the atts_since parameter together with the open_revs parameter
resp =
Couch.get(
"/#{db_name}/multipart",
query: %{
:open_revs => ~s(["#{doc["_rev"]}"]),
:atts_since => ~s(["#{first_rev}"])
},
headers: [accept: "multipart/related,*/*;"]
)
assert resp.status_code == 200
sections = parse_multipart(resp)
# 1 section, with a multipart/related Content-Type
assert length(sections) == 1
ctype_value = Enum.at(sections, 0).headers["Content-Type"]
assert String.starts_with?(ctype_value, "multipart/related;") == true
inner_sections = parse_multipart(Enum.at(sections, 0))
    # 3 inner sections: a document body section plus 2 attachment data sections
assert length(inner_sections) == 3
assert Enum.at(inner_sections, 0).headers["Content-Type"] == "application/json"
doc = :jiffy.decode(Enum.at(inner_sections, 0).body, [:return_maps])
assert doc["_attachments"]["foo.txt"]["follows"] == true
assert doc["_attachments"]["bar.txt"]["follows"] == true
assert Enum.at(inner_sections, 1).body == "this is 21 chars long"
assert Enum.at(inner_sections, 2).body == "this is 18 chars l"
# try it with a rev that doesn't exist (should get all attachments)
resp =
Couch.get(
"/#{db_name}/multipart",
query: %{
:atts_since => ~s(["1-2897589","#{first_rev}"])
},
headers: [accept: "multipart/related,*/*;"]
)
assert resp.status_code == 200
sections = parse_multipart(resp)
assert length(sections) == 2
doc = :jiffy.decode(Enum.at(sections, 0).body, [:return_maps])
assert doc["_attachments"]["foo.txt"]["stub"] == true
assert doc["_attachments"]["bar.txt"]["follows"] == true
assert Enum.at(sections, 1).body == "this is 18 chars l"
end
@tag :with_db
test "manages compressed attachments successfully", context do
db_name = context[:db_name]
# check that with the document multipart/mixed API it's possible to receive
# attachments in compressed form (if they're stored in compressed form)
server_config = [
%{
:section => "attachments",
:key => "compression_level",
:value => "8"
},
%{
:section => "attachments",
:key => "compressible_types",
:value => "text/plain"
}
]
run_on_modified_server(
server_config,
fn -> test_multipart_att_compression(db_name) end
)
end
defp test_multipart_att_compression(dbname) do
doc = %{
"_id" => "foobar"
}
lorem = Couch.get("/_utils/script/test/lorem.txt").body
hello_data = "hello world"
{_, resp} = create_doc(dbname, doc)
first_rev = resp.body["rev"]
resp =
Couch.put(
"/#{dbname}/#{doc["_id"]}/data.bin",
query: %{:rev => first_rev},
body: hello_data,
headers: ["Content-Type": "application/binary"]
)
assert resp.status_code in [201, 202]
second_rev = resp.body["rev"]
resp =
Couch.put(
"/#{dbname}/#{doc["_id"]}/lorem.txt",
query: %{:rev => second_rev},
body: lorem,
headers: ["Content-Type": "text/plain"]
)
assert resp.status_code in [201, 202]
third_rev = resp.body["rev"]
resp =
Couch.get(
"/#{dbname}/#{doc["_id"]}",
query: %{:open_revs => ~s(["#{third_rev}"])},
headers: [Accept: "multipart/mixed", "X-CouchDB-Send-Encoded-Atts": "true"]
)
assert resp.status_code == 200
sections = parse_multipart(resp)
# 1 section, with a multipart/related Content-Type
assert length(sections) == 1
ctype_value = Enum.at(sections, 0).headers["Content-Type"]
assert String.starts_with?(ctype_value, "multipart/related;") == true
inner_sections = parse_multipart(Enum.at(sections, 0))
# 3 inner sections: a document body section plus 2 attachment data sections
assert length(inner_sections) == 3
assert Enum.at(inner_sections, 0).headers["Content-Type"] == "application/json"
doc = :jiffy.decode(Enum.at(inner_sections, 0).body, [:return_maps])
assert doc["_attachments"]["lorem.txt"]["follows"] == true
assert doc["_attachments"]["lorem.txt"]["encoding"] == "gzip"
assert doc["_attachments"]["data.bin"]["follows"] == true
assert doc["_attachments"]["data.bin"]["encoding"] != "gzip"
    if Enum.at(inner_sections, 1).body == hello_data do
      assert Enum.at(inner_sections, 2).body != lorem
    else
      if Enum.at(inner_sections, 2).body == hello_data do
        assert Enum.at(inner_sections, 1).body != lorem
      else
        assert false, "Could not find data.bin attachment data"
      end
    end
# now test that it works together with the atts_since parameter
resp =
Couch.get(
"/#{dbname}/#{doc["_id"]}",
query: %{:open_revs => ~s(["#{third_rev}"]), :atts_since => ~s(["#{second_rev}"])},
headers: [Accept: "multipart/mixed", "X-CouchDB-Send-Encoded-Atts": "true"]
)
assert resp.status_code == 200
sections = parse_multipart(resp)
# 1 section, with a multipart/related Content-Type
assert length(sections) == 1
ctype_value = Enum.at(sections, 0).headers["Content-Type"]
assert String.starts_with?(ctype_value, "multipart/related;") == true
inner_sections = parse_multipart(Enum.at(sections, 0))
# 3 inner sections: a document body section plus 2 attachment data sections
assert length(inner_sections) == 3
assert Enum.at(inner_sections, 0).headers["Content-Type"] == "application/json"
doc = :jiffy.decode(Enum.at(inner_sections, 0).body, [:return_maps])
assert doc["_attachments"]["lorem.txt"]["follows"] == true
assert doc["_attachments"]["lorem.txt"]["encoding"] == "gzip"
assert Enum.at(inner_sections, 1).body != lorem
end
def get_boundary(response) do
ctype = response.headers["Content-Type"]
ctype_args = String.split(ctype, "; ")
ctype_args = Enum.slice(ctype_args, 1, length(ctype_args))
boundary_arg =
Enum.find(
ctype_args,
fn arg -> String.starts_with?(arg, "boundary=") end
)
boundary = Enum.at(String.split(boundary_arg, "="), 1)
if String.starts_with?(boundary, ~s(")) do
:jiffy.decode(boundary)
else
boundary
end
end
def parse_multipart(response) do
boundary = get_boundary(response)
leading = "--#{boundary}\r\n"
last = "\r\n--#{boundary}--"
body = response.body
mimetext = Enum.at(String.split(body, leading, parts: 2), 1)
mimetext = Enum.at(String.split(mimetext, last, parts: 2), 0)
sections = String.split(mimetext, ~s(\r\n--#{boundary}))
Enum.map(sections, fn section ->
section_parts = String.split(section, "\r\n\r\n", parts: 2)
raw_headers = String.split(Enum.at(section_parts, 0), "\r\n")
body = Enum.at(section_parts, 1)
headers =
Enum.reduce(raw_headers, %{}, fn raw_header, acc ->
if raw_header != "" do
header_parts = String.split(raw_header, ": ")
Map.put(acc, Enum.at(header_parts, 0), Enum.at(header_parts, 1))
else
acc
end
end)
%{
:headers => headers,
:body => body
}
end)
end
end
| 30.878049 | 91 | 0.595103 |
f7d5e0ff87a1c1fdf0b26ed90d3ced6651a4a135 | 4,857 | ex | Elixir | lib/rihanna/migration/upgrade.ex | jonnystoten/rihanna | f0c2709f93d9fb1b68100e1722b31f8674c0a9f5 | [
"MIT"
] | null | null | null | lib/rihanna/migration/upgrade.ex | jonnystoten/rihanna | f0c2709f93d9fb1b68100e1722b31f8674c0a9f5 | [
"MIT"
] | null | null | null | lib/rihanna/migration/upgrade.ex | jonnystoten/rihanna | f0c2709f93d9fb1b68100e1722b31f8674c0a9f5 | [
"MIT"
] | null | null | null | defmodule Rihanna.Migration.Upgrade do
@moduledoc """
A set of tools for upgrading an existing Rihanna jobs table.
Rihanna stores jobs in a table in your database. The default table name is
"rihanna_jobs". The name is configurable by either passing it as an argument
to the functions below or setting `:jobs_table_name` in Rihanna's config.
#### Using Ecto
The easiest way to upgrade the database is with Ecto.
Run `mix ecto.gen.migration upgrade_rihanna_jobs` and make your migration look
like this:
```elixir
defmodule MyApp.UpgradeRihannaJobs do
use Rihanna.Migration.Upgrade
end
```
Now you can run `mix ecto.migrate`.
#### Without Ecto
Ecto is not required to run Rihanna. If you want to upgrade the table yourself,
without Ecto, take a look at either `statements/0` or `sql/0`.
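  For example, you could apply the statements with `Postgrex` (a minimal sketch,
  assuming you already have connection options for the target database):
  ```elixir
  {:ok, conn} = Postgrex.start_link(database: "my_app_db")
  Rihanna.Migration.Upgrade.statements()
  |> Enum.each(&Postgrex.query!(conn, &1, []))
  ```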
"""
alias Rihanna.Migration.Upgrade
defmacro __using__(opts) do
table_name = Keyword.get(opts, :table_name, Rihanna.Config.jobs_table_name()) |> to_string
quote do
use Ecto.Migration
def up do
Enum.each(Upgrade.statements(unquote(table_name)), &execute/1)
end
def down do
Enum.each(Upgrade.drop_statements(unquote(table_name)), &execute/1)
end
end
end
@doc """
  Returns a list of SQL statements that will roll back the upgrade of the Rihanna jobs table
  if executed sequentially.
By default it takes the name of the table from the application config.
You may optionally supply a table name as an argument if you want to override
this.
## Examples
> Rihanna.Migration.Upgrade.drop_statements
[...]
> Rihanna.Migration.Upgrade.drop_statements("my_alternative_table_name")
[...]
"""
@spec drop_statements() :: list[String.t()]
@spec drop_statements(String.t() | atom) :: list[String.t()]
def drop_statements(table_name \\ Rihanna.Config.jobs_table_name()) do
[
"""
ALTER TABLE #{table_name} DROP COLUMN due_at;
""",
"""
ALTER TABLE #{table_name} DROP COLUMN rihanna_internal_meta;
""",
"""
ALTER TABLE #{table_name} DROP COLUMN priority;
""",
"""
DO $$
BEGIN
        DROP INDEX IF EXISTS #{table_name}_priority_enqueued_at_id;
        DROP INDEX IF EXISTS #{table_name}_locking_index;
        CREATE INDEX IF NOT EXISTS #{table_name}_enqueued_at_id ON #{table_name} (enqueued_at ASC, id ASC);
END;
$$
"""
]
end
@doc """
Returns a list of SQL statements that will upgrade the Rihanna jobs table if
executed sequentially.
By default it takes the name of the table from the application config.
You may optionally supply a table name as an argument if you want to override
this.
## Examples
> Rihanna.Migration.Upgrade.statements
[...]
> Rihanna.Migration.Upgrade.statements("my_alternative_table_name")
[...]
"""
@spec statements() :: list[String.t()]
@spec statements(String.t() | atom) :: list[String.t()]
def statements(table_name \\ Rihanna.Config.jobs_table_name())
when is_binary(table_name) or is_atom(table_name) do
[
      # Postgres versions earlier than v9.6 do not support `IF EXISTS` predicates
      # on ALTER TABLE commands. For backwards compatibility we're using a try/catch
      # approach to add the `due_at` and `rihanna_internal_meta` columns idempotently.
"""
DO $$
BEGIN
BEGIN
ALTER TABLE #{table_name} ADD COLUMN due_at timestamp with time zone;
ALTER TABLE #{table_name} ADD COLUMN rihanna_internal_meta jsonb NOT NULL DEFAULT '{}';
EXCEPTION
WHEN duplicate_column THEN
RAISE NOTICE 'column already exists in #{table_name}.';
END;
END;
$$
""",
"""
DO $$
BEGIN
ALTER TABLE #{table_name} ADD COLUMN priority integer NOT NULL DEFAULT 50;
EXCEPTION
WHEN duplicate_column THEN
RAISE NOTICE 'column already exists in #{table_name}.';
END;
$$
""",
"""
DO $$
BEGIN
DROP INDEX IF EXISTS #{table_name}_enqueued_at_id;
DROP INDEX IF EXISTS #{table_name}_priority_enqueued_at_id;
DROP INDEX IF EXISTS #{table_name}_locking_index;
CREATE INDEX IF NOT EXISTS #{table_name}_locking_index ON #{table_name} (priority ASC, due_at ASC, enqueued_at ASC, id ASC);
END;
$$
"""
]
end
@doc """
Returns a string of semi-colon-terminated SQL statements that you can execute
directly to upgrade the Rihanna jobs table.
"""
@spec sql(String.t() | atom) :: String.t()
def sql(table_name \\ Rihanna.Config.jobs_table_name()) do
Enum.join(statements(table_name), "\n")
end
end
| 30.167702 | 138 | 0.643195 |
f7d5eac6b218ee065b96cf1d315f458d932742a2 | 768 | ex | Elixir | clients/elixir/generated/lib/swaggy_jenkins/model/favorite_impl.ex | PankTrue/swaggy-jenkins | aca35a7cca6e1fcc08bd399e05148942ac2f514b | [
"MIT"
] | 23 | 2017-08-01T12:25:26.000Z | 2022-01-25T03:44:11.000Z | clients/elixir/generated/lib/swaggy_jenkins/model/favorite_impl.ex | PankTrue/swaggy-jenkins | aca35a7cca6e1fcc08bd399e05148942ac2f514b | [
"MIT"
] | 35 | 2017-06-14T03:28:15.000Z | 2022-02-14T10:25:54.000Z | clients/elixir/generated/lib/swaggy_jenkins/model/favorite_impl.ex | PankTrue/swaggy-jenkins | aca35a7cca6e1fcc08bd399e05148942ac2f514b | [
"MIT"
] | 11 | 2017-08-31T19:00:20.000Z | 2021-12-19T12:04:12.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule SwaggyJenkins.Model.FavoriteImpl do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"_class",
:"_links",
:"item"
]
@type t :: %__MODULE__{
:"_class" => String.t,
:"_links" => FavoriteImpllinks,
:"item" => PipelineImpl
}
end
defimpl Poison.Decoder, for: SwaggyJenkins.Model.FavoriteImpl do
import SwaggyJenkins.Deserializer
def decode(value, options) do
value
|> deserialize(:"_links", :struct, SwaggyJenkins.Model.FavoriteImpllinks, options)
|> deserialize(:"item", :struct, SwaggyJenkins.Model.PipelineImpl, options)
end
end
| 23.272727 | 91 | 0.6875 |
f7d5f6ee749858d80aaca4dbb5416779d5b2f3a7 | 1,078 | exs | Elixir | deps/phoenix_html/mix.exs | matin360/TaksoWebApp | 4dd8fef625ecc2364fe1d6e18e73c96c59d15349 | [
"MIT"
] | 1 | 2021-11-17T10:24:48.000Z | 2021-11-17T10:24:48.000Z | deps/phoenix_html/mix.exs | matin360/TaksoWebApp | 4dd8fef625ecc2364fe1d6e18e73c96c59d15349 | [
"MIT"
] | null | null | null | deps/phoenix_html/mix.exs | matin360/TaksoWebApp | 4dd8fef625ecc2364fe1d6e18e73c96c59d15349 | [
"MIT"
] | null | null | null | defmodule PhoenixHtml.Mixfile do
use Mix.Project
# Also change package.json version
@version "3.0.4"
def project do
[
app: :phoenix_html,
version: @version,
elixir: "~> 1.7",
deps: deps(),
name: "Phoenix.HTML",
description: "Phoenix view functions for working with HTML templates",
package: package(),
docs: [
source_ref: "v#{@version}",
main: "Phoenix.HTML",
source_url: "https://github.com/phoenixframework/phoenix_html"
]
]
end
def application do
[
extra_applications: [:eex, :logger],
env: [csrf_token_reader: {Plug.CSRFProtection, :get_csrf_token_for, []}]
]
end
defp deps do
[
{:plug, "~> 1.5", optional: true},
{:ex_doc, ">= 0.0.0", only: :docs}
]
end
defp package do
[
maintainers: ["Chris McCord", "José Valim"],
licenses: ["MIT"],
links: %{github: "https://github.com/phoenixframework/phoenix_html"},
files: ~w(lib priv CHANGELOG.md LICENSE mix.exs package.json README.md)
]
end
end
| 22.93617 | 78 | 0.589981 |
f7d5f797b40822383174c3732ee22f9036503dcd | 749 | exs | Elixir | test/state_test.exs | lee-dohm/git-delete-safe | b8d161bbdc64f0b735265db337eb878cb36e552e | [
"MIT"
] | 2 | 2019-07-06T02:41:05.000Z | 2020-01-26T17:54:16.000Z | test/state_test.exs | lee-dohm/git-delete-safe | b8d161bbdc64f0b735265db337eb878cb36e552e | [
"MIT"
] | null | null | null | test/state_test.exs | lee-dohm/git-delete-safe | b8d161bbdc64f0b735265db337eb878cb36e552e | [
"MIT"
] | null | null | null | defmodule GitDeleteSafe.StateTest do
use ExUnit.Case, async: true
alias GitDeleteSafe.State
def options(options \\ [], arguments \\ [], invalid \\ []) do
{options, arguments, invalid}
end
test "creates a state struct from an OptionParser tuple" do
state = State.new(options())
assert state.arguments == []
assert state.invalid_options == []
assert state.options == %{}
end
test "converts the options into a map so they can be pattern-matched" do
state = State.new(options(foo: "bar"))
assert state.options == %{foo: "bar"}
end
test "concatenates multiple option keys into a list" do
state = State.new(options(foo: "bar", foo: "baz"))
assert state.options.foo == ["baz", "bar"]
end
end
| 24.966667 | 74 | 0.659546 |
f7d60fe0028aae023d35c9584ee4b589c0d39017 | 730 | ex | Elixir | examples/phoenix_basic_example/lib/dripper_example_web/gettext.ex | Ninigi/ecto_dripper | ca472e90aafd3313b91a268f81e6aa936c59e021 | [
"MIT"
] | 1 | 2018-06-28T11:15:14.000Z | 2018-06-28T11:15:14.000Z | examples/phoenix_basic_example/lib/dripper_example_web/gettext.ex | Ninigi/ecto_dripper | ca472e90aafd3313b91a268f81e6aa936c59e021 | [
"MIT"
] | null | null | null | examples/phoenix_basic_example/lib/dripper_example_web/gettext.ex | Ninigi/ecto_dripper | ca472e90aafd3313b91a268f81e6aa936c59e021 | [
"MIT"
] | null | null | null | defmodule DripperExampleWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import DripperExampleWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :dripper_example
end
| 29.2 | 72 | 0.690411 |
f7d62f5382aeb04f61c6f66d9bc5f3ac0325541e | 705 | exs | Elixir | test/test_helper.exs | access-company/testgear | 6817b5a9f782fd501c5e8c74f12a33617bec4ba8 | [
"Apache-2.0"
] | 1 | 2018-04-26T08:40:13.000Z | 2018-04-26T08:40:13.000Z | test/test_helper.exs | access-company/testgear | 6817b5a9f782fd501c5e8c74f12a33617bec4ba8 | [
"Apache-2.0"
] | 14 | 2018-04-02T09:12:03.000Z | 2019-10-08T04:43:39.000Z | test/test_helper.exs | access-company/testgear | 6817b5a9f782fd501c5e8c74f12a33617bec4ba8 | [
"Apache-2.0"
] | 2 | 2018-04-26T05:50:06.000Z | 2021-09-01T18:48:43.000Z | # Copyright(c) 2015-2021 ACCESS CO., LTD. All rights reserved.
Antikythera.Test.Config.init()
Antikythera.Test.GearConfigHelper.set_config(%{"BASIC_AUTHENTICATION_ID" => "admin", "BASIC_AUTHENTICATION_PW" => "password"})
defmodule Req do
use Antikythera.Test.HttpClient
end
defmodule Socket do
use Antikythera.Test.WebsocketClient
end
defmodule Cookie do
def response_to_request_cookie(res) do
cookie_header_value = Enum.map_join(res.cookies, "; ", fn {name, cookie} -> "#{name}=#{cookie.value}" end)
%{"cookie" => cookie_header_value}
end
def valid?(res, name) do
Map.has_key?(res.cookies, name)
end
def expired?(res, name) do
res.cookies[name].max_age == 0
end
end
| 25.178571 | 126 | 0.723404 |
f7d632a1c86a12975da7d02e8c5ad6772adc83f2 | 2,475 | ex | Elixir | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/endpoint.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/endpoint.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/endpoint.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RealTimeBidding.V1.Model.Endpoint do
@moduledoc """
Bidder endpoint that receives bid requests.
## Attributes
* `bidProtocol` (*type:* `String.t`, *default:* `nil`) - The protocol that the bidder endpoint is using.
* `maximumQps` (*type:* `String.t`, *default:* `nil`) - The maximum number of queries per second allowed to be sent to this server.
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. Name of the endpoint resource that must follow the pattern `bidders/{bidderAccountId}/endpoints/{endpointId}`, where {bidderAccountId} is the account ID of the bidder who operates this endpoint, and {endpointId} is a unique ID assigned by the server.
* `tradingLocation` (*type:* `String.t`, *default:* `nil`) - The trading location that bid requests should be sent from. See https://developers.google.com/authorized-buyers/rtb/peer-guide#trading-locations for further information.
* `url` (*type:* `String.t`, *default:* `nil`) - Output only. The URL that bid requests should be sent to.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:bidProtocol => String.t() | nil,
:maximumQps => String.t() | nil,
:name => String.t() | nil,
:tradingLocation => String.t() | nil,
:url => String.t() | nil
}
field(:bidProtocol)
field(:maximumQps)
field(:name)
field(:tradingLocation)
field(:url)
end
defimpl Poison.Decoder, for: GoogleApi.RealTimeBidding.V1.Model.Endpoint do
def decode(value, options) do
GoogleApi.RealTimeBidding.V1.Model.Endpoint.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.RealTimeBidding.V1.Model.Endpoint do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.949153 | 317 | 0.709899 |
f7d6533e232b64ee0411868a00cdd1a338163cd0 | 474 | ex | Elixir | lib/central/communication/schemas/chat_response.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 4 | 2021-07-29T16:23:20.000Z | 2022-02-23T05:34:36.000Z | lib/central/communication/schemas/chat_response.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/central/communication/schemas/chat_response.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule Central.Communication.ChatResponse do
@moduledoc false
use CentralWeb, :schema
schema "communication_chat_responses" do
field :content, :string
belongs_to :user, Central.Account.User
belongs_to :chat_room, Central.Communication.ChatRoom
timestamps()
end
@doc false
def changeset(chat, attrs) do
chat
|> cast(attrs, [:content, :user_id, :chat_room_id])
|> validate_required([:content, :user_id, :chat_room_id])
end
end
| 22.571429 | 61 | 0.719409 |
f7d662dc1663bd9d163317eb8092968627a04e74 | 383 | exs | Elixir | test/ex_doc/markdown/earmark_test.exs | k1complete/ex_doc | 512dcabef778f9af675dbdfc44fa2bef2b8924f0 | [
"Apache-2.0"
] | null | null | null | test/ex_doc/markdown/earmark_test.exs | k1complete/ex_doc | 512dcabef778f9af675dbdfc44fa2bef2b8924f0 | [
"Apache-2.0"
] | null | null | null | test/ex_doc/markdown/earmark_test.exs | k1complete/ex_doc | 512dcabef778f9af675dbdfc44fa2bef2b8924f0 | [
"Apache-2.0"
] | null | null | null | defmodule MarkdownTest.EarmarkTest do
use ExUnit.Case, async: true
alias ExDoc.Markdown.Earmark, as: Markdown
@moduletag :earmark
test "to_html generate the HTML from the markdown" do
assert Markdown.to_html("# Test\n\nHello") ==
~s(<h1>Test</h1>\n<p>Hello</p>)
end
test "to_html handles empty input" do
assert Markdown.to_html("") == ""
end
end
| 22.529412 | 55 | 0.67624 |
f7d6e6be87dd4e6e21e92a07ce383849617c30a7 | 1,830 | ex | Elixir | clients/games_management/lib/google_api/games_management/v1management/model/player_name.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/games_management/lib/google_api/games_management/v1management/model/player_name.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/games_management/lib/google_api/games_management/v1management/model/player_name.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.GamesManagement.V1management.Model.PlayerName do
@moduledoc """
An object representation of the individual components of the player's name. For some players, these fields may not be present.
## Attributes
- familyName (String.t): The family name of this player. In some places, this is known as the last name. Defaults to: `null`.
- givenName (String.t): The given name of this player. In some places, this is known as the first name. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:familyName => any(),
:givenName => any()
}
field(:familyName)
field(:givenName)
end
defimpl Poison.Decoder, for: GoogleApi.GamesManagement.V1management.Model.PlayerName do
def decode(value, options) do
GoogleApi.GamesManagement.V1management.Model.PlayerName.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.GamesManagement.V1management.Model.PlayerName do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.882353 | 132 | 0.744809 |
f7d6f29ff85d99e76938fd2202ff7bab4e1ef4e5 | 1,899 | ex | Elixir | apps/eth_blockchain/lib/eth_blockchain/balance.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/eth_blockchain/lib/eth_blockchain/balance.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/eth_blockchain/lib/eth_blockchain/balance.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EthBlockchain.Balance do
@moduledoc false
import Utils.Helpers.Encoding
alias EthBlockchain.{Adapter, ABIEncoder}
@doc """
Retrieve the balance of all given `contract_addresses` for the provided wallet `address`.
Ether is represented with `0x0000000000000000000000000000000000000000` as contract address.
Any other given contract address will have their balance retrived on the corresponding
smart contract.
Returns a tuple of
```
{
:ok,
%{
"contract_address_1" => integer_balance_1,
"contract_address_2" => integer_balance_2
}
}
```
if successful or {:error, error_code} if failed.
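  For example (a sketch; the wallet and token addresses below are illustrative):
  ```
  EthBlockchain.Balance.get(%{
    address: "0x5b9a8173b3f2fcbdbd206dff98cbc3c4128a7b88",
    contract_addresses: ["0x0000000000000000000000000000000000000000"]
  })
  ```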
"""
def get(attrs, adapter \\ nil, pid \\ nil)
def get(%{block: _} = attrs, adapter, pid) do
do_get(attrs, adapter, pid)
end
def get(attrs, adapter, pid) do
attrs
|> Map.put(:block, "latest")
|> do_get(adapter, pid)
end
defp do_get(
%{address: address, contract_addresses: contract_addresses, block: block},
adapter,
pid
) do
case ABIEncoder.balance_of(address) do
{:ok, encoded_abi_data} ->
Adapter.call(
{:get_balances, address, contract_addresses, to_hex(encoded_abi_data), block},
adapter,
pid
)
error ->
error
end
end
end
| 27.521739 | 93 | 0.682991 |
f7d710c5d61d0f1031e9d05b0a2bf2fc238633cd | 1,264 | ex | Elixir | lib/idea_portal/recaptcha/mock.ex | BaltimoreCity/IdeaPortal | dc1c775dfaec2aac974b821cd3700d76770c1e76 | [
"MIT"
] | 5 | 2019-08-29T20:22:25.000Z | 2020-04-01T17:40:48.000Z | lib/idea_portal/recaptcha/mock.ex | BaltimoreCity/IdeaPortal | dc1c775dfaec2aac974b821cd3700d76770c1e76 | [
"MIT"
] | 34 | 2019-03-06T17:53:29.000Z | 2021-09-01T01:25:23.000Z | lib/idea_portal/recaptcha/mock.ex | BaltimoreCity/IdeaPortal | dc1c775dfaec2aac974b821cd3700d76770c1e76 | [
"MIT"
] | 2 | 2020-01-10T22:12:36.000Z | 2021-01-22T04:37:45.000Z | defmodule IdeaPortal.Recaptcha.Mock do
@moduledoc """
Mock implementation details for Recaptcha
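  For example, a test might force an invalid token response (a sketch):
  ```elixir
  IdeaPortal.Recaptcha.Mock.set_valid_token_response(false)
  refute IdeaPortal.Recaptcha.Mock.valid_token?("any-token")
  ```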
"""
@behaviour IdeaPortal.Recaptcha
alias __MODULE__.FakeCaptcha
@doc false
def start_mock() do
{:ok, pid} = FakeCaptcha.start_link()
    Process.put(:recaptcha, pid)
end
def set_valid_token_response(is_valid) do
start_mock()
    pid = Process.get(:recaptcha)
FakeCaptcha.set_valid_token(pid, is_valid)
end
@impl true
def valid_token?(_token) do
    case Process.get(:recaptcha) do
nil ->
true
pid ->
FakeCaptcha.valid_token?(pid)
end
end
defmodule FakeCaptcha do
@moduledoc false
use GenServer
def start_link() do
GenServer.start_link(__MODULE__, [])
end
def set_valid_token(pid, is_valid) do
GenServer.call(pid, {:set_valid, is_valid})
end
def valid_token?(pid) do
GenServer.call(pid, :valid_token?)
end
def init(_) do
{:ok, %{valid_token?: false}}
end
def handle_call({:set_valid, is_valid}, _from, state) do
state = Map.put(state, :valid_token?, is_valid)
{:reply, :ok, state}
end
def handle_call(:valid_token?, _from, state) do
{:reply, state.valid_token?, state}
end
end
end
| 19.446154 | 60 | 0.648734 |
f7d73ed81630f2d2da8250cd7c944ee05d7eb97d | 76 | exs | Elixir | test/rumbl_web/views/page_view_test.exs | brunorafa/rumbl | 910e6ecfaae8da8e54da9e67871a02885c2f383f | [
"MIT"
] | 1 | 2021-05-30T20:57:51.000Z | 2021-05-30T20:57:51.000Z | test/rumbl_web/views/page_view_test.exs | brunorafa/rumbl | 910e6ecfaae8da8e54da9e67871a02885c2f383f | [
"MIT"
] | 2 | 2021-03-09T19:04:16.000Z | 2021-05-10T16:20:10.000Z | test/rumbl_web/views/page_view_test.exs | brunorafa/rumbl | 910e6ecfaae8da8e54da9e67871a02885c2f383f | [
"MIT"
] | 1 | 2020-07-17T14:48:52.000Z | 2020-07-17T14:48:52.000Z | defmodule RumblWeb.PageViewTest do
use RumblWeb.ConnCase, async: true
end
| 19 | 36 | 0.815789 |
f7d74f1d9bcd72e9ead1ce5aec3fa202d27f4bc5 | 568 | ex | Elixir | lib/swedbankpay_checkout/model/shipping_address.ex | hooplab/swedbank-pay-checkout-elixir | 91c9034d898907d15c513623519382a0648212c3 | [
"MIT"
] | null | null | null | lib/swedbankpay_checkout/model/shipping_address.ex | hooplab/swedbank-pay-checkout-elixir | 91c9034d898907d15c513623519382a0648212c3 | [
"MIT"
] | null | null | null | lib/swedbankpay_checkout/model/shipping_address.ex | hooplab/swedbank-pay-checkout-elixir | 91c9034d898907d15c513623519382a0648212c3 | [
"MIT"
] | null | null | null | defmodule SwedbankpayCheckout.Model.ShippingAddress do
@moduledoc """
"""
@typedoc """
.shipping_address
"""
@type t :: %__MODULE__{
:addressee => String.t(),
:co_address => String.t(),
:street_address => String.t(),
:zip_code => String.t(),
:city => String.t(),
:country_code => String.t()
}
@derive Poison.Encoder
defstruct [
:addressee,
:co_address,
:street_address,
:zip_code,
:city,
:country_code
]
@doc false
def shell(), do: %__MODULE__{}
end
| 19.586207 | 54 | 0.549296 |
f7d75dbf74d4169afce735854c1f1a0349837569 | 75 | exs | Elixir | test/test_helper.exs | hiivemarkets/persona-elixir | 58a03859a4a0f64c973b9df0394bc5335d5c7096 | [
"MIT"
] | null | null | null | test/test_helper.exs | hiivemarkets/persona-elixir | 58a03859a4a0f64c973b9df0394bc5335d5c7096 | [
"MIT"
] | null | null | null | test/test_helper.exs | hiivemarkets/persona-elixir | 58a03859a4a0f64c973b9df0394bc5335d5c7096 | [
"MIT"
] | null | null | null | ExUnit.start()
Mox.defmock(
Pandadoc.Tesla.Mock,
for: Tesla.Adapter
)
| 10.714286 | 22 | 0.706667 |
f7d770f294c79a1392e60d8cfcc70c476e49aed6 | 821 | exs | Elixir | test/remote_checkout_test.exs | sxarp/remote-checkout | 8bafe01458d92441ea3dd5e701f151a361fa4f27 | [
"MIT"
] | null | null | null | test/remote_checkout_test.exs | sxarp/remote-checkout | 8bafe01458d92441ea3dd5e701f151a361fa4f27 | [
"MIT"
] | null | null | null | test/remote_checkout_test.exs | sxarp/remote-checkout | 8bafe01458d92441ea3dd5e701f151a361fa4f27 | [
"MIT"
] | null | null | null | defmodule RemoteCheckoutTest do
use ExUnit.Case
doctest RemoteCheckout
alias RemoteCheckout, as: RC
alias TreeStorage, as: TS
@expand :expand
@root [
%{"name" => "LICENSE", "oid" => "82e", "type" => "blob"},
%{"name" => "README.md", "oid" => "f48", "type" => "blob"},
%{"name" => "lib", "oid" => "3b8", "type" => "tree"},
%{"name" => "test", "oid" => "0f5", "type" => "tree"}]
test "to_tree" do
assert [%{"name" => "LICENSE", "oid" => "82e", "type" => "blob"},
%{"name" => "lib", "oid" => "3b8", "type" => "tree"}]
|> RC.to_tree() ==
[TS.leaf("LICENSE", "82e"), TS.tree("lib", [TS.leaf(@expand, "3b8")])]
end
test "find and get" do
root = RC.to_tree(@root)
assert RC.find_expand(root) == ["lib"]
assert RC.get_expand(root, ["lib"]) == "3b8"
end
end
| 30.407407 | 74 | 0.518879 |
f7d7742895f6785d9ab7b6be6ebbeee5c2b9401f | 2,238 | exs | Elixir | config/dev.exs | Kapeusz/employee_reward_app | 738d1514ec733b0e8027423e740abdbdc27716d0 | [
"PostgreSQL",
"MIT"
] | null | null | null | config/dev.exs | Kapeusz/employee_reward_app | 738d1514ec733b0e8027423e740abdbdc27716d0 | [
"PostgreSQL",
"MIT"
] | null | null | null | config/dev.exs | Kapeusz/employee_reward_app | 738d1514ec733b0e8027423e740abdbdc27716d0 | [
"PostgreSQL",
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :employee_reward_app, EmployeeRewardApp.Repo,
username: "postgres",
password: "postgres",
database: "employee_reward_app_dev",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :employee_reward_app, EmployeeRewardAppWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :employee_reward_app, EmployeeRewardAppWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/employee_reward_app_web/(live|views)/.*(ex)$",
~r"lib/employee_reward_app_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 29.064935 | 68 | 0.704647 |
f7d7a565fc91dd2ea9183b7f0f4ce25415f52a0d | 348 | ex | Elixir | lib/meeseeks/selector/combinator/parent.ex | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 291 | 2017-03-27T15:53:36.000Z | 2022-03-14T23:01:42.000Z | lib/meeseeks/selector/combinator/parent.ex | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 70 | 2017-03-30T23:32:34.000Z | 2021-06-27T06:26:28.000Z | lib/meeseeks/selector/combinator/parent.ex | RichMorin/meeseeks | d52a15a0b78acfc4d7b979d1df6e146482dc3a10 | [
"Apache-2.0",
"MIT"
] | 23 | 2017-06-18T10:29:04.000Z | 2021-11-04T13:08:12.000Z | defmodule Meeseeks.Selector.Combinator.Parent do
use Meeseeks.Selector.Combinator
@moduledoc false
alias Meeseeks.Document
defstruct selector: nil
@impl true
def next(_combinator, node, document) do
case Document.parent(document, node.id) do
nil -> nil
parent -> Document.get_node(document, parent)
end
end
end
| 20.470588 | 51 | 0.724138 |
f7d7f229b3de544cd6e8e2ec7ed17f4a1e84d37f | 2,608 | ex | Elixir | lib/konvex/implementation/riak/ability/to_delete_key.ex | KosyanMedia/konvex | 03f538a7223bbfa85a82ebdbd77f1fd70dc23c2a | [
"MIT"
] | 1 | 2022-03-08T08:30:24.000Z | 2022-03-08T08:30:24.000Z | lib/konvex/implementation/riak/ability/to_delete_key.ex | KosyanMedia/konvex | 03f538a7223bbfa85a82ebdbd77f1fd70dc23c2a | [
"MIT"
] | 1 | 2022-01-28T16:45:57.000Z | 2022-01-28T16:56:22.000Z | lib/konvex/implementation/riak/ability/to_delete_key.ex | KosyanMedia/konvex | 03f538a7223bbfa85a82ebdbd77f1fd70dc23c2a | [
"MIT"
] | null | null | null | defmodule Konvex.Implementation.Riak.Ability.ToDeleteKey do
@doc """
Value type specification is compulsory
due to Riak library has different delete functions for different data types
(it has Riak.delete/3 for regular KV-objects and Riak.delete/4 for CRDTs)
https://docs.riak.com/riak/kv/2.2.3/developing/key-value-modeling/index.html#bucket-types-as-additional-namespaces
"""
defmacro __using__(
[
bucket_name: <<_, _ :: binary>> = bucket_name,
connection: quoted_riak_connection,
crdt_name: <<_, _ :: binary>> = crdt_name,
value_type: :crdt
]
) do
quote do
import Konvex.Implementation.Riak.Connection.Usage, only: [using: 2]
@behaviour Konvex.Ability.ToDeleteKey
@impl Konvex.Ability.ToDeleteKey
@spec delete_key(key :: String.t) :: :unit
def delete_key(key) when is_binary(key) do
using unquote(quoted_riak_connection), fn connection_pid ->
case :riakc_pb_socket.delete(connection_pid, {unquote(crdt_name), unquote(bucket_name)}, key) do
:ok ->
:unit
{:error, riakc_pb_socket_delete_error} ->
object_locator =
"#{unquote(bucket_name)}:#{key}"
error_message =
inspect riakc_pb_socket_delete_error
raise "Failed to delete #{object_locator} from Riak, :riakc_pb_socket.delete/3 responded: #{error_message}"
end
end
end
end
end
defmacro __using__(
[
bucket_name: <<_, _ :: binary>> = bucket_name,
connection: quoted_riak_connection,
value_type: :text
]
) do
quote do
import Konvex.Implementation.Riak.Connection.Usage, only: [using: 2]
@behaviour Konvex.Ability.ToDeleteKey
@impl Konvex.Ability.ToDeleteKey
@spec delete_key(key :: String.t) :: :unit
def delete_key(key) when is_binary(key) do
using unquote(quoted_riak_connection), fn connection_pid ->
case :riakc_pb_socket.delete(connection_pid, unquote(bucket_name), key) do
:ok ->
:unit
{:error, riakc_pb_socket_delete_error} ->
object_locator =
"#{unquote(bucket_name)}:#{key}"
error_message =
inspect riakc_pb_socket_delete_error
raise "Failed to delete #{object_locator} from Riak, :riakc_pb_socket.delete/3 responded: #{error_message}"
end
end
end
end
end
end
| 35.726027 | 121 | 0.604678 |
f7d7f62bfe72e5e3574f2225bbc17999679a261b | 1,524 | exs | Elixir | lib/ex_unit/test/ex_unit/case_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | 4 | 2015-12-22T02:46:39.000Z | 2016-04-26T06:11:09.000Z | lib/ex_unit/test/ex_unit/case_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/case_test.exs | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:48.000Z | 2017-07-25T21:46:48.000Z | Code.require_file "../test_helper.exs", __DIR__
defmodule ExUnit.CaseTest do
use ExUnit.Case, async: true
ExUnit.Case.register_attribute __MODULE__, :foo
ExUnit.Case.register_attribute __MODULE__, :bar, accumulate: true
ExUnit.Case.register_attribute __MODULE__, :baz
@moduletag :moduletag
test "defines test case info" do
assert %ExUnit.TestCase{name: __MODULE__, tests: tests} = __ex_unit__(:case)
assert length(tests) > 0
end
@tag hello: false
@tag :hello
@tag world: :bad
@tag world: :good
test "tags", context do
line = __ENV__.line - 1
assert context[:case] == __MODULE__
assert context[:test] == __ENV__.function |> elem(0)
assert context[:line] == line
assert context[:async] == true
assert context[:hello] == true
assert context[:world] == :good
end
test "reset tags", context do
assert is_nil(context[:hello])
assert is_nil(context[:world])
end
test "module tags", context do
assert context[:moduletag] == true
end
@tag moduletag: :overridden
test "module tags can be overridden", context do
assert context[:moduletag] == :overridden
end
@foo :hello
@bar :world
test "registered attributes are in context", context do
assert context.registered.foo == :hello
assert context.registered.bar == [:world]
assert context.registered.baz == nil
end
test "registered attributes are set per test", context do
assert context.registered.foo == nil
assert context.registered.bar == []
end
end
| 26.275862 | 80 | 0.69357 |
f7d8016abb4e1e8d75d92cc4975876f5247f54e2 | 245 | ex | Elixir | lib/crew.ex | anamba/crew | c25f6a1d6ddbe0b58da9d556ff53a641c4d2a7b1 | [
"BSL-1.0"
] | null | null | null | lib/crew.ex | anamba/crew | c25f6a1d6ddbe0b58da9d556ff53a641c4d2a7b1 | [
"BSL-1.0"
] | 5 | 2020-07-20T01:49:01.000Z | 2021-09-08T00:17:04.000Z | lib/crew.ex | anamba/crew | c25f6a1d6ddbe0b58da9d556ff53a641c4d2a7b1 | [
"BSL-1.0"
] | null | null | null | defmodule Crew do
@moduledoc """
Crew keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 24.5 | 66 | 0.746939 |
f7d81a0270a9847737aa82cddf4a5b16db576824 | 1,849 | exs | Elixir | test/grizzly/command_class/time/time_get_test.exs | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | test/grizzly/command_class/time/time_get_test.exs | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | test/grizzly/command_class/time/time_get_test.exs | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | defmodule Grizzly.CommandClass.Time.TimeGet.Test do
use ExUnit.Case, async: true
alias Grizzly.Packet
alias Grizzly.CommandClass.Time.TimeGet
describe "implements Grizzly.Command correctly" do
test "initializes to command" do
assert {:ok, %TimeGet{}} == TimeGet.init([])
end
test "encodes correctly" do
{:ok, command} = TimeGet.init(seq_number: 10)
binary = <<35, 2, 128, 208, 10, 0, 0, 3, 2, 0, 0x8A, 0x01>>
assert {:ok, binary} == TimeGet.encode(command)
end
test "handles ack responses" do
{:ok, command} = TimeGet.init(seq_number: 0x05)
packet = Packet.new(seq_number: 0x05, types: [:ack_response])
assert {:continue, ^command} = TimeGet.handle_response(command, packet)
end
test "handles nack respones" do
{:ok, command} = TimeGet.init(seq_number: 0x07, retries: 0)
packet = Packet.new(seq_number: 0x07, types: [:nack_response])
assert {:done, {:error, :nack_response}} == TimeGet.handle_response(command, packet)
end
test "handles retries" do
{:ok, command} = TimeGet.init(seq_number: 0x07)
packet = Packet.new(seq_number: 0x07, types: [:nack_response])
assert {:retry, %TimeGet{}} = TimeGet.handle_response(command, packet)
end
test "handles time report responses" do
report = %{
command_class: :time,
command: :time_report,
value: %{hour: 1, minute: 2, second: 3}
}
packet = Packet.new(body: report)
{:ok, command} = TimeGet.init([])
assert {:done, {:ok, %{hour: 1, minute: 2, second: 3}}} ==
TimeGet.handle_response(command, packet)
end
test "handles responses" do
{:ok, command} = TimeGet.init([])
assert {:continue, %TimeGet{}} = TimeGet.handle_response(command, %{command_class: :foo})
end
end
end
| 30.816667 | 95 | 0.631693 |
f7d81f72f067189934135dc2e1180c5b47caf6d3 | 1,497 | ex | Elixir | apps/mishka_database/lib/schema/mishka_user/user_role.ex | mojtaba-naserei/mishka-cms | 1f31f61347bab1aae6ba0d47c5515a61815db6c9 | [
"Apache-2.0"
] | 35 | 2021-06-26T09:05:50.000Z | 2022-03-30T15:41:22.000Z | apps/mishka_database/lib/schema/mishka_user/user_role.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 101 | 2021-01-01T09:54:07.000Z | 2022-03-28T10:02:24.000Z | apps/mishka_database/lib/schema/mishka_user/user_role.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 8 | 2021-01-17T17:08:07.000Z | 2022-03-11T16:12:06.000Z | defmodule MishkaDatabase.Schema.MishkaUser.UserRole do
use Ecto.Schema
require MishkaTranslator.Gettext
import Ecto.Changeset
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
schema "users_roles" do
belongs_to :users, MishkaDatabase.Schema.MishkaUser.User, foreign_key: :user_id, type: :binary_id
belongs_to :roles, MishkaDatabase.Schema.MishkaUser.Role, foreign_key: :role_id, type: :binary_id
timestamps(type: :utc_datetime)
end
@spec changeset(struct(), map()) :: Ecto.Changeset.t()
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:user_id, :role_id])
|> validate_required([:user_id, :role_id], message: MishkaTranslator.Gettext.dgettext("db_schema_user", "فیلد مذکور نمی تواند خالی باشد"))
|> MishkaDatabase.validate_binary_id(:user_id)
|> MishkaDatabase.validate_binary_id(:role_id)
|> foreign_key_constraint(:user_id, message: MishkaTranslator.Gettext.dgettext("db_schema_user", "ممکن است فیلد مذکور اشتباه باشد یا برای حذف آن اگر اقدام می کنید برای آن وابستگی وجود داشته باشد"))
|> foreign_key_constraint(:role_id, message: MishkaTranslator.Gettext.dgettext("db_schema_user", "ممکن است فیلد مذکور اشتباه باشد یا برای حذف آن اگر اقدام می کنید برای آن وابستگی وجود داشته باشد"))
|> unique_constraint(:role_id, name: :index_users_roles_on_role_id_and_user_id, message: MishkaTranslator.Gettext.dgettext("db_schema_user", "این حساب کربری از قبل در سیستم ثبت شده است."))
end
end
| 51.62069 | 201 | 0.758183 |
f7d8476863fae60e5847e29358626af75a2d39f7 | 2,851 | ex | Elixir | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_android_app_data_stream.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_android_app_data_stream.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_android_app_data_stream.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaAndroidAppDataStream do
@moduledoc """
A resource message representing a Google Analytics Android app stream.
## Attributes
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Time when this stream was originally created.
* `displayName` (*type:* `String.t`, *default:* `nil`) - Human-readable display name for the Data Stream. The max allowed display name length is 255 UTF-16 code units.
* `firebaseAppId` (*type:* `String.t`, *default:* `nil`) - Output only. ID of the corresponding Android app in Firebase, if any. This ID can change if the Android app is deleted and recreated.
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. Resource name of this Data Stream. Format: properties/{property_id}/androidAppDataStreams/{stream_id} Example: "properties/1000/androidAppDataStreams/2000"
* `packageName` (*type:* `String.t`, *default:* `nil`) - Immutable. The package name for the app being measured. Example: "com.example.myandroidapp"
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Time when stream payload fields were last updated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:createTime => DateTime.t(),
:displayName => String.t(),
:firebaseAppId => String.t(),
:name => String.t(),
:packageName => String.t(),
:updateTime => DateTime.t()
}
field(:createTime, as: DateTime)
field(:displayName)
field(:firebaseAppId)
field(:name)
field(:packageName)
field(:updateTime, as: DateTime)
end
defimpl Poison.Decoder,
for: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaAndroidAppDataStream do
def decode(value, options) do
GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaAndroidAppDataStream.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaAndroidAppDataStream do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.552239 | 222 | 0.722203 |
f7d8564ed425afdbb73fa31536aec37656a4b386 | 7,527 | ex | Elixir | lib/aws/generated/code_star_connections.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/code_star_connections.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/code_star_connections.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.CodeStarConnections do
@moduledoc """
AWS CodeStar Connections
The CodeStar Connections feature is in preview release and is subject to change.
This AWS CodeStar Connections API Reference provides descriptions and usage
examples of the operations and data types for the AWS CodeStar Connections API.
You can use the connections API to work with connections and installations.
*Connections* are configurations that you use to connect AWS resources to
external code repositories. Each connection is a resource that can be given to
services such as CodePipeline to connect to a third-party repository such as
Bitbucket. For example, you can add the connection in CodePipeline so that it
triggers your pipeline when a code change is made to your third-party code
repository. Each connection is named and associated with a unique ARN that is
used to reference the connection.
When you create a connection, the console initiates a third-party connection
handshake. *Installations* are the apps that are used to conduct this handshake.
For example, the installation for the Bitbucket provider type is the Bitbucket
Cloud app. When you create a connection, you can choose an existing installation
or create one.
When you want to create a connection to an installed provider type such as
GitHub Enterprise Server, you create a *host* for your connections.
You can work with connections by calling:
* `CreateConnection`, which creates a uniquely named connection that
can be referenced by services such as CodePipeline.
* `DeleteConnection`, which deletes the specified connection.
* `GetConnection`, which returns information about the connection,
including the connection status.
* `ListConnections`, which lists the connections associated with
your account.
You can work with hosts by calling:
* `CreateHost`, which creates a host that represents the
infrastructure where your provider is installed.
* `DeleteHost`, which deletes the specified host.
* `GetHost`, which returns information about the host, including the
setup status.
* `ListHosts`, which lists the hosts associated with your account.
You can work with tags in AWS CodeStar Connections by calling the following:
* `ListTagsForResource`, which gets information about AWS tags for a
specified Amazon Resource Name (ARN) in AWS CodeStar Connections.
* `TagResource`, which adds or updates tags for a resource in AWS
CodeStar Connections.
* `UntagResource`, which removes tags for a resource in AWS CodeStar
Connections.
For information about how to use AWS CodeStar Connections, see the [Developer Tools User
Guide](https://docs.aws.amazon.com/dtconsole/latest/userguide/welcome-connections.html).
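  For example, listing connections might look like this (a sketch; the client fields
  shown are illustrative and depend on how you build your `AWS.Client`):
  ```elixir
  client = %AWS.Client{
    access_key_id: "AKIA...",
    secret_access_key: "...",
    region: "us-east-1",
    endpoint: "amazonaws.com",
    proto: "https",
    port: 443
  }
  {:ok, connections, _response} = AWS.CodeStarConnections.list_connections(client, %{})
  ```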
"""
@doc """
Creates a connection that can then be given to other AWS services like
CodePipeline so that it can access third-party code repositories.
The connection is in pending status until the third-party connection handshake
is completed from the console.
"""
def create_connection(client, input, options \\ []) do
request(client, "CreateConnection", input, options)
end
@doc """
Creates a resource that represents the infrastructure where a third-party
provider is installed.
The host is used when you create connections to an installed third-party
provider type, such as GitHub Enterprise Server. You create one host for all
connections to that provider.
A host created through the CLI or the SDK is in `PENDING` status by default. You
can make its status `AVAILABLE` by setting up the host in the console.
"""
def create_host(client, input, options \\ []) do
request(client, "CreateHost", input, options)
end
@doc """
The connection to be deleted.
"""
def delete_connection(client, input, options \\ []) do
request(client, "DeleteConnection", input, options)
end
@doc """
The host to be deleted.
Before you delete a host, all connections associated to the host must be
deleted.
A host cannot be deleted if it is in the VPC_CONFIG_INITIALIZING or
VPC_CONFIG_DELETING state.
"""
def delete_host(client, input, options \\ []) do
request(client, "DeleteHost", input, options)
end
@doc """
Returns the connection ARN and details such as status, owner, and provider type.
"""
def get_connection(client, input, options \\ []) do
request(client, "GetConnection", input, options)
end
@doc """
Returns the host ARN and details such as status, provider type, endpoint, and,
if applicable, the VPC configuration.
"""
def get_host(client, input, options \\ []) do
request(client, "GetHost", input, options)
end
@doc """
Lists the connections associated with your account.
"""
def list_connections(client, input, options \\ []) do
request(client, "ListConnections", input, options)
end
@doc """
Lists the hosts associated with your account.
"""
def list_hosts(client, input, options \\ []) do
request(client, "ListHosts", input, options)
end
@doc """
Gets the set of key-value pairs (metadata) that are used to manage the resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Adds to or modifies the tags of the given resource.
Tags are metadata that can be used to manage a resource.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes tags from an AWS resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "codestar-connections"}
host = build_host("codestar-connections", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.0"},
{"X-Amz-Target", "com.amazonaws.codestar.connections.CodeStar_connections_20191201.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
| 33.753363 | 100 | 0.710376 |
f7d864412e56c9ac64e750b80226e28eb3261fc9 | 4,861 | ex | Elixir | plugins/one_chat/lib/one_chat/slash_commands.ex | smpallen99/ucx_ucc | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 11 | 2017-05-15T18:35:05.000Z | 2018-02-05T18:27:40.000Z | plugins/one_chat/lib/one_chat/slash_commands.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 15 | 2017-11-27T10:38:05.000Z | 2018-02-09T20:42:08.000Z | plugins/one_chat/lib/one_chat/slash_commands.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 4 | 2017-09-13T11:34:16.000Z | 2018-02-26T13:37:06.000Z | defmodule OneChat.SlashCommands do
@doc """
Handles the definition of commands available to the user by starting
a message with the `/` character.
"""
import Phoenix.HTML.Tag
use InfinityOneWeb.Gettext
@default_count 10
# @commands [
# "join", "archive", "kick", "lennyface", "leave", "gimme", "create", "invite",
# "invite-all-to", "invite-all-from", "msg", "part", "unarchive", "tableflip",
# "topic", "mute", "me", "open", "unflip", "shrug", "unmute", "unhide"]
@commands [
"join", "archive", "kick", "lennyface", "leave", "gimme", "create", "invite",
"invite-all-to", "invite-all-from", "part", "unarchive", "tableflip",
"topic", "mute", "open", "unflip", "shrug", "unmute", "unhide", "call"]
@special_text %{
"gimme" => "༼ つ ◕_◕ ༽つ",
"lennyface" => "( ͡° ͜ʖ ͡°)",
"tableflip" => "(╯°□°)╯︵ ┻━┻",
"unflip" => "┬─┬ ノ( ゜-゜ノ)",
"shrug" => "¯\_(ツ)_/¯",
}
@command_data [
%{command: "join", args: "#channel", description: ~g"Join the given channel"},
%{command: "archive", args: "#channel", description: ~g"Archive"},
%{command: "kick", args: "@username", description: ~g"Remove someone from the room"},
%{command: "lennyface", args: ~g"your message (optional)", description: ~g"Displays ( ͡° ͜ʖ ͡°) after your message"},
%{command: "leave", args: "", description: ~g"Leave the current channel"},
%{command: "gimme", args: ~g"your message (optional)", description: ~g"Displays ༼ つ ◕_◕ ༽つ before your message"},
%{command: "create", args: "#channel", description: ~g"Create a new channel"},
%{command: "invite", args: "@username", description: ~g"invite one user to join this channel"},
%{command: "invite-all-to", args: "#room", description: ~g"Invite all users from this channel to join [#channel]"},
%{command: "invite-all-from", args: "#room", description: ~g"Invite all users from [#channel] to join this channel"},
# %{command: "msg", args: "@username <message>", description: ~g"Direct message someone"},
%{command: "part", args: "", description: ~g"Leave the current channel"},
%{command: "unarchive", args: "#channel", description: ~g"Unarchive"},
%{command: "tableflip", args: ~g"your message (optional)", description: ~g"Displays (╯°□°)╯︵ ┻━┻"},
%{command: "topic", args: ~g"Topic message", description: ~g"Set topic"},
%{command: "mute", args: "@username", description: ~g"Mute someone in the room"},
# %{command: "me", args: ~g"your message", description: ~g"Display action text"},
%{command: "open", args: ~g"room name", description: ~g"Opens a channel, group or direct message"},
%{command: "unflip", args: ~g"your message (optional)", description: ~g"Displays ┬─┬ ノ( ゜-゜ノ)"},
%{command: "shrug", args: ~g"your message (optional)", description: ~g"Displays ¯\_(ツ)_/¯ after your message"},
%{command: "unmute", args: "@username", description: ~g"Unmute someone in the room"},
%{command: "unhide", args: "#channel", description: ~g"Unhide a hidden channel"},
%{command: "call", args: "phone number | @username", description: ~g"Call a phone number or a user"}
]
@command_map @command_data |> Enum.reduce(%{}, fn %{command: command} = map, acc -> Map.put(acc, command, map) end)
@command_list @command_data |> Enum.filter(& &1.command in @commands) |> Enum.sort(& &1.command < &2.command)
def all_commands, do: @commands |> Enum.sort
def command_list, do: @command_list
def commands(pattern, count \\ @default_count) do
pattern
|> find(count)
|> Enum.reduce([], fn
cmd, [] -> [format_command(@command_map[cmd], " selected")]
cmd, acc -> [format_command(@command_map[cmd])|acc]
end)
|> Enum.reverse
|> case do
[] -> nil
list ->
content_tag :div, class: "message-popup-items" do
list
end
end
end
def find(pattern, count \\ @default_count)
def find("", count), do: Enum.take(@commands, count)
def find(pattern, count) do
@commands
|> Enum.reduce([], fn command, acc ->
if String.contains?(command, pattern), do: [command|acc], else: acc
end)
|> Enum.sort
|> Enum.take(count)
end
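  # Example lookup for find/2 above, derived from the @commands list:
  #
  #     find("mute")
  #     #=> ["mute", "unmute"]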
  def format_command(%{command: command, args: args, description: description}, class \\ "") do
content_tag :div, class: "popup-item#{class}", "data-name": command,
"rebel-click": "click_popup" do
[
content_tag :strong do
["/", command]
end,
[" "],
args,
content_tag :div, class: "popup-slash-command-description" do
content_tag :i do
description
end
end
]
end
end
def special_text(message) when is_binary(message) do
case @special_text[message] do
nil -> "/" <> message
value -> value
end
end
def special_text(_), do: nil
end
| 40.508333 | 121 | 0.603374 |
f7d88e799222c0a41139a961727c24b41f1a47bc | 4,960 | exs | Elixir | test/transaction_test.exs | kianmeng/tds | 36f228275c2bfde2baa45fe70d29d1eb1d44379b | [
"Apache-2.0"
] | null | null | null | test/transaction_test.exs | kianmeng/tds | 36f228275c2bfde2baa45fe70d29d1eb1d44379b | [
"Apache-2.0"
] | null | null | null | test/transaction_test.exs | kianmeng/tds | 36f228275c2bfde2baa45fe70d29d1eb1d44379b | [
"Apache-2.0"
] | 1 | 2021-09-07T15:25:40.000Z | 2021-09-07T15:25:40.000Z | defmodule Tds.TransactionTest do
use ExUnit.Case, async: true
# import ExUnit.CaptureLog
import Tds.TestHelper
setup context do
transactions =
case context[:mode] do
:transaction -> :strict
:savepoint -> :naive
end
opts = [
transactions: transactions,
isolation_level: :snapshot,
idle: :active,
backoff_type: :stop,
prepare: context[:prepare] || :named
]
opts =
Application.get_env(:tds, :opts)
|> Keyword.merge(opts)
{:ok, pid} = Tds.start_link(opts)
{:ok, pid2} = Tds.start_link(opts)
{:ok, [pid: pid, pid2: pid2]}
end
@tag mode: :transaction
@tag :transaction
test "connection works after failure during commit transaction", context do
assert transaction(fn conn ->
assert {:error, %Tds.Error{mssql: %{class: 14, number: 2627}}} =
Tds.query(
conn,
"insert into uniques values (1), (1);",
[]
)
# assert {:error, %Tds.Error{mssql: %{class: 16, number: 3971}}} =
assert {:ok, %Tds.Result{columns: [""], num_rows: 1, rows: ['*']}} =
Tds.query(conn, "SELECT 42", [])
:hi
end) == {:error, :rollback}
assert [[42]] = query("SELECT 42", [])
assert [[0]] = query("SELECT COUNT(*) FROM uniques", [])
end
@tag mode: :transaction
@tag :transaction
test "connection works after failure during rollback transaction", context do
assert transaction(fn conn ->
Tds.query(conn, "insert into uniques values (1), (2);", [])
assert {:error, %Tds.Error{mssql: %{class: 14, number: 2627}}} =
Tds.query(
conn,
"insert into uniques values (3), (3);",
[]
)
# assert {:error, %Tds.Error{mssql: %{class: 16, number: 3971}}} =
assert {:ok, %Tds.Result{columns: [""], num_rows: 1, rows: ['*']}} =
Tds.query(conn, "SELECT 42", [])
Tds.rollback(conn, :oops)
end) == {:error, :oops}
assert [[42]] = query("SELECT 42", [])
end
@tag mode: :transaction
@tag :transaction
@tag :transaction_status
test "transaction shows correct transaction status", context do
pid = context[:pid]
opts = [mode: :transaction]
assert DBConnection.status(pid, opts) == :idle
assert query("SELECT 42", []) == [[42]]
assert DBConnection.status(pid, opts) == :idle
assert DBConnection.transaction(
pid,
fn conn ->
assert DBConnection.status(conn, opts) == :transaction
assert {:error, %Tds.Error{mssql: %{class: 14, number: 2627}}} =
Tds.query(
conn,
"insert into uniques values (1), (1);",
[],
opts
)
assert DBConnection.status(conn, opts) == :error
# assert {:error, %Tds.Error{mssql: %{class: 16, number: 3971}}} =
assert {:ok,
%Tds.Result{columns: [""], num_rows: 1, rows: ['*']}} =
Tds.query(conn, "SELECT 42", [], opts)
assert DBConnection.status(conn, opts) == :error
end,
opts
) == {:error, :rollback}
assert DBConnection.status(pid, opts) == :idle
assert query("SELECT 42", []) == [[42]]
assert DBConnection.status(pid) == :idle
end
@tag mode: :transaction
@tag :transaction
test "should set transaction isolation level", context do
:ok = query("CREATE TABLE TranIsolation (num INT)", [])
# opts = Application.get_env(:tds, :opts)
# {:ok, conn} = Tds.start_link(opts)
# {:ok, conn2} = Tds.start_link(opts)
conn = context[:pid]
conn2 = context[:pid2]
pid = self()
new_pid = spawn_link fn ->
Tds.transaction(conn2, fn conn2 ->
Tds.query!(conn2, "insert into TranIsolation values (1)", [])
assert %{rows: [[1]]} = Tds.query!(conn2, "select num from TranIsolation", [])
send(pid, :in_transaction)
receive do
:commit -> :ok
after
        5000 -> raise "timeout"
end
end, [isolation_level: :snapshot])
send(pid, :committed)
end
receive do
:in_transaction -> :ok
after
5000 -> raise "timeout"
end
assert {:ok, %{rows: []}} = Tds.transaction(conn, fn conn ->
assert %{rows: []} = Tds.query!(conn, "select num from TranIsolation", [])
end, isolation_level: :snapshot)
send(new_pid, :commit)
receive do
:committed -> :ok
after
5000 -> raise "timeout"
end
assert %{rows: [[1]]} = Tds.query!(conn, "select num from TranIsolation", [])
end
end
| 30.429448 | 86 | 0.517137 |
f7d8a76c098c9d30cf9fc4b2c72243547b00d173 | 9,218 | ex | Elixir | lib/mix/lib/mix/compilers/elixir.ex | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/compilers/elixir.ex | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/compilers/elixir.ex | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Compilers.Elixir do
@moduledoc false
@manifest_vsn :v3
@doc """
Compiles stale Elixir files.
  It expects a `manifest` file, the source directories, the extensions
  to read in sources, the destination directory, a flag to know if
  compilation is being forced or not, and a callback to be invoked
once (and only if) compilation starts.
The `manifest` is written down with information including dependencies
between modules, which helps it recompile only the modules that
have changed at runtime.
"""
def compile(manifest, srcs, exts, dest, force, on_start) do
all = Mix.Utils.extract_files(srcs, exts)
{all_entries, all_sources} = parse_manifest(manifest)
modified = Mix.Utils.last_modified(manifest)
removed =
for {source, _files} <- all_sources,
not(source in all),
do: source
changed =
if force do
# A config, path dependency or manifest has
# changed, let's just compile everything
all
else
sources_mtimes = mtimes(all_sources)
# Otherwise let's start with the new sources
for(source <- all,
not Map.has_key?(all_sources, source),
do: source)
++
# Plus the sources that have changed in disk
for({source, files} <- all_sources,
times = Enum.map([source | files], &Map.fetch!(sources_mtimes, &1)),
Mix.Utils.stale?(times, [modified]),
do: source)
end
sources = update_stale_sources(all_sources, removed, changed)
{entries, changed} = update_stale_entries(all_entries, removed ++ changed,
stale_local_deps(manifest, modified))
stale = changed -- removed
cond do
stale != [] ->
compile_manifest(manifest, entries, sources, stale, dest, on_start)
:ok
removed != [] ->
write_manifest(manifest, entries, sources)
:ok
true ->
:noop
end
end
defp mtimes(sources) do
Enum.reduce(sources, %{}, fn {source, files}, map ->
Enum.reduce([source | files], map, fn file, map ->
Map.put_new_lazy(map, file, fn -> Mix.Utils.last_modified(file) end)
end)
end)
end
@doc """
Removes compiled files for the given `manifest`.
"""
def clean(manifest) do
Enum.each read_manifest(manifest), fn
{beam, _, _, _, _, _, _} ->
File.rm(beam)
{_, _} ->
:ok
end
end
@doc """
Returns protocols and implementations for the given `manifest`.
"""
def protocols_and_impls(manifest) do
for {beam, module, kind, _, _, _, _} <- read_manifest(manifest),
match?(:protocol, kind) or match?({:impl, _}, kind),
do: {module, kind, beam}
end
defp compile_manifest(manifest, entries, sources, stale, dest, on_start) do
Mix.Project.ensure_structure()
true = Code.prepend_path(dest)
on_start.()
cwd = File.cwd!
# Starts a server responsible for keeping track which files
# were compiled and the dependencies between them.
{:ok, pid} = Agent.start_link(fn -> {entries, sources} end)
try do
_ = Kernel.ParallelCompiler.files :lists.usort(stale),
each_module: &each_module(pid, dest, cwd, &1, &2, &3),
each_file: &each_file(&1),
dest: dest
Agent.cast pid, fn {entries, sources} ->
write_manifest(manifest, entries, sources)
{entries, sources}
end
after
Agent.stop(pid, :normal, :infinity)
end
:ok
end
defp each_module(pid, dest, cwd, source, module, binary) do
beam =
dest
|> Path.join(Atom.to_string(module) <> ".beam")
|> Path.relative_to(cwd)
{compile, runtime} = Kernel.LexicalTracker.remotes(module)
compile =
compile
|> List.delete(module)
|> Enum.reject(&match?("elixir_" <> _, Atom.to_string(&1)))
runtime =
runtime
|> List.delete(module)
|> Enum.reject(&match?("elixir_" <> _, Atom.to_string(&1)))
kind = detect_kind(module)
source = Path.relative_to(source, cwd)
files = get_external_resources(module, cwd)
Agent.cast pid, fn {entries, sources} ->
entries = List.keystore(entries, beam, 0, {beam, module, kind, source, compile, runtime, binary})
sources = Map.update(sources, source, files, & files ++ &1)
{entries, sources}
end
end
defp detect_kind(module) do
impl = Module.get_attribute(module, :impl)
cond do
is_list(impl) and impl[:protocol] ->
{:impl, impl[:protocol]}
is_list(Module.get_attribute(module, :protocol)) ->
:protocol
true ->
:module
end
end
defp get_external_resources(module, cwd) do
for file <- Module.get_attribute(module, :external_resource),
File.regular?(file),
relative = Path.relative_to(file, cwd),
Path.type(relative) == :relative,
do: relative
end
defp each_file(source) do
Mix.shell.info "Compiled #{source}"
end
## Resolution
defp update_stale_sources(sources, removed, changed) do
Enum.reduce changed, Map.drop(sources, removed), &Map.put(&2, &1, [])
end
# This function receives the manifest entries and some source
# files that have changed. It then, recursively, figures out
# all the files that changed (via the module dependencies) and
# return the non-changed entries and the removed sources.
defp update_stale_entries(all, [], stale) when stale == %{} do
{all, []}
end
defp update_stale_entries(all, changed, stale) do
removed = Enum.into(changed, %{}, &{&1, true})
remove_stale_entries(all, stale, removed)
end
defp remove_stale_entries(entries, old_stale, old_removed) do
{rest, new_stale, new_removed} =
Enum.reduce entries, {[], old_stale, old_removed}, &remove_stale_entry/2
if map_size(new_stale) > map_size(old_stale) or
map_size(new_removed) > map_size(old_removed) do
remove_stale_entries(rest, new_stale, new_removed)
else
{rest, Map.keys(new_removed)}
end
end
defp remove_stale_entry({beam, module, _kind, source, compile, runtime, _bin} = entry,
{rest, stale, removed}) do
cond do
# If I changed in disk or have a compile time dependency
# on something stale, I need to be recompiled.
Map.has_key?(removed, source) or Enum.any?(compile, &Map.has_key?(stale, &1)) ->
remove_and_purge(beam, module)
{rest, Map.put(stale, module, true), Map.put(removed, source, true)}
# If I have a runtime time dependency on something stale,
# I am stale too.
Enum.any?(runtime, &Map.has_key?(stale, &1)) ->
{[entry | rest], Map.put(stale, module, true), removed}
# Otherwise, we don't store it anywhere
true ->
{[entry | rest], stale, removed}
end
end
defp stale_local_deps(manifest, modified) do
base = Path.basename(manifest)
for %{scm: scm, opts: opts} = dep <- Mix.Dep.cached(),
not scm.fetchable?,
Mix.Utils.last_modified(Path.join(opts[:build], base)) > modified,
path <- Mix.Dep.load_paths(dep),
beam <- Path.wildcard(Path.join(path, "*.beam")),
Mix.Utils.last_modified(beam) > modified,
do: {beam |> Path.basename |> Path.rootname |> String.to_atom, true},
into: %{}
end
defp remove_and_purge(beam, module) do
_ = File.rm(beam)
_ = :code.purge(module)
_ = :code.delete(module)
end
## Manifest handling
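  # Each module entry in the manifest is a 7-element tuple:
  #
  #     {beam, module, kind, source, compile_deps, runtime_deps, binary}
  #
  # and each source entry is a `{source, external_files}` pair, as built by
  # each_module/6 and consumed by remove_stale_entry/2 above.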
defp read_manifest(manifest) do
case :file.consult(manifest) do
{:ok, [@manifest_vsn | t]} -> t
_ -> []
end
end
# Similar to read manifest but supports data migration.
defp parse_manifest(manifest) do
state = {[], %{}}
case :file.consult(manifest) do
{:ok, [@manifest_vsn | data]} ->
parse_manifest(data, state)
{:ok, [:v2 | data]} ->
for {beam, module, _, _, _, _, _, _} <- data do
remove_and_purge(beam, module)
end
state
_ ->
state
end
end
defp parse_manifest(data, state) do
Enum.reduce data, state, fn
entry, {entries, sources} when tuple_size(entry) == 7 ->
{[entry | entries], sources}
{source, files}, {entries, sources} ->
{entries, Map.put(sources, source, files)}
end
end
defp write_manifest(manifest, [], sources) when sources == %{} do
File.rm(manifest)
:ok
end
defp write_manifest(manifest, entries, sources) do
File.mkdir_p!(Path.dirname(manifest))
File.open!(manifest, [:write], fn device ->
:io.format(device, '~p.~n', [@manifest_vsn])
Enum.each entries, fn {beam, _, _, _, _, _, binary} = entry ->
if binary, do: File.write!(beam, binary)
:io.format(device, '~p.~n', [put_elem(entry, 6, nil)])
end
Enum.each sources, fn {_, _} = entry ->
:io.format(device, '~p.~n', [entry])
end
:ok
end)
# Since Elixir is a dependency itself, we need to touch the lock
# so the current Elixir version, used to compile the files above,
# is properly stored.
Mix.Dep.ElixirSCM.update
end
end
| 29.831715 | 103 | 0.619766 |
f7d8ccd79c98e8af46914826c22e5f2b0af405c8 | 10,816 | ex | Elixir | test/support/live_views.ex | simonprev/phoenix_live_view | 55a54726650e53ac68c30fc9b49d2a5895ce2053 | [
"MIT"
] | null | null | null | test/support/live_views.ex | simonprev/phoenix_live_view | 55a54726650e53ac68c30fc9b49d2a5895ce2053 | [
"MIT"
] | null | null | null | test/support/live_views.ex | simonprev/phoenix_live_view | 55a54726650e53ac68c30fc9b49d2a5895ce2053 | [
"MIT"
] | null | null | null | alias Phoenix.LiveViewTest.{ClockLive, ClockControlsLive}
defmodule Phoenix.LiveViewTest.ThermostatLive do
use Phoenix.LiveView, container: {:article, class: "thermo"}, namespace: Phoenix.LiveViewTest
def render(assigns) do
~L"""
The temp is: <%= @val %><%= @greeting %>
<button phx-click="dec">-</button>
<button phx-click="inc">+</button><%= if @nest do %>
<%= live_render(@socket, ClockLive, [id: :clock] ++ @nest) %>
<%= for user <- @users do %>
<i><%= user.name %> <%= user.email %></i>
<% end %>
<% end %>
"""
end
def mount(session, socket) do
nest = Map.get(session, :nest, false)
users = session[:users] || []
val = if connected?(socket), do: 1, else: 0
{:ok,
assign(socket,
val: val,
nest: nest,
redir: session[:redir],
users: users,
greeting: nil
)}
end
@key_i 73
@key_d 68
def handle_event("key", @key_i, socket) do
{:noreply, update(socket, :val, &(&1 + 1))}
end
def handle_event("key", @key_d, socket) do
{:noreply, update(socket, :val, &(&1 - 1))}
end
def handle_event("save", %{"temp" => new_temp} = params, socket) do
{:noreply, assign(socket, val: new_temp, greeting: inspect(params["_target"]))}
end
def handle_event("save", new_temp, socket) do
{:noreply, assign(socket, :val, new_temp)}
end
def handle_event("redir", to, socket) do
{:stop, redirect(socket, to: to)}
end
def handle_event("inactive", msg, socket) do
{:noreply, assign(socket, :greeting, "Tap to wake – #{msg}")}
end
def handle_event("active", msg, socket) do
{:noreply, assign(socket, :greeting, "Waking up – #{msg}")}
end
def handle_event("noop", _, socket), do: {:noreply, socket}
def handle_event("inc", _, socket), do: {:noreply, update(socket, :val, &(&1 + 1))}
def handle_event("dec", _, socket), do: {:noreply, update(socket, :val, &(&1 - 1))}
def handle_info(:noop, socket), do: {:noreply, socket}
def handle_info({:redir, to}, socket) do
{:stop, redirect(socket, to: to)}
end
def handle_call({:set, var, val}, _, socket) do
{:reply, :ok, assign(socket, var, val)}
end
end
defmodule Phoenix.LiveViewTest.ClockLive do
use Phoenix.LiveView, container: {:section, class: "clock"}
def render(assigns) do
~L"""
time: <%= @time %> <%= @name %>
<%= live_render(@socket, ClockControlsLive, id: :"#{String.replace(@name, " ", "-")}-controls") %>
"""
end
def mount(session, socket) do
{:ok, assign(socket, time: "12:00", name: session[:name] || "NY")}
end
def handle_info(:snooze, socket) do
{:noreply, assign(socket, :time, "12:05")}
end
def handle_info({:run, func}, socket) do
func.(socket)
end
def handle_call({:set, new_time}, _from, socket) do
{:reply, :ok, assign(socket, :time, new_time)}
end
end
defmodule Phoenix.LiveViewTest.ClockControlsLive do
use Phoenix.LiveView
def render(assigns), do: ~L|<button phx-click="snooze">+</button>|
def mount(_session, socket), do: {:ok, socket}
def handle_event("snooze", _, socket) do
send(socket.parent_pid, :snooze)
{:noreply, socket}
end
end
defmodule Phoenix.LiveViewTest.DashboardLive do
use Phoenix.LiveView
def render(assigns) do
~L"""
session: <%= Phoenix.HTML.raw inspect(@router_session) %>
"""
end
def mount(session, socket) do
{:ok, assign(socket, router_session: session)}
end
end
defmodule Phoenix.LiveViewTest.SameChildLive do
use Phoenix.LiveView
def render(%{dup: true} = assigns) do
~L"""
<%= for name <- @names do %>
<%= live_render(@socket, ClockLive, id: :dup, session: %{name: name}) %>
<% end %>
"""
end
def render(%{dup: false} = assigns) do
~L"""
<%= for name <- @names do %>
<%= live_render(@socket, ClockLive, session: %{name: name, count: @count}, id: name) %>
<% end %>
"""
end
def mount(%{dup: dup}, socket) do
{:ok, assign(socket, count: 0, dup: dup, names: ~w(Tokyo Madrid Toronto))}
end
def handle_event("inc", _, socket) do
{:noreply, assign(socket, :count, socket.assigns.count + 1)}
end
end
defmodule Phoenix.LiveViewTest.RootLive do
use Phoenix.LiveView
alias Phoenix.LiveViewTest.ChildLive
def render(assigns) do
~L"""
root name: <%= @current_user.name %>
<%= live_render(@socket, ChildLive, id: :static, session: %{child: :static, user_id: @current_user.id}) %>
<%= if @dynamic_child do %>
<%= live_render(@socket, ChildLive, id: @dynamic_child, session: %{child: :dynamic, user_id: @current_user.id}) %>
<% end %>
"""
end
def mount(%{user_id: user_id}, socket) do
{:ok,
socket
|> assign(:dynamic_child, nil)
|> assign_new(:current_user, fn ->
%{name: "user-from-root", id: user_id}
end)}
end
def handle_call({:dynamic_child, child}, _from, socket) do
{:reply, :ok, assign(socket, dynamic_child: child)}
end
end
defmodule Phoenix.LiveViewTest.ChildLive do
use Phoenix.LiveView
def render(assigns) do
~L"""
child <%= @id %> name: <%= @current_user.name %>
"""
end
def mount(%{user_id: user_id, child: id}, socket) do
{:ok,
socket
|> assign(:id, id)
|> assign_new(:current_user, fn ->
%{name: "user-from-child", id: user_id}
end)}
end
end
defmodule Phoenix.LiveViewTest.ParamCounterLive do
use Phoenix.LiveView
def render(assigns) do
~L"""
The value is: <%= @val %>
<%= if map_size(@params) > 0, do: inspect(@params) %>
connect: <%= inspect(@connect_params) %>
"""
end
def mount(session, socket) do
on_handle_params = session[:on_handle_params]
{:ok,
assign(
socket,
val: 1,
connect_params: get_connect_params(socket) || %{},
test_pid: session[:test_pid],
on_handle_params: on_handle_params && :erlang.binary_to_term(on_handle_params)
)}
end
def handle_params(%{"from" => "handle_params"} = params, uri, socket) do
send(socket.assigns.test_pid, {:handle_params, uri, socket.assigns, params})
socket.assigns.on_handle_params.(assign(socket, :params, params))
end
def handle_params(params, uri, socket) do
send(socket.assigns.test_pid, {:handle_params, uri, socket.assigns, params})
{:noreply, assign(socket, :params, params)}
end
def handle_event("live_redirect", to, socket) do
{:noreply, live_redirect(socket, to: to)}
end
def handle_info({:set, var, val}, socket), do: {:noreply, assign(socket, var, val)}
def handle_info({:live_redirect, to}, socket) do
{:noreply, live_redirect(socket, to: to)}
end
def handle_call({:live_redirect, func}, _from, socket) do
func.(socket)
end
def handle_cast({:live_redirect, to}, socket) do
{:noreply, live_redirect(socket, to: to)}
end
end
defmodule Phoenix.LiveViewTest.OptsLive do
use Phoenix.LiveView
def render(assigns), do: ~L|<%= @description %>. <%= @canary %>|
def mount(%{opts: opts}, socket) do
{:ok, assign(socket, description: "long description", canary: "canary"), opts}
end
def handle_call({:exec, func}, _from, socket) do
func.(socket)
end
end
defmodule Phoenix.LiveViewTest.AppendLive do
use Phoenix.LiveView
def render(assigns) do
~L"""
<div id="times" phx-update="<%= @update_type %>">
<%= for %{id: id, name: name} <- @time_zones do %>
<h1 id="title-<%= id %>"><%= name %></h1>
<%= live_render(@socket, Phoenix.LiveViewTest.ClockLive, id: "tz-#{id}", session: %{name: name}) %>
<% end %>
</div>
"""
end
def mount(%{time_zones: {update_type, time_zones}}, socket) do
{:ok, assign(socket, update_type: update_type, time_zones: time_zones),
temporary_assigns: [time_zones: []]}
end
def handle_event("add-tz", %{"id" => id, "name" => name}, socket) do
{:noreply, assign(socket, :time_zones, [%{id: id, name: name}])}
end
end
defmodule Phoenix.LiveViewTest.ShuffleLive do
use Phoenix.LiveView
def render(assigns) do
~L"""
<%= for zone <- @time_zones do %>
<div id="score-<%= zone.id %>">
<%= live_render(@socket, Phoenix.LiveViewTest.ClockLive, id: "tz-#{zone.id}", session: %{name: zone.name}) %>
</div>
<% end %>
"""
end
def mount(%{time_zones: time_zones}, socket) do
{:ok, assign(socket, time_zones: time_zones)}
end
def handle_event("reverse", _, socket) do
{:noreply, assign(socket, :time_zones, Enum.reverse(socket.assigns.time_zones))}
end
end
defmodule Phoenix.LiveViewTest.BasicComponent do
use Phoenix.LiveComponent
def mount(socket) do
{:ok, assign(socket, id: nil, name: "unknown")}
end
def render(assigns) do
~L"""
<div <%= if @id, do: Phoenix.HTML.raw("id=\"#{@id}\""), else: "" %>>
<%= @name %> says hi with socket: <%= !!@socket %>
</div>
"""
end
end
defmodule Phoenix.LiveViewTest.StatefulComponent do
use Phoenix.LiveComponent
def mount(socket) do
{:ok, assign(socket, name: "unknown", dup_name: nil)}
end
def update(assigns, socket) do
if from = assigns[:from] do
send(from, {:updated, assigns})
end
{:ok, assign(socket, assigns)}
end
def preload([assigns | _] = lists_of_assigns) do
if from = assigns[:from] do
send(from, {:preload, lists_of_assigns})
end
lists_of_assigns
end
def render(assigns) do
~L"""
<div id="<%= @id %>">
<%= @name %> says hi with socket: <%= !!@socket %><%= if @dup_name, do: live_component @socket, __MODULE__, id: @dup_name, name: @dup_name %>
</div>
"""
end
def handle_event("transform", %{"op" => op}, socket) do
case op do
"upcase" ->
{:noreply, update(socket, :name, &String.upcase(&1))}
"title-case" ->
{:noreply,
update(socket, :name, fn <<first::binary-size(1), rest::binary>> ->
String.upcase(first) <> rest
end)}
"dup" ->
{:noreply, assign(socket, :dup_name, socket.assigns.name <> "-dup")}
end
end
end
defmodule Phoenix.LiveViewTest.WithComponentLive do
use Phoenix.LiveView
def render(assigns) do
~L"""
<%= live_component @socket, Phoenix.LiveViewTest.BasicComponent %>
<%= for name <- @names do %>
<%= live_component @socket, Phoenix.LiveViewTest.StatefulComponent, id: name, name: name, from: @from %>
<% end %>
"""
end
def mount(%{names: names, from: from}, socket) do
{:ok, assign(socket, names: names, from: from)}
end
def handle_info({:send_update, updates}, socket) do
Enum.each(updates, fn {module, args} -> send_update(module, args) end)
{:noreply, socket}
end
def handle_event("delete-name", %{"name" => name}, socket) do
{:noreply, update(socket, :names, &List.delete(&1, name))}
end
end
| 26.252427 | 147 | 0.617511 |
f7d8d2829ab948ff5154fc1222148261eaf4ea1f | 1,643 | ex | Elixir | clients/life_sciences/lib/google_api/life_sciences/v2beta/model/worker_released_event.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/life_sciences/lib/google_api/life_sciences/v2beta/model/worker_released_event.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/life_sciences/lib/google_api/life_sciences/v2beta/model/worker_released_event.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.LifeSciences.V2beta.Model.WorkerReleasedEvent do
@moduledoc """
An event generated when the worker VM that was assigned to the pipeline
has been released (deleted).
## Attributes
* `instance` (*type:* `String.t`, *default:* `nil`) - The worker's instance name.
* `zone` (*type:* `String.t`, *default:* `nil`) - The zone the worker was running in.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:instance => String.t(),
:zone => String.t()
}
field(:instance)
field(:zone)
end
defimpl Poison.Decoder, for: GoogleApi.LifeSciences.V2beta.Model.WorkerReleasedEvent do
def decode(value, options) do
GoogleApi.LifeSciences.V2beta.Model.WorkerReleasedEvent.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.LifeSciences.V2beta.Model.WorkerReleasedEvent do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.215686 | 89 | 0.727328 |
f7d8e284f53cfd27381253d09d0607e24788e87a | 680 | exs | Elixir | farmbot_ext/test/farmbot_ext/amqp/bot_state_channel_test.exs | Arti4ever/farmbot_os | a238c1d5ae78c08d1f5894cac41ed61035fb3266 | [
"MIT"
] | 1 | 2021-04-22T10:18:50.000Z | 2021-04-22T10:18:50.000Z | farmbot_ext/test/farmbot_ext/amqp/bot_state_channel_test.exs | Arti4ever/farmbot_os | a238c1d5ae78c08d1f5894cac41ed61035fb3266 | [
"MIT"
] | null | null | null | farmbot_ext/test/farmbot_ext/amqp/bot_state_channel_test.exs | Arti4ever/farmbot_os | a238c1d5ae78c08d1f5894cac41ed61035fb3266 | [
"MIT"
] | null | null | null | defmodule FarmbotExt.AMQP.BotStateChannelTest do
use ExUnit.Case, async: false
use Mimic
# alias FarmbotExt.AMQP.BotStateChannel
# alias FarmbotCore.BotState
setup :verify_on_exit!
setup :set_mimic_global
defmodule FakeState do
defstruct conn: %{fake: :conn}, chan: "fake_chan_", jwt: "fake_jwt_", cache: %{fake: :cache}
end
test "terminate" do
expected = "Disconnected from BotState channel: \"foo\""
expect(AMQP.Channel, :close, 1, fn "fake_chan_" -> :ok end)
expect(FarmbotCore.LogExecutor, :execute, 1, fn log ->
assert log.message == expected
end)
FarmbotExt.AMQP.BotStateChannel.terminate("foo", %FakeState{})
end
end
| 26.153846 | 96 | 0.702941 |
f7d9438a157ae6955ab19cd81541b0631b0859a9 | 420 | ex | Elixir | web/views/error_view.ex | Poniverse/LunaTube-API | 433473f32ec04a636f6de60642e445fd2e478f98 | [
"Apache-2.0"
] | null | null | null | web/views/error_view.ex | Poniverse/LunaTube-API | 433473f32ec04a636f6de60642e445fd2e478f98 | [
"Apache-2.0"
] | null | null | null | web/views/error_view.ex | Poniverse/LunaTube-API | 433473f32ec04a636f6de60642e445fd2e478f98 | [
"Apache-2.0"
] | null | null | null | defmodule Lunatube.ErrorView do
use Lunatube.Web, :view
def render("404.json", _assigns) do
%{errors: %{detail: "Page not found"}}
end
def render("500.json", _assigns) do
%{errors: %{detail: "Internal server error"}}
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.json", assigns
end
end
| 23.333333 | 49 | 0.683333 |
f7d9757eb7140be5a4dc4dc50b149cbc41f60ddc | 2,351 | ex | Elixir | wadm/lib/wadm/model/validator.ex | janitha09/wadm | b5c9a0f5290bfcdd0eefe0f0bf22583b61c66c0f | [
"Apache-2.0"
] | 2 | 2022-01-02T12:37:57.000Z | 2022-03-02T20:13:03.000Z | wadm/lib/wadm/model/validator.ex | janitha09/wadm | b5c9a0f5290bfcdd0eefe0f0bf22583b61c66c0f | [
"Apache-2.0"
] | 12 | 2021-11-28T19:22:20.000Z | 2022-01-25T18:41:48.000Z | wadm/lib/wadm/model/validator.ex | janitha09/wadm | b5c9a0f5290bfcdd0eefe0f0bf22583b61c66c0f | [
"Apache-2.0"
] | 1 | 2021-11-25T00:42:06.000Z | 2021-11-25T00:42:06.000Z | defmodule Wadm.Model.Validator do
alias Wadm.Model.{
AppSpec,
ActorComponent,
CapabilityComponent,
SpreadScaler,
LinkDefinition
}
@doc """
Validates an application specification structure. This validation only tests against conditions
that are testable from within a single specification.
"""
@spec validate_appspec(Wadm.Model.AppSpec.t()) ::
:ok | {:error, [String.t()]}
def validate_appspec(%AppSpec{} = appspec) do
result = %{
spec: appspec,
errors: []
}
result
|> validate_components()
end
defp validate_components(result) do
result =
result.spec.components
|> Enum.reduce(result, fn comp, acc -> validate_component(comp, acc) end)
if length(result.errors) > 0 do
{:error, result.errors}
else
:ok
end
end
defp validate_component(%ActorComponent{} = comp, result) do
comp.traits
|> Enum.reduce(result, fn trait, acc -> validate_trait(trait, acc) end)
end
defp validate_component(%CapabilityComponent{} = comp, result) do
comp.traits
|> Enum.reduce(result, fn trait, acc -> validate_trait(trait, acc) end)
end
defp validate_trait(%LinkDefinition{} = trait, result) do
targets = find_capability_component(result.spec, trait.target)
cond do
length(targets) > 1 ->
%{
result
| errors: [
"Too many targets matching link definition target #{trait.target}" | result.errors
]
}
length(targets) == 0 ->
%{
result
| errors: [
"No matching targets found for link definition target #{trait.target}"
| result.errors
]
}
true ->
result
end
end
defp validate_trait(%SpreadScaler{} = trait, result) do
weight_total =
trait.spread
|> Enum.map(& &1.weight)
|> Enum.sum()
if weight_total != 100 do
%{result | errors: ["Spread scaler weight does not add up to 100" | result.errors]}
else
result
end
end
defp find_capability_component(%AppSpec{components: comps}, comp_name)
when is_binary(comp_name) do
comps
|> Enum.filter(fn comp ->
case comp do
%CapabilityComponent{name: name} -> name == comp_name
_ -> false
end
end)
end
end
| 23.989796 | 97 | 0.607401 |
f7d97ddc1e375724adee901e6de21d0e64a6a39d | 717 | ex | Elixir | apps/core/lib/core/pubsub/protocols/realtime.ex | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | null | null | null | apps/core/lib/core/pubsub/protocols/realtime.ex | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | 2 | 2019-12-13T23:55:50.000Z | 2019-12-17T05:49:58.000Z | apps/core/lib/core/pubsub/protocols/realtime.ex | michaeljguarino/chartmart | a34c949cc29d6a1ab91c04c5e4f797e6f0daabfc | [
"Apache-2.0"
] | null | null | null | defprotocol Core.PubSub.Realtime do
@fallback_to_any true
def publish?(event)
end
defimpl Core.PubSub.Realtime, for: Any do
def publish?(_), do: false
end
defimpl Core.PubSub.Realtime, for: [
Core.PubSub.IncidentCreated,
Core.PubSub.IncidentUpdated,
Core.PubSub.IncidentDeleted,
Core.PubSub.IncidentMessageCreated,
Core.PubSub.IncidentMessageUpdated,
Core.PubSub.IncidentMessageDeleted,
Core.PubSub.NotificationCreated,
Core.PubSub.UpgradeCreated,
Core.PubSub.UpgradeQueueUpdated,
Core.PubSub.UpgradeQueueCreated,
Core.PubSub.RolloutCreated,
Core.PubSub.RolloutUpdated,
Core.PubSub.TestCreated,
Core.PubSub.TestUpdated,
Core.PubSub.StepLogs,
] do
def publish?(_), do: true
end
| 24.724138 | 41 | 0.783821 |
f7d9acfad84d72c2e29b46c157984626cfb26fce | 2,703 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/user.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/user.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/user.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Model.User do
@moduledoc """
A Cloud SQL user resource.
## Attributes
- host (String.t): The host name from which the user can connect. For insert operations, host defaults to an empty string. For update operations, host is specified as part of the request URL. The host name cannot be updated after insertion. Defaults to: `null`.
- etag (String.t): This field is deprecated and will be removed from a future version of the API. Defaults to: `null`.
- instance (String.t): The name of the Cloud SQL instance. This does not include the project ID. Can be omitted for update since it is already specified on the URL. Defaults to: `null`.
- kind (String.t): This is always sql#user. Defaults to: `null`.
- name (String.t): The name of the user in the Cloud SQL instance. Can be omitted for update since it is already specified in the URL. Defaults to: `null`.
- password (String.t): The password for the user. Defaults to: `null`.
- project (String.t): The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable. Can be omitted for update since it is already specified on the URL. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:host => any(),
:etag => any(),
:instance => any(),
:kind => any(),
:name => any(),
:password => any(),
:project => any()
}
field(:host)
field(:etag)
field(:instance)
field(:kind)
field(:name)
field(:password)
field(:project)
end
defimpl Poison.Decoder, for: GoogleApi.SQLAdmin.V1beta4.Model.User do
def decode(value, options) do
GoogleApi.SQLAdmin.V1beta4.Model.User.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.SQLAdmin.V1beta4.Model.User do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.954545 | 263 | 0.711062 |
f7d9cc5f65df467017cadf22673672a72bc681ca | 404 | exs | Elixir | apps/robby_web/test/lib/photo_handler_test.exs | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | null | null | null | apps/robby_web/test/lib/photo_handler_test.exs | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | null | null | null | apps/robby_web/test/lib/photo_handler_test.exs | jeffweiss/openrobby | 9fed2024e6ce87a6fe27ef3af85558f3116aca2a | [
"Apache-2.0"
] | null | null | null | defmodule RobbyWeb.PhotoHandlerTest do
use ExUnit.Case
alias RobbyWeb.PhotoHandler
test "can successfully update a photo" do
:ok = PhotoHandler.update_photo("matt")
end
test "can handle a missing photo gracefully" do
:ok = PhotoHandler.update_photo("tom")
end
test "produces an error when there's an issue" do
{:error, _message} = PhotoHandler.update_photo("jim")
end
end
| 22.444444 | 57 | 0.725248 |
f7d9dbaf51f171e9702bda91094fca1b8f3e1b66 | 1,219 | ex | Elixir | apps/room_book/lib/user/user.ex | adrianomota/room_book_umbrella | 701d4ad89b45739418df831156288b94346287df | [
"MIT"
] | null | null | null | apps/room_book/lib/user/user.ex | adrianomota/room_book_umbrella | 701d4ad89b45739418df831156288b94346287df | [
"MIT"
] | null | null | null | apps/room_book/lib/user/user.ex | adrianomota/room_book_umbrella | 701d4ad89b45739418df831156288b94346287df | [
"MIT"
] | null | null | null | defmodule RoomBook.User do
use Ecto.Schema
import Ecto.Changeset
alias RoomBook.User
schema "users" do
field(:email, :string, null: false)
field(:username, :string, null: false)
field(:encrypted_password, :string, null: false)
field(:password, :string, virtual: true)
field(:password_confirmation, :string, virtual: true)
has_many(:rooms, RoomBook.Room)
timestamps()
end
@doc false
def changeset(%User{} = user, attrs) do
user
|> cast(attrs, [:email, :username])
|> validate_required([:email, :username])
|> validate_length(:username, min: 3, max: 30)
|> unique_constraint(:email)
|> unique_constraint(:username)
end
@doc false
def registration_changeset(%User{} = user, attrs) do
user
|> changeset(attrs)
|> validate_confirmation(:password)
|> cast(attrs, [:password], [])
|> validate_length(:password, min: 6, max: 128)
|> encrypt_password()
end
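  # Illustrative use of the registration changeset (the string keys are an
  # assumption, matching params as they would arrive from a web form):
  #
  #     registration_changeset(%User{}, %{
  #       "email" => "ada@example.com",
  #       "username" => "ada",
  #       "password" => "secret123",
  #       "password_confirmation" => "secret123"
  #     })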
defp encrypt_password(changeset) do
case changeset do
%Ecto.Changeset{valid?: true, changes: %{password: password}} ->
put_change(changeset, :encrypted_password, Comeonin.Bcrypt.hashpwsalt(password))
_ ->
changeset
end
end
end
| 25.93617 | 88 | 0.658737 |
f7d9efc5e5af5dbd74f479e7033320d1d7e0a3cf | 300 | exs | Elixir | src/say_math.exs | ybur-yug/all_about_iex | 222d00dee28fa9cbabbf37359768584a9d8d1422 | [
"MIT"
] | 4 | 2016-02-07T07:33:06.000Z | 2016-02-15T14:32:22.000Z | src/say_math.exs | ybur-yug/all_about_iex | 222d00dee28fa9cbabbf37359768584a9d8d1422 | [
"MIT"
] | null | null | null | src/say_math.exs | ybur-yug/all_about_iex | 222d00dee28fa9cbabbf37359768584a9d8d1422 | [
"MIT"
] | null | null | null | defmodule SayMath do
@moduledoc """
  A simple module to print math operations and their results.
  """
  @doc "Prints the addition of two numbers and their result."
@spec add(number(), number()) :: atom()
def add a, b do
IO.puts "#{a} + #{b} is #{a + b}"
end
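  # Example:
  #
  #     SayMath.add(2, 3)
  #     # prints "2 + 3 is 5" and returns :ok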
end
require IEx; IEx.pry
| 23.076923 | 61 | 0.626667 |
f7da0ca80c87e59cc8987d104ef4b004a413f3bb | 1,966 | ex | Elixir | lib/blog_post_api_web/controllers/user_controller.ex | dannielb/blog-post-api | 214520beb57164375bc6596e85cbc42be67c0fb9 | [
"MIT"
] | null | null | null | lib/blog_post_api_web/controllers/user_controller.ex | dannielb/blog-post-api | 214520beb57164375bc6596e85cbc42be67c0fb9 | [
"MIT"
] | null | null | null | lib/blog_post_api_web/controllers/user_controller.ex | dannielb/blog-post-api | 214520beb57164375bc6596e85cbc42be67c0fb9 | [
"MIT"
] | null | null | null | defmodule BlogPostApiWeb.UserController do
use BlogPostApiWeb, :controller
alias BlogPostApi.Accounts
alias BlogPostApi.Accounts.User
alias BlogPostApi.Guardian
alias BlogPostApiWeb.Params.LoginParams
action_fallback BlogPostApiWeb.FallbackController
def index(conn, _params) do
users = Accounts.list_users()
render(conn, "index.json", users: users)
end
def paginate(conn, %{"page_number" => page_number}) do
try do
pagination = Accounts.paginate_users(page_number)
render(conn, "index_paginated.json", pagination: pagination)
rescue
ArgumentError -> {:error, :invalid_data}
end
end
def create(conn, user_params) do
with {:ok, %User{} = user} <- Accounts.create_user(user_params),
{:ok, token, _} <- Guardian.encode_and_sign(user) do
conn
|> put_status(:created)
|> render("token.json", %{token: token})
end
end
def login(conn, params) do
with {:ok, params} <- LoginParams.prepare(params),
{:ok, user} <- Accounts.get_user_by_credentials(params.email, params.password),
{:ok, token, _} <- Guardian.encode_and_sign(user) do
conn
|> put_status(:ok)
|> render("token.json", %{token: token})
end
end
def show(conn, %{"id" => id}) do
with {:ok, _uuid} <- Ecto.UUID.cast(id), {:ok, user} <- Accounts.get_user(id) do
render(conn, "show.json", user: user)
else
_ ->
conn
|> put_status(:not_found)
|> render("404.json", [])
end
end
def update(conn, update_params) do
with {:ok, user} <- Guardian.Plug.current_resource(conn),
{:ok, %User{} = user} <- Accounts.update_user(user, update_params) do
render(conn, "show.json", user: user)
end
end
def delete(conn, _) do
with {:ok, user} <- Guardian.Plug.current_resource(conn),
{:ok, %User{}} <- Accounts.delete_user(user) do
send_resp(conn, :no_content, "")
end
end
end
| 28.492754 | 88 | 0.636826 |
f7da11847a9bb99a4e91dab37e7daef625b99493 | 2,150 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/managed_configurations_settings.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/managed_configurations_settings.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/managed_configurations_settings.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.ManagedConfigurationsSettings do
@moduledoc """
A managed configurations settings resource contains the set of
managed properties that have been configured for an Android app to be applied
to a set of users. The app's developer would have
defined configurable properties in the managed configurations schema.
## Attributes
* `lastUpdatedTimestampMillis` (*type:* `String.t`, *default:* `nil`) - The last updated time of the managed configuration settings in
milliseconds since 1970-01-01T00:00:00Z.
* `mcmId` (*type:* `String.t`, *default:* `nil`) - The ID of the managed configurations settings.
* `name` (*type:* `String.t`, *default:* `nil`) - The name of the managed configurations settings.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:lastUpdatedTimestampMillis => String.t(),
:mcmId => String.t(),
:name => String.t()
}
field(:lastUpdatedTimestampMillis)
field(:mcmId)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.ManagedConfigurationsSettings do
def decode(value, options) do
GoogleApi.AndroidEnterprise.V1.Model.ManagedConfigurationsSettings.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.ManagedConfigurationsSettings do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.719298 | 138 | 0.741395 |
f7da25ed1f760cdd48ce1751026ad68801068965 | 1,830 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_nat_address.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_nat_address.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_nat_address.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1NatAddress do
@moduledoc """
  Apigee NAT (network address translation) address. A NAT address is a static external IP address used for Internet egress traffic.
## Attributes
* `ipAddress` (*type:* `String.t`, *default:* `nil`) - Output only. The static IPV4 address.
* `name` (*type:* `String.t`, *default:* `nil`) - Required. Resource ID of the NAT address.
* `state` (*type:* `String.t`, *default:* `nil`) - Output only. State of the nat address.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:ipAddress => String.t(),
:name => String.t(),
:state => String.t()
}
field(:ipAddress)
field(:name)
field(:state)
end
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1NatAddress do
def decode(value, options) do
GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1NatAddress.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1NatAddress do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.528302 | 130 | 0.718033 |
f7da2e5e00f51015d30d5c9cfe194653350080b4 | 28,907 | ex | Elixir | lib/elixir/lib/application.ex | basdirks/elixir | 2cb058ba32e410e8ea073970ef52d6476ef7b4d3 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/application.ex | basdirks/elixir | 2cb058ba32e410e8ea073970ef52d6476ef7b4d3 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/elixir/lib/application.ex | basdirks/elixir | 2cb058ba32e410e8ea073970ef52d6476ef7b4d3 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | defmodule Application do
@moduledoc """
A module for working with applications and defining application callbacks.
Applications are the idiomatic way to package software in Erlang/OTP. To get
the idea, they are similar to the "library" concept common in other
programming languages, but with some additional characteristics.
An application is a component implementing some specific functionality, with a
standardized directory structure, configuration, and lifecycle. Applications
are *loaded*, *started*, and *stopped*.
## The application resource file
Applications are specified in their [*resource
file*](http://erlang.org/doc/man/app.html), which is a file called `APP.app`,
where `APP` is the application name. For example, the application resource
file of the OTP application `ex_unit` is called `ex_unit.app`.
  You'll find the resource file of an application in its `ebin` directory; it is
generated automatically by Mix. Some of its keys are taken from the keyword
lists returned by the `project/0` and `application/0` functions defined in
`mix.exs`, and others are generated by Mix itself.
You can learn more about the generation of application resource files in the
  documentation of `Mix.Tasks.Compile.App`, also available by running `mix
help compile.app`.
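  For reference, the resource file itself holds a single Erlang term. A
  trimmed-down sketch of its shape is shown below; the exact keys and values
  Mix generates for a real application will differ:
      {application, example_app,
       [{description, "example_app"},
        {vsn, "0.1.0"},
        {applications, [kernel, stdlib, elixir]},
        {env, []}]}.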
## The application environment
The key `env` of an application resource file has a list of tuples that map
atoms to terms, and its contents are known as the application *environment*.
Note that this environment is unrelated to the operating system environment.
By default, the environment of an application is an empty list. In a Mix
project you can set that key in `application/0`:
def application do
[env: [redis_host: "localhost"]]
end
and the generated application resource file is going to have it included.
The environment is available after loading the application, which is a process
explained later:
Application.load(:APP_NAME)
#=> :ok
Application.get_env(:APP_NAME, :redis_host)
#=> "localhost"
In Mix projects, the environment of the application and its dependencies can
be overridden via the `config/config.exs` file. If you start the application
with Mix, that configuration is available at compile time, and at runtime too,
but take into account it is not included in the generated application resource
file, and it is not available if you start the application without Mix.
For example, someone using your application can override its `:redis_host`
environment variable as follows:
config :APP_NAME, redis_host: "redis.local"
The function `put_env/3` allows dynamic configuration of the application
environment, but as a rule of thumb each application is responsible for its
own environment. Please do not use the functions in this module for directly
accessing or modifying the environment of other applications.
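  For example, an application may adjust its own environment at runtime:
      Application.put_env(:APP_NAME, :redis_host, "redis.local")
      Application.get_env(:APP_NAME, :redis_host)
      #=> "redis.local"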
The application environment can be overridden via the `-config` option of
`erl`, as well as command-line flags, as we are going to see below.
## The application callback module
The `mod` key of an application resource file configures an application
callback module and start argument:
def application do
[mod: {MyApp, []}]
end
This key is optional, only needed for applications that start a supervision tree.
The `MyApp` module given to `:mod` needs to implement the `Application` behaviour.
This can be done by putting `use Application` in that module and implementing the
`c:start/2` callback, for example:
defmodule MyApp do
use Application
def start(_type, _args) do
children = []
Supervisor.start_link(children, strategy: :one_for_one)
end
end
The `c:start/2` callback has to spawn and link a supervisor and return `{:ok,
pid}` or `{:ok, pid, state}`, where `pid` is the PID of the supervisor, and
`state` is an optional application state. `args` is the second element of the
tuple given to the `:mod` option.
The `type` argument passed to `c:start/2` is usually `:normal` unless in a
distributed setup where application takeovers and failovers are configured.
Distributed applications are beyond the scope of this documentation.
When an application is shutting down, its `c:stop/1` callback is called after
the supervision tree has been stopped by the runtime. This callback allows the
application to do any final cleanup. The argument is the state returned by
`c:start/2`, if it did, or `[]` otherwise. The return value of `c:stop/1` is
ignored.
By using `Application`, modules get a default implementation of `c:stop/1`
that ignores its argument and returns `:ok`, but it can be overridden.
Application callback modules may also implement the optional callback
`c:prep_stop/1`. If present, `c:prep_stop/1` is invoked before the supervision
tree is terminated. Its argument is the state returned by `c:start/2`, if it did,
or `[]` otherwise, and its return value is passed to `c:stop/1`.
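For example, extending the `MyApp` module defined above (the
`MyApp.Cache.flush/0` call is only a stand-in for whatever cleanup your
application needs):

    def prep_stop(state) do
      # The supervision tree is still running at this point.
      MyApp.Cache.flush()
      state
    end

    def stop(_state) do
      # The supervision tree has already been terminated here.
      :ok
    end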
## The application lifecycle
### Loading applications
Applications are *loaded*, which means that the runtime finds and processes
their resource files:
Application.load(:ex_unit)
#=> :ok
If an application has included applications, they are also loaded. And the
procedure recurses if they in turn have included applications. Included
applications are unrelated to applications in Mix umbrella projects; they are
an Erlang/OTP concept that has to do with coordinated starts.
When an application is loaded, the environment specified in its resource file
is merged with any overrides from config files passed to `erl` via the
`-config` option. It is worth highlighting that releases pass `sys.config`
this way. The resulting environment can still be overridden again via specific
`-Application` flags passed to `erl`.
Loading an application *does not* load its modules.
In practice, you rarely load applications by hand because that is part of the
start process, explained next.
### Starting applications
Applications are also *started*:
Application.start(:ex_unit)
#=> :ok
Once your application is compiled, running your system is a matter of starting
your current application and its dependencies. Differently from other languages,
Elixir does not have a `main` procedure that is responsible for starting your
system. Instead, you start one or more applications, each with their own
initialization and termination logic.
When an application is started, the runtime loads it if it hasn't been loaded
yet (in the technical sense described above). Then, it checks if the
dependencies listed in the `applications` key of the resource file are already
started. Having at least one dependency not started is an error condition, but
when you start an application with `mix run`, Mix takes care of starting all
the dependencies for you, so in practice you don't need to worry about it
unless you are starting applications manually with the API provided by this
module.
If the application does not have a callback module configured, starting is
done at this point. Otherwise, its `c:start/2` callback is invoked. The PID of
the top-level supervisor returned by this function is stored by the runtime
for later use, and the returned application state is saved too, if any.
### Stopping applications
Started applications are, finally, *stopped*:
Application.stop(:ex_unit)
#=> :ok
Stopping an application without a callback module is defined, but except for
some system tracing, it is in practice a no-op.
Stopping an application with a callback module has three steps:
1. If present, invoke the optional callback `c:prep_stop/1`.
2. Terminate the top-level supervisor.
3. Invoke the required callback `c:stop/1`.
The arguments passed to the callbacks are related to the state optionally
returned by `c:start/2`, and are documented in the section about the callback
module above.
It is important to highlight that step 2 is a blocking one. Termination of a
supervisor triggers a recursive chain of children terminations, therefore
orderly shutting down all descendant processes. The `c:stop/1` callback is
invoked only after termination of the whole supervision tree.
Shutting down a live system cleanly can be done by calling `System.stop/1`. It
will shut down every application in the opposite order they had been started.
By default, a SIGTERM from the operating system will automatically translate to
`System.stop/0`. You can also have more explicit control over OS signals via the
`:os.set_signal/2` function.
## Tooling
The Mix build tool can also be used to start your applications. For example,
`mix test` automatically starts your application dependencies and your application
itself before your test runs. `mix run --no-halt` boots your current project and
can be used to start a long running system. See `mix help run`.
Developers can also use tools like [Distillery](https://github.com/bitwalker/distillery)
that build **releases**. Releases are able to package all of your source code
as well as the Erlang VM into a single directory. Releases also give you explicit
control over how each application is started and in which order. They also provide
a more streamlined mechanism for starting and stopping systems, debugging, logging,
as well as system monitoring.
Finally, Elixir provides tools such as escripts and archives, which are
different mechanisms for packaging your application. Those are typically used
when tools must be shared between developers and not as deployment options.
See `mix help archive.build` and `mix help escript.build` for more detail.
## Further information
For further details on applications please check the documentation of the
[`application`](http://www.erlang.org/doc/man/application.html) Erlang module,
and the
[Applications](http://www.erlang.org/doc/design_principles/applications.html)
section of the [OTP Design Principles User's
Guide](http://erlang.org/doc/design_principles/users_guide.html).
"""
@doc """
Called when an application is started.
This function is called when an application is started using
`Application.start/2` (and functions on top of that, such as
`Application.ensure_started/2`). This function should start the top-level
process of the application (which should be the top supervisor of the
application's supervision tree if the application follows the OTP design
principles around supervision).
`start_type` defines how the application is started:
* `:normal` - used if the startup is a normal startup or if the application
is distributed and is started on the current node because of a failover
from another node and the application specification key `:start_phases`
is `:undefined`.
* `{:takeover, node}` - used if the application is distributed and is
started on the current node because of a failover on the node `node`.
* `{:failover, node}` - used if the application is distributed and is
started on the current node because of a failover on node `node`, and the
application specification key `:start_phases` is not `:undefined`.
`start_args` are the arguments passed to the application in the `:mod`
specification key (e.g., `mod: {MyApp, [:my_args]}`).
This function should either return `{:ok, pid}` or `{:ok, pid, state}` if
startup is successful. `pid` should be the PID of the top supervisor. `state`
can be an arbitrary term, and if omitted will default to `[]`; if the
application is later stopped, `state` is passed to the `stop/1` callback (see
the documentation for the `c:stop/1` callback for more information).
`use Application` provides no default implementation for the `start/2`
callback.
"""
@callback start(start_type, start_args :: term) ::
{:ok, pid}
| {:ok, pid, state}
| {:error, reason :: term}
@doc """
Called before stopping the application.
This function is called before the top-level supervisor is terminated. It
receives the state returned by `c:start/2`, if it did, or `[]` otherwise.
The return value is later passed to `c:stop/1`.
"""
@callback prep_stop(state) :: state
@doc """
Called after an application has been stopped.
This function is called after an application has been stopped, i.e., after its
supervision tree has been stopped. It should do the opposite of what the
`c:start/2` callback did, and should perform any necessary cleanup. The return
value of this callback is ignored.
`state` is the state returned by `c:start/2`, if it did, or `[]` otherwise.
If the optional callback `c:prep_stop/1` is present, `state` is its return
value instead.
`use Application` defines a default implementation of this function which does
nothing and just returns `:ok`.
"""
@callback stop(state) :: term
@doc """
Starts an application in synchronous phases.
This function is called after `start/2` finishes but before
`Application.start/2` returns. It will be called once for every start phase
defined in the application's (and any included applications') specification,
in the order they are listed in.
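For example, with a (hypothetical) `:seed` start phase declared in `mix.exs`:

    def application do
      [mod: {MyApp, []}, start_phases: [seed: [:initial_data]]]
    end

the callback below would be invoked once `c:start/2` has finished:

    def start_phase(:seed, _start_type, [:initial_data]) do
      :ok
    end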
"""
@callback start_phase(phase :: term, start_type, phase_args :: term) ::
:ok | {:error, reason :: term}
@doc """
Callback invoked after code upgrade, if the application environment
has changed.
`changed` is a keyword list of keys and their changed values in the
application environment. `new` is a keyword list with all new keys
and their values. `removed` is a list with all removed keys.
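A minimal sketch (the `:log_level` key is only an example):

    def config_change(changed, _new, _removed) do
      if level = changed[:log_level] do
        Logger.configure(level: level)
      end

      :ok
    end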
"""
@callback config_change(changed, new, removed) :: :ok
when changed: keyword, new: keyword, removed: [atom]
@optional_callbacks start_phase: 3, prep_stop: 1, config_change: 3
@doc false
defmacro __using__(_) do
quote location: :keep do
@behaviour Application
@doc false
def stop(_state) do
:ok
end
defoverridable Application
end
end
@type app :: atom
@type key :: atom
@type value :: term
@type state :: term
@type start_type :: :normal | {:takeover, node} | {:failover, node}
@type restart_type :: :permanent | :transient | :temporary
@application_keys [
:description,
:id,
:vsn,
:modules,
:maxP,
:maxT,
:registered,
:included_applications,
:applications,
:mod,
:start_phases
]
@doc """
Returns the spec for `app`.
The following keys are returned:
* #{Enum.map_join(@application_keys, "\n * ", &"`#{inspect(&1)}`")}
Note the environment is not returned as it can be accessed via
`fetch_env/2`. Returns `nil` if the application is not loaded.
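## Examples

    Application.spec(:ex_unit)[:vsn]
    #=> '1.7.4' (the exact charlist depends on the installed version)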
"""
@spec spec(app) :: [{key, value}] | nil
def spec(app) do
case :application.get_all_key(app) do
{:ok, info} -> :lists.keydelete(:env, 1, info)
:undefined -> nil
end
end
@doc """
Returns the value for `key` in `app`'s specification.
See `spec/1` for the supported keys. If the given
specification parameter does not exist, this function
will raise. Returns `nil` if the application is not loaded.
"""
@spec spec(app, key) :: value | nil
def spec(app, key) when key in @application_keys do
case :application.get_key(app, key) do
{:ok, value} -> value
:undefined -> nil
end
end
@doc """
Gets the application for the given module.
The application is located by analyzing the spec
of all loaded applications. Returns `nil` if
the module is not listed in any application spec.
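## Examples

    Application.get_application(String)
    #=> :elixir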
"""
@spec get_application(atom) :: atom | nil
def get_application(module) when is_atom(module) do
case :application.get_application(module) do
{:ok, app} -> app
:undefined -> nil
end
end
@doc """
Returns all key-value pairs for `app`.
"""
@spec get_all_env(app) :: [{key, value}]
def get_all_env(app) do
:application.get_all_env(app)
end
@doc """
Returns the value for `key` in `app`'s environment.
If the configuration parameter does not exist, the function returns the
`default` value.
## Examples
`get_env/3` is commonly used to read the configuration of your OTP applications.
Since Mix configurations are commonly used to configure applications, we will use
this as a point of illustration.
Consider a new application `:my_app`. `:my_app` contains a database engine which
supports a pool of databases. The database engine needs to know the configuration for
each of those databases, and that configuration is supplied by key-value pairs in
environment of `:my_app`.
config :my_app, Databases.RepoOne,
# A database configuration
ip: "localhost",
port: 5433
config :my_app, Databases.RepoTwo,
# Another database configuration (for the same OTP app)
ip: "localhost",
port: 20717
config :my_app, my_app_databases: [Databases.RepoOne, Databases.RepoTwo]
Our database engine used by `:my_app` needs to know what databases exist, and
what the database configurations are. The database engine can make a call to
`get_env(:my_app, :my_app_databases)` to retrieve the list of databases (specified
by module names). Our database engine can then traverse each repository in the
list and then call `get_env(:my_app, Databases.RepoOne)` and so forth to retrieve
the configuration of each one.
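In code, that lookup could be sketched as:

    databases = Application.get_env(:my_app, :my_app_databases, [])

    database_configs =
      for repo <- databases, do: {repo, Application.get_env(:my_app, repo, [])}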
**Important:** if you are writing a library to be used by other developers,
it is generally recommended to avoid the application environment, as the
application environment is effectively a global storage. For more information,
read our [library guidelines](/library-guidelines.html).
"""
@spec get_env(app, key, value) :: value
def get_env(app, key, default \\ nil) do
:application.get_env(app, key, default)
end
@doc """
Returns the value for `key` in `app`'s environment in a tuple.
If the configuration parameter does not exist, the function returns `:error`.
"""
@spec fetch_env(app, key) :: {:ok, value} | :error
def fetch_env(app, key) do
case :application.get_env(app, key) do
{:ok, value} -> {:ok, value}
:undefined -> :error
end
end
@doc """
Returns the value for `key` in `app`'s environment.
If the configuration parameter does not exist, raises `ArgumentError`.
"""
@spec fetch_env!(app, key) :: value
def fetch_env!(app, key) do
case fetch_env(app, key) do
{:ok, value} ->
value
:error ->
vsn = :application.get_key(app, :vsn)
app = inspect(app)
key = inspect(key)
case vsn do
{:ok, _} ->
raise ArgumentError,
"could not fetch application environment #{key} for application #{app} " <>
"because configuration #{key} was not set"
:undefined ->
raise ArgumentError,
"could not fetch application environment #{key} for application #{app} " <>
"because the application was not loaded/started. If your application " <>
"depends on #{app} at runtime, make sure to load/start it or list it " <>
"under :extra_applications in your mix.exs file"
end
end
end
@doc """
Puts the `value` in `key` for the given `app`.
## Options
* `:timeout` - the timeout for the change (defaults to `5_000` milliseconds)
* `:persistent` - persists the given value on application load and reloads
If `put_env/4` is called before the application is loaded, the application
environment values specified in the `.app` file will override the ones
previously set.
The persistent option can be set to `true` when there is a need to guarantee
parameters set with this function will not be overridden by the ones defined
in the application resource file on load. This means persistent values will
stick after the application is loaded and also on application reload.
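For example, to set a value that will survive a later `load/1`:

    Application.put_env(:my_app, :redis_host, "redis.local", persistent: true)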
"""
@spec put_env(app, key, value, timeout: timeout, persistent: boolean) :: :ok
def put_env(app, key, value, opts \\ []) do
:application.set_env(app, key, value, opts)
end
@doc """
Deletes the `key` from the given `app` environment.
See `put_env/4` for a description of the options.
"""
@spec delete_env(app, key, timeout: timeout, persistent: boolean) :: :ok
def delete_env(app, key, opts \\ []) do
:application.unset_env(app, key, opts)
end
@doc """
Ensures the given `app` is started.
Same as `start/2` but returns `:ok` if the application was already
started. This is useful in scripts and in test setup, where test
applications need to be explicitly started:
:ok = Application.ensure_started(:my_test_dep)
"""
@spec ensure_started(app, restart_type) :: :ok | {:error, term}
def ensure_started(app, type \\ :temporary) when is_atom(app) do
:application.ensure_started(app, type)
end
@doc """
Ensures the given `app` and its applications are started.
Same as `start/2` but also starts the applications listed under
`:applications` in the `.app` file in case they were not previously
started.
"""
@spec ensure_all_started(app, restart_type) :: {:ok, [app]} | {:error, {app, term}}
def ensure_all_started(app, type \\ :temporary) when is_atom(app) do
:application.ensure_all_started(app, type)
end
@doc """
Starts the given `app`.
If the `app` is not loaded, the application will first be loaded using `load/1`.
Any included application, defined in the `:included_applications` key of the
`.app` file will also be loaded, but they won't be started.
Furthermore, all applications listed in the `:applications` key must be explicitly
started before this application is. If not, `{:error, {:not_started, app}}` is
returned, where `app` is the name of the missing application.
In case you want to automatically load **and start** all of `app`'s dependencies,
see `ensure_all_started/2`.
The `type` argument specifies the type of the application:
* `:permanent` - if `app` terminates, all other applications and the entire
node are also terminated.
* `:transient` - if `app` terminates with `:normal` reason, it is reported
but no other applications are terminated. If a transient application
terminates abnormally, all other applications and the entire node are
also terminated.
* `:temporary` - if `app` terminates, it is reported but no other
applications are terminated (the default).
Note that it is always possible to stop an application explicitly by calling
`stop/1`. Regardless of the type of the application, no other applications will
be affected.
Note also that the `:transient` type is of little practical use, since when a
supervision tree terminates, the reason is set to `:shutdown`, not `:normal`.
"""
@spec start(app, restart_type) :: :ok | {:error, term}
def start(app, type \\ :temporary) when is_atom(app) do
:application.start(app, type)
end
@doc """
Stops the given `app`.
When stopped, the application is still loaded.
"""
@spec stop(app) :: :ok | {:error, term}
def stop(app) do
:application.stop(app)
end
@doc """
Loads the given `app`.
In order to be loaded, an `.app` file must be in the load paths.
All `:included_applications` will also be loaded.
Loading the application does not start it nor load its modules, but
it does load its environment.
"""
@spec load(app) :: :ok | {:error, term}
def load(app) when is_atom(app) do
:application.load(app)
end
@doc """
Unloads the given `app`.
It will also unload all `:included_applications`.
Note that the function does not purge the application modules.
"""
@spec unload(app) :: :ok | {:error, term}
def unload(app) when is_atom(app) do
:application.unload(app)
end
@doc """
Gets the directory for app.
This information is returned based on the code path. Here is an
example:
File.mkdir_p!("foo/ebin")
Code.prepend_path("foo/ebin")
Application.app_dir(:foo)
#=> "foo"
Even though the directory is empty and there is no `.app` file,
it is considered the application directory based on the name
"foo/ebin". The name may contain a dash `-` which is considered
to be the app version and it is removed for the lookup purposes:
File.mkdir_p!("bar-123/ebin")
Code.prepend_path("bar-123/ebin")
Application.app_dir(:bar)
#=> "bar-123"
For more information on code paths, check the `Code` module in
Elixir and also Erlang's [`:code` module](http://www.erlang.org/doc/man/code.html).
"""
@spec app_dir(app) :: String.t()
def app_dir(app) when is_atom(app) do
case :code.lib_dir(app) do
lib when is_list(lib) -> IO.chardata_to_string(lib)
{:error, :bad_name} -> raise ArgumentError, "unknown application: #{inspect(app)}"
end
end
@doc """
Returns the given path inside `app_dir/1`.
If `path` is a string, then it will be used as the path inside `app_dir/1`. If
`path` is a list of strings, it will be joined (see `Path.join/1`) and the result
will be used as the path inside `app_dir/1`.
## Examples
File.mkdir_p!("foo/ebin")
Code.prepend_path("foo/ebin")
Application.app_dir(:foo, "my_path")
#=> "foo/my_path"
Application.app_dir(:foo, ["my", "nested", "path"])
#=> "foo/my/nested/path"
"""
@spec app_dir(app, String.t() | [String.t()]) :: String.t()
def app_dir(app, path)
def app_dir(app, path) when is_binary(path) do
Path.join(app_dir(app), path)
end
def app_dir(app, path) when is_list(path) do
Path.join([app_dir(app) | path])
end
@doc """
Returns a list with information about the applications which are currently running.
"""
@spec started_applications(timeout) :: [{app, description :: charlist(), vsn :: charlist()}]
def started_applications(timeout \\ 5000) do
:application.which_applications(timeout)
end
@doc """
Returns a list with information about the applications which have been loaded.
"""
@spec loaded_applications :: [{app, description :: charlist(), vsn :: charlist()}]
def loaded_applications do
:application.loaded_applications()
end
@doc """
Formats the error reason returned by `start/2`,
`ensure_started/2`, `stop/1`, `load/1` and `unload/1`,
returning a string.
"""
@spec format_error(any) :: String.t()
def format_error(reason) do
try do
do_format_error(reason)
catch
# A user could create an error that looks like a built-in one
# causing an error.
:error, _ ->
inspect(reason)
end
end
# exit(:normal) call is special cased, undo the special case.
defp do_format_error({{:EXIT, :normal}, {mod, :start, args}}) do
Exception.format_exit({:normal, {mod, :start, args}})
end
# {:error, reason} return value
defp do_format_error({reason, {mod, :start, args}}) do
Exception.format_mfa(mod, :start, args) <>
" returned an error: " <> Exception.format_exit(reason)
end
# error or exit(reason) call, use exit reason as reason.
defp do_format_error({:bad_return, {{mod, :start, args}, {:EXIT, reason}}}) do
Exception.format_exit({reason, {mod, :start, args}})
end
# bad return value
defp do_format_error({:bad_return, {{mod, :start, args}, return}}) do
Exception.format_mfa(mod, :start, args) <> " returned a bad value: " <> inspect(return)
end
defp do_format_error({:already_started, app}) when is_atom(app) do
"already started application #{app}"
end
defp do_format_error({:not_started, app}) when is_atom(app) do
"not started application #{app}"
end
defp do_format_error({:bad_application, app}) do
"bad application: #{inspect(app)}"
end
defp do_format_error({:already_loaded, app}) when is_atom(app) do
"already loaded application #{app}"
end
defp do_format_error({:not_loaded, app}) when is_atom(app) do
"not loaded application #{app}"
end
defp do_format_error({:invalid_restart_type, restart}) do
"invalid application restart type: #{inspect(restart)}"
end
defp do_format_error({:invalid_name, name}) do
"invalid application name: #{inspect(name)}"
end
defp do_format_error({:invalid_options, opts}) do
"invalid application options: #{inspect(opts)}"
end
defp do_format_error({:badstartspec, spec}) do
"bad application start specs: #{inspect(spec)}"
end
defp do_format_error({'no such file or directory', file}) do
"could not find application file: #{file}"
end
defp do_format_error(reason) do
Exception.format_exit(reason)
end
end
| 36.269762 | 94 | 0.705331 |
f7da3dfaa4c71f63d99b7929283a389eb0f6b901 | 1,561 | ex | Elixir | lib/request.ex | bzzt/bigtable | 215b104a60596dde6cd459efb73baf8bccdb6b50 | [
"MIT"
] | 17 | 2019-01-22T12:59:38.000Z | 2021-12-13T10:41:52.000Z | lib/request.ex | bzzt/bigtable | 215b104a60596dde6cd459efb73baf8bccdb6b50 | [
"MIT"
] | 17 | 2019-01-27T18:11:33.000Z | 2020-02-24T10:16:08.000Z | lib/request.ex | bzzt/bigtable | 215b104a60596dde6cd459efb73baf8bccdb6b50 | [
"MIT"
] | 3 | 2019-02-04T17:08:09.000Z | 2021-04-07T07:13:53.000Z | defmodule Bigtable.Request do
@moduledoc false
alias Bigtable.{Auth, Connection}
alias Connection.Worker
@spec process_request(any(), function(), list()) :: {:ok, any()} | {:error, any()}
def process_request(request, request_fn, opts \\ []) do
response =
:poolboy.transaction(
:connection_pool,
fn pid ->
token = Auth.get_token()
pid
|> Worker.get_connection()
|> request_fn.(request, get_metadata(token))
end,
10_000
)
handle_response(response, opts)
end
@spec handle_response(any(), list()) :: {:ok, any()} | {:error, any()}
defp handle_response({:ok, response, _headers}, opts) do
if Keyword.get(opts, :stream, false) do
processed =
response
|> process_stream()
{:ok, processed}
else
{:ok, response}
end
end
defp handle_response(error, _opts) do
case error do
{:error, _msg} ->
error
msg ->
{:error, msg}
end
end
@spec process_stream(Enumerable.t()) :: [{:ok | :error, any}]
defp process_stream(stream) do
stream
|> Stream.take_while(&remaining_resp?/1)
|> Enum.to_list()
end
@spec remaining_resp?({:ok | :error | :trailers, any()}) :: boolean()
defp remaining_resp?({status, _}), do: status != :trailers
@spec get_metadata(map()) :: Keyword.t()
defp get_metadata(%{token: token}) do
metadata = %{authorization: "Bearer #{token}"}
[metadata: metadata, content_type: "application/grpc", return_headers: true]
end
end
| 24.777778 | 84 | 0.599616 |
f7da41f019a3df00adf0d92d38378469eec2bebc | 7,794 | exs | Elixir | lib/elixir/test/elixir/regex_test.exs | MSch/elixir | fc42dc9bb76ec1fdcfcbdbfb11fea6a845a62fca | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/regex_test.exs | MSch/elixir | fc42dc9bb76ec1fdcfcbdbfb11fea6a845a62fca | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/regex_test.exs | MSch/elixir | fc42dc9bb76ec1fdcfcbdbfb11fea6a845a62fca | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule Regex.BinaryTest do
use ExUnit.Case, async: true
test :multiline do
refute Regex.match?(%r/^b$/, "a\nb\nc")
assert Regex.match?(%r/^b$/m, "a\nb\nc")
end
test :precedence do
assert { "aa", :unknown } |> elem(0) =~ %r/(a)\1/
end
test :backreference do
assert "aa" =~ %r/(a)\1/
end
test :compile! do
assert is_record(Regex.compile!("foo"), Regex)
assert is_regex(Regex.compile!("foo"))
assert_raise Regex.CompileError, "nothing to repeat at position 0", fn ->
Regex.compile!("*foo")
end
end
test :compile do
{ :ok, regex } = Regex.compile("foo")
assert is_regex(regex)
assert { :error, _ } = Regex.compile("*foo")
assert { :error, _ } = Regex.compile("foo", "y")
end
test :compile_with_erl_opts do
{ :ok, regex } = Regex.compile("foo\\sbar", [:dotall, {:newline, :anycrlf}])
assert "foo\nbar" =~ regex
end
test :source do
assert Regex.source(Regex.compile!("foo")) == "foo"
end
test :opts do
assert Regex.opts(Regex.compile!("foo", "u")) == "u"
end
test :unicode do
assert ("josé" =~ %r"\p{Latin}$"u)
end
test :groups do
assert Regex.groups(%r/(?<FOO>foo)/g) == [:FOO]
assert Regex.groups(Regex.compile!("foo")) == nil
assert Regex.groups(Regex.compile!("(?<FOO>foo)", "g")) == [:FOO]
end
test :match? do
assert Regex.match?(%r/foo/, "foo")
refute Regex.match?(%r/foo/, "FOO")
assert Regex.match?(%r/foo/i, "FOO")
assert Regex.match?(%r/\d{1,3}/i, "123")
assert Regex.match?(%r/foo/, "afooa")
refute Regex.match?(%r/^foo/, "afooa")
assert Regex.match?(%r/^foo/, "fooa")
refute Regex.match?(%r/foo$/, "afooa")
assert Regex.match?(%r/foo$/, "afoo")
end
test :named_captures do
assert Keyword.equal? Regex.named_captures(%r/(?<foo>c)(?<bar>d)/g, 'abcd'), [bar: 'd', foo: 'c']
assert Regex.named_captures(%r/c(?<foo>d)/g, 'abcd') == [foo: 'd']
assert Regex.named_captures(%r/c(?<foo>d)/g, 'no_match') == nil
assert Regex.named_captures(%r/c(?<foo>d|e)/g, 'abcd abce') == [foo: 'd']
assert Regex.named_captures(%r/c(?<foo>d)/g, 'abcd', return: :binary) == [foo: "d"]
assert Regex.named_captures(%r/c(.)/g, 'cat') == []
end
test :sigil_R do
assert Regex.match?(%R/f#{1,3}o/, "f#o")
end
test :run do
assert Regex.run(%r"c(d)", "abcd") == ["cd", "d"]
assert Regex.run(%r"e", "abcd") == nil
assert Regex.run(%r"c(d)", "abcd", return: :list) == ['cd', 'd']
end
test :run_with_groups do
assert Regex.run(%r/c(?<foo>d)/g, 'abcd', capture: :groups) == ['d']
assert Regex.run(%r/c(?<foo>d)/g, 'no_match', capture: :groups) == nil
assert Regex.run(%r/c(?<foo>d|e)/g, 'abcd abce', capture: :groups) == ['d']
assert Regex.run(%r/c(?<foo>d)/g, 'abcd', return: :binary, capture: :groups) == ["d"]
end
test :run_with_indexes do
assert Regex.run(%r"c(d)", "abcd", return: :index) == [{2, 2}, {3, 1}]
assert Regex.run(%r"e", "abcd", return: :index) == nil
end
test :scan do
assert Regex.scan(%r"c(d|e)", "abcd abce") == [["cd", "d"], ["ce", "e"]]
assert Regex.scan(%r"c(?:d|e)", "abcd abce") == [["cd"], ["ce"]]
assert Regex.scan(%r"e", "abcd") == []
assert Regex.scan(%r"c(d|e)", "abcd abce", return: :list) == [['cd', 'd'], ['ce', 'e']]
end
test :scan_with_groups do
assert Regex.scan(%r/cd/g, 'abcd', capture: :groups) == []
assert Regex.scan(%r/c(?<foo>d)/g, 'abcd', capture: :groups) == [['d']]
assert Regex.scan(%r/c(?<foo>d)/g, 'no_match', capture: :groups) == []
assert Regex.scan(%r/c(?<foo>d|e)/g, 'abcd abce', capture: :groups) == [['d'], ['e']]
assert Regex.scan(%r/c(?<foo>d)/g, 'abcd', return: :binary, capture: :groups) == [["d"]]
end
test :split do
assert Regex.split(%r",", "") == [""]
assert Regex.split(%r" ", "foo bar baz") == ["foo", "bar", "baz"]
assert Regex.split(%r" ", "foo bar baz", parts: 2) == ["foo", "bar baz"]
assert Regex.split(%r"\s", "foobar") == ["foobar"]
assert Regex.split(%r" ", " foo bar baz ") == ["", "foo", "bar", "baz", ""]
assert Regex.split(%r" ", " foo bar baz ", trim: true) == ["foo", "bar", "baz"]
assert Regex.split(%r"=", "key=") == ["key", ""]
assert Regex.split(%r"=", "=value") == ["", "value"]
end
test :replace do
assert Regex.replace(%r(d), "abc", "d") == "abc"
assert Regex.replace(%r(b), "abc", "d") == "adc"
assert Regex.replace(%r(b), "abc", "[&]") == "a[b]c"
assert Regex.replace(%r(b), "abc", "[\\&]") == "a[&]c"
assert Regex.replace(%r[(b)], "abc", "[\\1]") == "a[b]c"
assert Regex.replace(%r(d), "abcbe", "d") == "abcbe"
assert Regex.replace(%r(b), "abcbe", "d") == "adcde"
assert Regex.replace(%r(b), "abcbe", "[&]") == "a[b]c[b]e"
assert Regex.replace(%r(b), "abcbe", "[\\&]") == "a[&]c[&]e"
assert Regex.replace(%r[(b)], "abcbe", "[\\1]") == "a[b]c[b]e"
end
test :escape do
assert matches_escaped?(".")
refute matches_escaped?(".", "x")
assert matches_escaped?("[\w]")
refute matches_escaped?("[\w]", "x")
assert matches_escaped?("\\")
assert matches_escaped?("\\xff", "\\xff")
refute matches_escaped?("\\xff", "\xff")
assert matches_escaped?("(")
assert matches_escaped?("()")
assert matches_escaped?("(?:foo)")
assert matches_escaped?("\\A \\z")
assert matches_escaped?(" x ")
assert matches_escaped?(" x x ") # unicode spaces here
assert matches_escaped?("# lol")
assert matches_escaped?("\\A.^$*+?()[{\\| \t\n\xff\\z #hello\x{202F}\x{205F}")
end
defp matches_escaped?(string) do
matches_escaped?(string, string)
end
defp matches_escaped?(string, match) do
Regex.match? %r/#{Regex.escape(string)}/usimx, match
end
end
defmodule Regex.ListTest do
use ExUnit.Case, async: true
test :match? do
assert Regex.match?(%r(foo), 'foo')
refute Regex.match?(%r(foo), 'FOO')
assert Regex.match?(%r(foo)i, 'FOO')
assert Regex.match?(%r/\d{1,3}/i, '123')
assert Regex.match?(%r(foo), 'afooa')
refute Regex.match?(%r(^foo), 'afooa')
assert Regex.match?(%r(^foo), 'fooa')
refute Regex.match?(%r(foo$), 'afooa')
assert Regex.match?(%r(foo$), 'afoo')
end
test :run do
assert Regex.run(%r'c(d)', 'abcd') == ['cd', 'd']
assert Regex.run(%r'e', 'abcd') == nil
assert Regex.run(%r"c(d)", "abcd", return: :binary) == ["cd", "d"]
end
test :indexes do
assert Regex.run(%r'c(d)', 'abcd', return: :index) == [{2, 2}, {3, 1}]
assert Regex.run(%r'e', 'abcd', return: :index) == nil
end
test :scan do
assert Regex.scan(%r'c(d|e)', 'abcd abce') == [['cd', 'd'], ['ce', 'e']]
assert Regex.scan(%r'c(?:d|e)', 'abcd abce') == [['cd'], ['ce']]
assert Regex.scan(%r'e', 'abcd') == []
assert Regex.scan(%r'c(d|e)', 'abcd abce', return: :binary) == [["cd", "d"], ["ce", "e"]]
end
test :split do
assert Regex.split(%r' ', 'foo bar baz') == ['foo', 'bar', 'baz']
assert Regex.split(%r' ', 'foo bar baz', parts: 2) == ['foo', 'bar baz']
assert Regex.split(%r'\s', 'foobar') == ['foobar']
end
test :replace do
assert Regex.replace(%r(d), 'abc', 'd') == 'abc'
assert Regex.replace(%r(b), 'abc', 'd') == 'adc'
assert Regex.replace(%r(b), 'abc', '[&]') == 'a[b]c'
assert Regex.replace(%r(b), 'abc', '[\\&]') == 'a[&]c'
assert Regex.replace(%r[(b)], 'abc', '[\\1]') == 'a[b]c'
assert Regex.replace(%r(d), 'abcbe', 'd') == 'abcbe'
assert Regex.replace(%r(b), 'abcbe', 'd') == 'adcde'
assert Regex.replace(%r(b), 'abcbe', '[&]') == 'a[b]c[b]e'
assert Regex.replace(%r(b), 'abcbe', '[\\&]') == 'a[&]c[&]e'
assert Regex.replace(%r[(b)], 'abcbe', '[\\1]') == 'a[b]c[b]e'
end
end
| 34.034934 | 101 | 0.549782 |
f7da4c6089429dc4a5d632d6e8b1eaa51ec91d03 | 1,091 | ex | Elixir | test/support/conn_case.ex | NinjasCL/huemulixir | 0f0ceb69f19e5d361725284f0f8cb1264e7e1742 | [
"BSD-2-Clause"
] | 2 | 2022-03-21T00:57:19.000Z | 2022-03-25T14:28:15.000Z | test/support/conn_case.ex | NinjasCL/huemulixir | 0f0ceb69f19e5d361725284f0f8cb1264e7e1742 | [
"BSD-2-Clause"
] | null | null | null | test/support/conn_case.ex | NinjasCL/huemulixir | 0f0ceb69f19e5d361725284f0f8cb1264e7e1742 | [
"BSD-2-Clause"
] | null | null | null | defmodule HuemulixirWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use HuemulixirWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import HuemulixirWeb.ConnCase
alias HuemulixirWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint HuemulixirWeb.Endpoint
end
end
setup _tags do
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 28.710526 | 64 | 0.737855 |
f7da91d65cbdd9e9a54ae4bbe628671e26578b6e | 14,278 | ex | Elixir | lib/bex/peer_worker.ex | ckampfe/bex | cfbfdd600e03ebb9ef60c3f2f8cb61b0640455ff | [
"BSD-3-Clause"
] | null | null | null | lib/bex/peer_worker.ex | ckampfe/bex | cfbfdd600e03ebb9ef60c3f2f8cb61b0640455ff | [
"BSD-3-Clause"
] | null | null | null | lib/bex/peer_worker.ex | ckampfe/bex | cfbfdd600e03ebb9ef60c3f2f8cb61b0640455ff | [
"BSD-3-Clause"
] | null | null | null | defmodule Bex.PeerWorker do
@moduledoc false
use GenServer, restart: :transient
alias Bex.{BitArray, Peer, TorrentControllerWorker, Torrent, Metainfo, Chunk, Piece}
require Logger
### PUBLIC API
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
def init(args) do
{:ok, args, {:continue, :setup}}
end
def remote_ip_and_port(pid) do
GenServer.call(pid, :remote_ip_and_port)
end
def interested(pid) do
GenServer.call(pid, :interested)
end
def not_interested(pid) do
GenServer.call(pid, :not_interested)
end
def choke(pid) do
GenServer.call(pid, :choke)
end
def unchoke(pid) do
GenServer.call(pid, :unchoke)
end
def request_piece(pid, index) do
GenServer.call(pid, {:request_piece, index})
end
def shutdown(pid) do
GenServer.call(pid, :shutdown)
end
### CALLBACKS
def handle_continue(
:setup,
%{
metainfo: %Metainfo{decorated: %Metainfo.Decorated{info_hash: info_hash}},
socket: socket,
my_peer_id: my_peer_id
} = state
) do
state =
state
|> Map.put(:choked, true)
|> Map.put(:interested, false)
checkin_tick = :timer.seconds(5)
schedule_controller_checkin(checkin_tick)
:ok =
Peer.send_message(socket, %Peer.Message.Handshake{
info_hash: info_hash,
peer_id: my_peer_id,
extension_bytes: [0, 0, 0, 0, 0, 0, 0, 0]
})
:ok = active_once(socket)
Logger.debug("Handshake sent to #{inspect(socket)}")
{:noreply, state}
end
def handle_continue(
:post_handshake,
%{
metainfo: %Metainfo{
decorated: %Metainfo.Decorated{have_pieces: %BitArray{} = have_pieces}
},
socket: socket,
choked: choked,
interested: interested
} = state
) do
handle_info(:keepalive, state)
if BitArray.any?(have_pieces) do
Logger.debug("Have >0 pieces, sending bitfield to #{inspect(socket)}")
Peer.send_message(socket, %Peer.Message.Bitfield{bitfield: have_pieces})
else
Logger.debug("Do not have any pieces, not sending bitfield to #{inspect(socket)}")
end
if choked do
Peer.send_message(socket, %Peer.Message.Choke{})
else
Peer.send_message(socket, %Peer.Message.Unchoke{})
end
if interested do
Peer.send_message(socket, %Peer.Message.Interested{})
else
Peer.send_message(socket, %Peer.Message.NotInterested{})
end
{:noreply, state}
end
def handle_call(:remote_ip_and_port, _from, %{socket: socket} = state) do
reply = :inet.peername(socket)
{:reply, reply, state}
end
def handle_call(:interested, _from, %{socket: socket} = state) do
state =
if !state[:interested] do
:ok = Peer.send_message(socket, %Peer.Message.Interested{})
Logger.debug("Let peer #{inspect(socket)} know we're interested")
Map.put(state, :interested, true)
else
Logger.debug("Already let peer #{inspect(socket)} know we're interested, not sending")
state
end
{:reply, :ok, state}
end
def handle_call(:not_interested, _from, %{socket: socket} = state) do
state =
if state[:interested] do
:ok = Peer.send_message(socket, %Peer.Message.NotInterested{})
Logger.debug("Let peer #{inspect(socket)} know we're not interested")
Map.put(state, :interested, false)
else
Logger.debug("Already let peer #{inspect(socket)} know we're not interested, not sending")
state
end
{:reply, :ok, state}
end
def handle_call(:choke, _from, %{socket: socket} = state) do
state = Map.put(state, :choked, true)
:ok = Peer.send_message(socket, %Peer.Message.Choke{})
{:reply, :ok, state}
end
def handle_call(:unchoke, _from, %{socket: socket} = state) do
state = Map.put(state, :choked, false)
:ok = Peer.send_message(socket, %Peer.Message.Unchoke{})
Logger.debug("Unchoking #{inspect(socket)}")
{:reply, :ok, state}
end
def handle_call(
{:request_piece, index},
_from,
%{
metainfo: %{
info: %{"piece length": piece_length, length: total_length}
},
chunk_size_bytes: chunk_size_bytes,
socket: socket
} = state
) do
Logger.debug("Requesting piece #{index} from #{inspect(socket)}")
chunks = Torrent.compute_chunks(total_length, piece_length, index, chunk_size_bytes)
Logger.debug("Chunks for #{index}: #{inspect(chunks)}")
these_outstanding_chunks =
Enum.map(chunks, fn %Chunk{offset_within_piece: offset, length: length} = chunk ->
Logger.debug("Requesting chunk #{offset} for index #{index} from #{inspect(socket)}")
:ok =
Peer.send_message(socket, %Peer.Message.Request{
index: index,
begin: offset,
length: length
})
chunk
end)
|> Enum.into(MapSet.new())
existing_outstanding_chunks = Map.get(state, :outstanding_chunks, %{})
existing_outstanding_chunks =
Map.put(existing_outstanding_chunks, index, these_outstanding_chunks)
state = Map.put(state, :outstanding_chunks, existing_outstanding_chunks)
{:reply, :ok, state}
end
def handle_call(:shutdown, _from, state) do
{:stop, :normal, state}
end
def handle_info(
{:tcp, _socket,
<<
19,
"BitTorrent protocol",
_reserved_bytes::bytes-size(8),
info_hash::bytes-size(20),
remote_peer_id::bytes-size(20)
>>},
%{
socket: socket,
metainfo: %Bex.Metainfo{
decorated: %Bex.Metainfo.Decorated{info_hash: existing_info_hash}
}
} = state
) do
if info_hash == existing_info_hash do
Logger.debug("Received accurate handshake from #{inspect(remote_peer_id)}")
peer_pid = self()
TorrentControllerWorker.add_peer(info_hash, remote_peer_id, peer_pid)
state = Map.put(state, :remote_peer_id, remote_peer_id)
Logger.debug(
"Registered #{inspect(remote_peer_id)} -> #{inspect(peer_pid)} with TorrentControllerWorker"
)
:ok = :inet.setopts(socket, active: :once, packet: 4)
{:noreply, state, {:continue, :post_handshake}}
else
:gen_tcp.close(socket)
{:stop,
{:shutdown,
"Info hash received from #{remote_peer_id} (#{info_hash}) did not match existing (#{state["metainfo"]["decorated"]["info_hash"]})"},
state}
end
end
def handle_info(
{:tcp, socket, rest},
%{
metainfo: %{
decorated: %{info_hash: info_hash, piece_hashes: piece_hashes},
info: %{"piece length": piece_length, length: _length}
},
socket: socket,
download_path: download_path,
remote_peer_id: remote_peer_id
} = state
) do
case Peer.Message.parse(rest) do
%Peer.Message.Choke{} ->
Logger.debug("Received choke from #{inspect(socket)}, choked them")
state =
if !state[:choked] do
state = Map.put(state, :choked, true)
:ok = Peer.send_message(socket, %Peer.Message.Choke{})
Logger.debug("Choked #{inspect(socket)})")
state
else
state
end
:ok = active_once(socket)
{:noreply, state}
%Peer.Message.Unchoke{} ->
Logger.debug("Received unchoke from #{inspect(socket)}, unchoked them")
state =
if state[:choked] do
state = Map.put(state, :choked, false)
:ok = Peer.send_message(socket, %Peer.Message.Unchoke{})
Logger.debug("Unchoked #{inspect(socket)}")
state
else
state
end
:ok = active_once(socket)
{:noreply, state}
%Peer.Message.Interested{} ->
Logger.debug("Received interested from #{inspect(socket)}")
state =
if state[:choked] do
state = Map.put(state, :choked, false)
:ok = Peer.send_message(socket, %Peer.Message.Unchoke{})
Logger.debug("Unchoked #{inspect(socket)}")
state
else
state
end
:ok = active_once(socket)
{:noreply, state}
%Peer.Message.NotInterested{} ->
Logger.debug("Received not_interested from #{inspect(socket)}")
:ok = active_once(socket)
{:noreply, state}
%Peer.Message.Have{index: index} ->
:ok = active_once(socket)
:ok = TorrentControllerWorker.have(info_hash, remote_peer_id, index)
{:noreply, state}
%Peer.Message.Bitfield{bitfield: bitfield_binary} ->
peer_bitfield = BitArray.from_binary(bitfield_binary, length(piece_hashes))
state = Map.put(state, :peer_bitfield, peer_bitfield)
Logger.debug("Received and stored bitfield from #{inspect(socket)}")
:ok = active_once(socket)
{:noreply, state}
%Peer.Message.Request{index: index, begin: begin, length: length} ->
with {:ok, file} <- File.open(download_path, [:write, :read, :raw]),
piece = %Piece{index: index, length: piece_length},
chunk = %Chunk{offset_within_piece: begin, length: length},
{:ok, chunk_bytes} <- Chunk.read(chunk, piece, file) do
:ok =
Peer.send_message(socket, %Peer.Message.Piece{
index: index,
begin: begin,
chunk: chunk_bytes
})
Logger.debug("Sent chunk #{index} #{begin} #{length} to peer")
end
:ok = active_once(socket)
{:noreply, state}
%Peer.Message.Piece{index: index, begin: begin, chunk: chunk_bytes} ->
Logger.debug("Received chunk of length #{byte_size(chunk_bytes)}")
Logger.debug("Received chunk: index: #{index}, begin: #{begin}, attempting to verify")
state =
with {:ok, file} <- File.open(download_path, [:write, :read, :raw]),
piece = %Piece{index: index, length: piece_length},
chunk = %Chunk{offset_within_piece: begin, length: byte_size(chunk_bytes)},
:ok <- Chunk.write(chunk, piece, file, chunk_bytes),
:ok <- File.close(file) do
Logger.info("Got chunk #{index}, #{begin}")
outstanding_chunks = Map.get(state, :outstanding_chunks, %{})
outstanding_chunks_for_index =
Map.get_lazy(outstanding_chunks, index, fn -> MapSet.new() end)
Logger.info(
"Outstanding chunks for index #{index} pre: #{inspect(outstanding_chunks_for_index)}"
)
outstanding_chunks_for_index =
MapSet.delete(outstanding_chunks_for_index, %Chunk{
offset_within_piece: begin,
length: byte_size(chunk_bytes)
})
Logger.info(
"Outstanding chunks for index #{index} post: #{inspect(outstanding_chunks_for_index)}"
)
outstanding_chunks = Map.put(outstanding_chunks, index, outstanding_chunks_for_index)
state = Map.put(state, :outstanding_chunks, outstanding_chunks)
state =
if Enum.empty?(outstanding_chunks_for_index) do
Logger.info("Got piece #{index}")
expected_hash = Enum.at(piece_hashes, index) |> IO.inspect(label: "Expected Hash")
with {:ok, file} <- File.open(download_path, [:read, :raw, :binary]),
true <-
Piece.verify(piece, file, expected_hash),
:ok <- File.close(file) do
:ok = TorrentControllerWorker.have(info_hash, remote_peer_id, index)
:ok = Peer.send_message(socket, %Peer.Message.Have{index: index})
state
else
e ->
Logger.warn("#{index} did not match hash, #{inspect(e)}")
state
end
else
state
end
state
else
e ->
Logger.error("Error with chunk #{index}, #{begin}, #{inspect(e)}")
state
end
:ok = active_once(socket)
{:noreply, state}
%Peer.Message.Cancel{index: _index, begin: _begin, length: _length} ->
:ok = active_once(socket)
todo("cancel")
%Peer.Message.Keepalive{} ->
Logger.debug("Received keepalive from #{inspect(socket)}")
:ok = active_once(socket)
{:noreply, state}
end
end
def handle_info({:tcp_error, socket, reason}, state) do
Logger.error("#{inspect(socket)}: #{reason}")
reason = inspect(reason)
{:stop, {:shutdown, reason}, state}
end
def handle_info({:tcp_closed, socket}, state) do
reason = "The peer on the other end (#{inspect(socket)}) severed the connection."
Logger.debug(reason)
{:stop, {:shutdown, reason}, state}
end
def handle_info(:keepalive, %{socket: socket, peer_keepalive_tick: peer_keepalive_tick} = state) do
Peer.send_message(socket, %Peer.Message.Keepalive{})
Logger.debug("Keepalive sent to #{inspect(socket)}, scheduling another")
schedule_keepalive(peer_keepalive_tick)
{:noreply, state}
end
def handle_info(
:checkin,
%{
metainfo: %{decorated: %{info_hash: info_hash}},
peer_checkin_tick: peer_checkin_tick,
remote_peer_id: remote_peer_id
} = state
) do
if state[:peer_bitfield] do
TorrentControllerWorker.peer_checkin(info_hash, remote_peer_id, state[:peer_bitfield])
end
schedule_controller_checkin(peer_checkin_tick)
{:noreply, state}
end
### IMPL
defp schedule_controller_checkin(peer_checkin_tick) do
Process.send_after(self(), :checkin, peer_checkin_tick)
end
defp schedule_keepalive(peer_keepalive_tick) do
Process.send_after(self(), :keepalive, peer_keepalive_tick)
end
def active_once(socket) do
:inet.setopts(socket, [{:active, :once}])
end
defp todo(message) do
raise message
end
end
| 29.932914 | 140 | 0.596932 |
f7dad73662886c525d8271721b2f83359ec2a86e | 1,002 | exs | Elixir | mix.exs | zachary-kuhn/guardian_demo | f973cb5a665db1608543a31d3d2340caa2d11228 | [
"MIT"
] | 2 | 2017-09-10T12:44:31.000Z | 2017-11-05T14:15:55.000Z | mix.exs | zachary-kuhn/guardian_demo | f973cb5a665db1608543a31d3d2340caa2d11228 | [
"MIT"
] | null | null | null | mix.exs | zachary-kuhn/guardian_demo | f973cb5a665db1608543a31d3d2340caa2d11228 | [
"MIT"
] | null | null | null | defmodule GuardianDemo.Mixfile do
use Mix.Project
def project do
[
app: :guardian_demo,
version: "1.0.0",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {GuardianDemo.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.3.0"},
{:phoenix_pubsub, "~> 1.0"},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"},
{:guardian, "~> 1.0-beta"},
]
end
end
| 23.302326 | 56 | 0.588822 |
f7daed246339a0afe034b21fbce7b3d93ddff889 | 2,806 | ex | Elixir | clients/contact_center_insights/lib/google_api/contact_center_insights/v1/model/google_cloud_contactcenterinsights_v1_export_insights_data_metadata.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/contact_center_insights/lib/google_api/contact_center_insights/v1/model/google_cloud_contactcenterinsights_v1_export_insights_data_metadata.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/contact_center_insights/lib/google_api/contact_center_insights/v1/model/google_cloud_contactcenterinsights_v1_export_insights_data_metadata.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1ExportInsightsDataMetadata do
@moduledoc """
Metadata for an export insights operation.
## Attributes
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time the operation was created.
* `endTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time the operation finished running.
* `partialErrors` (*type:* `list(GoogleApi.ContactCenterInsights.V1.Model.GoogleRpcStatus.t)`, *default:* `nil`) - Partial errors during export operation that might cause the operation output to be incomplete.
* `request` (*type:* `GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1ExportInsightsDataRequest.t`, *default:* `nil`) - The original request for export.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:createTime => DateTime.t() | nil,
:endTime => DateTime.t() | nil,
:partialErrors =>
list(GoogleApi.ContactCenterInsights.V1.Model.GoogleRpcStatus.t()) | nil,
:request =>
GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1ExportInsightsDataRequest.t()
| nil
}
field(:createTime, as: DateTime)
field(:endTime, as: DateTime)
field(:partialErrors, as: GoogleApi.ContactCenterInsights.V1.Model.GoogleRpcStatus, type: :list)
field(:request,
as:
GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1ExportInsightsDataRequest
)
end
defimpl Poison.Decoder,
for:
GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1ExportInsightsDataMetadata do
def decode(value, options) do
GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1ExportInsightsDataMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.ContactCenterInsights.V1.Model.GoogleCloudContactcenterinsightsV1ExportInsightsDataMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.085714 | 213 | 0.746614 |
f7daf4e5cbb9246c8a7beb77a217b8c8ddfb44bb | 73 | ex | Elixir | lib/price_tracker_web/views/layout_view.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
] | null | null | null | lib/price_tracker_web/views/layout_view.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
] | null | null | null | lib/price_tracker_web/views/layout_view.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
] | null | null | null | defmodule PriceTrackerWeb.LayoutView do
use PriceTrackerWeb, :view
end
| 18.25 | 39 | 0.835616 |
f7db41eadd3745522561d295071ab8832d4d530e | 1,469 | ex | Elixir | lib/graft/state.ex | cdesch/graft | fff22759d48095ffd4693f92fda7a0026d15fdb5 | [
"MIT"
] | 7 | 2021-08-29T14:38:54.000Z | 2022-02-23T10:56:35.000Z | lib/graft/state.ex | cdesch/graft | fff22759d48095ffd4693f92fda7a0026d15fdb5 | [
"MIT"
] | 1 | 2021-11-23T21:39:31.000Z | 2021-11-23T21:39:31.000Z | lib/graft/state.ex | cdesch/graft | fff22759d48095ffd4693f92fda7a0026d15fdb5 | [
"MIT"
] | 1 | 2021-11-23T21:27:48.000Z | 2021-11-23T21:27:48.000Z | defmodule Graft.State do
@moduledoc false
# name of the server process
defstruct me: nil,
# latest term server has seen
current_term: 0,
# candidate_pid that received vote in current term (or nil if none)
voted_for: nil,
# log entries; each entry contains command for state machine, and term when entry was received by leader
log: [{0, 0, nil}],
# index of highest log entry known to be committed
commit_index: 0,
# index of highest log entry applied to state machine
last_applied: 0,
# for each server, index of the next log entry to send to that server
next_index: %{},
# for each server, index of highest log entry known to be replicated on server
match_index: %{},
# for each server, an indication of whether that server has been sent an AE rpc and still has not replied
ready: %{},
# number of servers in the cluster
server_count: 0,
# names of each server in the cluster
servers: [],
# number of votes obtained
votes: 0,
# the id of the node believed to be the leader
leader: nil,
# the replicated state machine pid
machine: nil,
# for each request, the address of the client who requested it
requests: %{}
end
| 43.205882 | 117 | 0.573179 |
f7db423a8f25aaa50fa12beeda36c854d2fc8d69 | 10,636 | ex | Elixir | lib/sanbase/clickhouse/metric/metric_adapter.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 1 | 2022-01-30T19:51:39.000Z | 2022-01-30T19:51:39.000Z | lib/sanbase/clickhouse/metric/metric_adapter.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | lib/sanbase/clickhouse/metric/metric_adapter.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | defmodule Sanbase.Clickhouse.MetricAdapter do
@moduledoc ~s"""
Provide access to the v2 metrics in Clickhouse
The metrics are stored in clickhouse tables where each metric
is defined by a `metric_id` and every project is defined by an `asset_id`.
"""
@behaviour Sanbase.Metric.Behaviour
import Sanbase.Clickhouse.MetricAdapter.SqlQuery
import Sanbase.Metric.Transform, only: [exec_timeseries_data_query: 2]
import Sanbase.Utils.Transform, only: [maybe_unwrap_ok_value: 1, maybe_apply_function: 2]
alias __MODULE__.{HistogramMetric, FileHandler, TableMetric}
alias Sanbase.ClickhouseRepo
@plain_aggregations FileHandler.aggregations()
@aggregations [nil] ++ @plain_aggregations
@timeseries_metrics_name_list FileHandler.metrics_with_data_type(:timeseries)
@histogram_metrics_name_list FileHandler.metrics_with_data_type(:histogram)
@table_structured_metrics_name_list FileHandler.metrics_with_data_type(:table)
@access_map FileHandler.access_map()
@min_plan_map FileHandler.min_plan_map()
@min_interval_map FileHandler.min_interval_map()
@free_metrics FileHandler.metrics_with_access(:free)
@restricted_metrics FileHandler.metrics_with_access(:restricted)
@aggregation_map FileHandler.aggregation_map()
@human_readable_name_map FileHandler.human_readable_name_map()
@metrics_data_type_map FileHandler.metrics_data_type_map()
@metrics_name_list (@histogram_metrics_name_list ++
@timeseries_metrics_name_list ++ @table_structured_metrics_name_list)
|> Enum.uniq()
@metrics_mapset @metrics_name_list |> MapSet.new()
@incomplete_data_map FileHandler.incomplete_data_map()
@selectors_map FileHandler.selectors_map()
@required_selectors_map FileHandler.required_selectors_map()
@metric_to_name_map FileHandler.metric_to_name_map()
@deprecated_metrics_map FileHandler.deprecated_metrics_map()
@default_complexity_weight 0.3
@type slug :: String.t()
@type metric :: String.t()
@type interval :: String.t()
@impl Sanbase.Metric.Behaviour
def free_metrics(), do: @free_metrics
@impl Sanbase.Metric.Behaviour
def restricted_metrics(), do: @restricted_metrics
@impl Sanbase.Metric.Behaviour
def deprecated_metrics_map(), do: @deprecated_metrics_map
@impl Sanbase.Metric.Behaviour
def access_map(), do: @access_map
@impl Sanbase.Metric.Behaviour
def min_plan_map(), do: @min_plan_map
@impl Sanbase.Metric.Behaviour
def has_incomplete_data?(metric), do: Map.get(@incomplete_data_map, metric)
@impl Sanbase.Metric.Behaviour
def complexity_weight(_), do: @default_complexity_weight
@impl Sanbase.Metric.Behaviour
def broken_data(_metric, _selector, _from, _to), do: {:ok, []}
@doc ~s"""
Get a given metric for a slug and time range. The metric's aggregation
function can be changed via the `:aggregation` key of the optional `opts` parameter. The available
aggregations are #{inspect(@plain_aggregations)}
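For example (the metric name, slug and dates below are purely illustrative,
and assume `:last` is among the supported aggregations):

    timeseries_data("daily_active_addresses", %{slug: "santiment"},
      ~U[2019-01-01 00:00:00Z], ~U[2019-01-07 00:00:00Z], "1d", aggregation: :last)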
"""
@impl Sanbase.Metric.Behaviour
def timeseries_data(_metric, %{slug: []}, _from, _to, _interval, _opts), do: {:ok, []}
def timeseries_data(metric, %{slug: slug}, from, to, interval, opts) do
aggregation = Keyword.get(opts, :aggregation, nil) || Map.get(@aggregation_map, metric)
    # FIXME: Some of the `nft` metrics need an additional `owner=opensea` filter
    # to show correct values. Remove once this is fixed on the bigdata side.
filters =
if String.starts_with?(metric, "nft_") do
[owner: "opensea"]
else
Keyword.get(opts, :additional_filters, [])
end
{query, args} = timeseries_data_query(metric, slug, from, to, interval, aggregation, filters)
exec_timeseries_data_query(query, args)
end
@impl Sanbase.Metric.Behaviour
def timeseries_data_per_slug(metric, %{slug: slug}, from, to, interval, opts) do
aggregation = Keyword.get(opts, :aggregation, nil) || Map.get(@aggregation_map, metric)
filters = Keyword.get(opts, :additional_filters, [])
{query, args} =
timeseries_data_per_slug_query(metric, slug, from, to, interval, aggregation, filters)
ClickhouseRepo.query_reduce(
query,
args,
%{},
fn [timestamp, slug, value], acc ->
datetime = DateTime.from_unix!(timestamp)
elem = %{slug: slug, value: value}
Map.update(acc, datetime, [elem], &[elem | &1])
end
)
|> maybe_apply_function(fn list ->
list
|> Enum.map(fn {datetime, data} -> %{datetime: datetime, data: data} end)
end)
end
@impl Sanbase.Metric.Behaviour
defdelegate histogram_data(metric, slug, from, to, interval, limit), to: HistogramMetric
@impl Sanbase.Metric.Behaviour
defdelegate table_data(metric, slug_or_slugs, from, to, opts), to: TableMetric
@impl Sanbase.Metric.Behaviour
def aggregated_timeseries_data(metric, selector, from, to, opts)
def aggregated_timeseries_data(_metric, nil, _from, _to, _opts), do: {:ok, %{}}
def aggregated_timeseries_data(_metric, [], _from, _to, _opts), do: {:ok, %{}}
def aggregated_timeseries_data(metric, %{slug: slug_or_slugs}, from, to, opts)
when is_binary(slug_or_slugs) or is_list(slug_or_slugs) do
aggregation = Keyword.get(opts, :aggregation, nil) || Map.get(@aggregation_map, metric)
filters = Keyword.get(opts, :additional_filters, [])
slugs = List.wrap(slug_or_slugs)
get_aggregated_timeseries_data(metric, slugs, from, to, aggregation, filters)
end
@impl Sanbase.Metric.Behaviour
def slugs_by_filter(metric, from, to, operator, threshold, opts) do
aggregation = Keyword.get(opts, :aggregation, nil) || Map.get(@aggregation_map, metric)
filters = Keyword.get(opts, :additional_filters, [])
{query, args} =
slugs_by_filter_query(metric, from, to, operator, threshold, aggregation, filters)
ClickhouseRepo.query_transform(query, args, fn [slug, _value] -> slug end)
end
@impl Sanbase.Metric.Behaviour
def slugs_order(metric, from, to, direction, opts) do
aggregation = Keyword.get(opts, :aggregation, nil) || Map.get(@aggregation_map, metric)
filters = Keyword.get(opts, :additional_filters, [])
{query, args} = slugs_order_query(metric, from, to, direction, aggregation, filters)
ClickhouseRepo.query_transform(query, args, fn [slug, _value] -> slug end)
end
@impl Sanbase.Metric.Behaviour
def required_selectors(), do: FileHandler.required_selectors_map()
@impl Sanbase.Metric.Behaviour
def metadata(metric) do
min_interval = min_interval(metric)
default_aggregation = Map.get(@aggregation_map, metric)
{:ok,
%{
metric: metric,
min_interval: min_interval,
default_aggregation: default_aggregation,
available_aggregations: @plain_aggregations,
available_selectors: Map.get(@selectors_map, metric),
required_selectors: Map.get(@required_selectors_map, metric, []),
data_type: Map.get(@metrics_data_type_map, metric),
complexity_weight: @default_complexity_weight
}}
end
@impl Sanbase.Metric.Behaviour
def human_readable_name(metric) do
{:ok, Map.get(@human_readable_name_map, metric)}
end
@doc ~s"""
  Return a list of the available histogram metrics.
"""
@impl Sanbase.Metric.Behaviour
def available_histogram_metrics(), do: @histogram_metrics_name_list
@impl Sanbase.Metric.Behaviour
def available_timeseries_metrics(), do: @timeseries_metrics_name_list
@impl Sanbase.Metric.Behaviour
def available_table_metrics(), do: @table_structured_metrics_name_list
@impl Sanbase.Metric.Behaviour
def available_metrics(), do: @metrics_name_list
@impl Sanbase.Metric.Behaviour
def available_metrics(%{slug: slug}) when is_binary(slug) do
{query, args} = available_metrics_for_slug_query(slug)
ClickhouseRepo.query_transform(query, args, fn [metric] ->
Map.get(@metric_to_name_map, metric)
end)
|> maybe_apply_function(fn metrics ->
MapSet.intersection(@metrics_mapset, MapSet.new(metrics))
|> Enum.to_list()
end)
end
@impl Sanbase.Metric.Behaviour
def available_slugs(), do: get_available_slugs()
@impl Sanbase.Metric.Behaviour
def available_slugs(metric), do: get_available_slugs(metric)
@impl Sanbase.Metric.Behaviour
def available_aggregations(), do: @aggregations
@impl Sanbase.Metric.Behaviour
def first_datetime(metric, selector)
when metric in ["price_histogram", "spent_coins_cost", "all_spent_coins_cost"],
do: HistogramMetric.first_datetime(metric, selector)
def first_datetime(metric, %{slug: slug}) do
{query, args} = first_datetime_query(metric, slug)
ClickhouseRepo.query_transform(query, args, fn [datetime] ->
DateTime.from_unix!(datetime)
end)
|> maybe_unwrap_ok_value()
end
@impl Sanbase.Metric.Behaviour
def last_datetime_computed_at(metric, selector)
when metric in ["price_histogram", "spent_coins_cost", "all_spent_coins_cost"],
do: HistogramMetric.last_datetime_computed_at(metric, selector)
def last_datetime_computed_at(metric, %{slug: slug}) do
{query, args} = last_datetime_computed_at_query(metric, slug)
ClickhouseRepo.query_transform(query, args, fn [datetime] ->
DateTime.from_unix!(datetime)
end)
|> maybe_unwrap_ok_value()
end
# Private functions
defp min_interval(metric), do: Map.get(@min_interval_map, metric)
defp get_available_slugs() do
{query, args} = available_slugs_query()
ClickhouseRepo.query_transform(query, args, fn [slug] -> slug end)
end
defp get_available_slugs(metric) do
{query, args} = available_slugs_for_metric_query(metric)
ClickhouseRepo.query_transform(query, args, fn [slug] -> slug end)
end
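  # For long slug lists the aggregation is split into chunks of 50 slugs that
  # are queried concurrently (up to 8 at a time, 25s timeout per chunk); chunks
  # that fail or time out are dropped and the successful per-slug maps are
  # merged into a single result.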
defp get_aggregated_timeseries_data(metric, slugs, from, to, aggregation, filters)
when is_list(slugs) and length(slugs) > 50 do
result =
Enum.chunk_every(slugs, 50)
|> Sanbase.Parallel.map(
&get_aggregated_timeseries_data(metric, &1, from, to, aggregation, filters),
timeout: 25_000,
max_concurrency: 8,
ordered: false,
on_timeout: :kill_task
)
|> Enum.filter(&match?({:ok, _}, &1))
|> Enum.map(&elem(&1, 1))
|> Enum.reduce(%{}, &Map.merge(&1, &2))
{:ok, result}
end
defp get_aggregated_timeseries_data(metric, slugs, from, to, aggregation, filters)
when is_list(slugs) do
{query, args} =
aggregated_timeseries_data_query(metric, slugs, from, to, aggregation, filters)
ClickhouseRepo.query_reduce(query, args, %{}, fn [slug, value, has_changed], acc ->
value = if has_changed == 1, do: value, else: nil
Map.put(acc, slug, value)
end)
end
end
| 35.691275 | 97 | 0.724332 |
f7db48f541b770c5db300342db70268c8c18f556 | 1,228 | ex | Elixir | lib/surgex_web/controllers/patient_controller.ex | fiqus/surgex | af3ec37459abd3f17c7e9a826ca1abef1dd5fb44 | [
"MIT"
] | 8 | 2019-05-11T19:41:06.000Z | 2020-01-20T07:01:53.000Z | lib/surgex_web/controllers/patient_controller.ex | fiqus/surgex | af3ec37459abd3f17c7e9a826ca1abef1dd5fb44 | [
"MIT"
] | 12 | 2019-05-10T22:00:40.000Z | 2019-07-05T19:20:56.000Z | lib/surgex_web/controllers/patient_controller.ex | fiqus/surgex | af3ec37459abd3f17c7e9a826ca1abef1dd5fb44 | [
"MIT"
] | 1 | 2019-07-18T15:58:41.000Z | 2019-07-18T15:58:41.000Z | defmodule SurgexWeb.PatientController do
use SurgexWeb, :controller
alias Surgex.Surgeries
alias Surgex.Surgeries.Patient
action_fallback SurgexWeb.FallbackController
def index(conn, _params) do
patients = Surgeries.list_patients()
render(conn, "index.json", patients: patients)
end
def create(conn, %{"patient" => patient_params}) do
with {:ok, %Patient{} = patient} <- Surgeries.create_patient(patient_params) do
conn
|> put_status(:created)
|> put_resp_header("location", Routes.patient_path(conn, :show, patient))
|> render("show.json", patient: patient)
end
end
def show(conn, %{"id" => id}) do
patient = Surgeries.get_patient!(id)
render(conn, "show.json", patient: patient)
end
def update(conn, %{"id" => id, "patient" => patient_params}) do
patient = Surgeries.get_patient!(id)
with {:ok, %Patient{} = patient} <- Surgeries.update_patient(patient, patient_params) do
render(conn, "show.json", patient: patient)
end
end
def delete(conn, %{"id" => id}) do
patient = Surgeries.get_patient!(id)
with {:ok, %Patient{}} <- Surgeries.delete_patient(patient) do
send_resp(conn, :no_content, "")
end
end
end
| 27.909091 | 92 | 0.669381 |
f7db4e289cbc403e71fe5971455aa461dc7cef56 | 1,254 | ex | Elixir | test/support/conn_case.ex | LuizFerK/Rocketpay | 4388f22231c43fb58e777dfb04a342e82b548df7 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | LuizFerK/Rocketpay | 4388f22231c43fb58e777dfb04a342e82b548df7 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | LuizFerK/Rocketpay | 4388f22231c43fb58e777dfb04a342e82b548df7 | [
"MIT"
] | null | null | null | defmodule RocketpayWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use RocketpayWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import RocketpayWeb.ConnCase
alias RocketpayWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint RocketpayWeb.Endpoint
end
end
setup tags do
:ok = Sandbox.checkout(Rocketpay.Repo)
unless tags[:async] do
Sandbox.mode(Rocketpay.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27.26087 | 63 | 0.725678 |
f7db5fe51c0db8157176c7e0855f5effa4c807f6 | 1,448 | ex | Elixir | lib/thrift/parser/resolver.ex | simplifi/elixir-thrift | 3ce784e198fbdf77d7d2481e6fd0cc9fd9618630 | [
"Apache-2.0"
] | 209 | 2015-12-19T09:56:39.000Z | 2022-03-22T04:43:16.000Z | lib/thrift/parser/resolver.ex | fakeNetflix/pinterest-repo-elixir-thrift | 4e6cc130738b4f04fdbb06bd6f12985b9a9438d3 | [
"Apache-2.0"
] | 312 | 2016-01-05T04:04:58.000Z | 2021-11-15T17:59:57.000Z | lib/thrift/parser/resolver.ex | thecodeboss/elixir-thrift | 621a2039bcbcec62d1cedc85b01421813e0910e8 | [
"Apache-2.0"
] | 40 | 2015-12-21T19:46:03.000Z | 2022-02-10T08:34:58.000Z | defmodule Thrift.Parser.Resolver do
@moduledoc false
  # A resolver for references. During file parsing, every newly parsed Thrift
  # definition flows through this resolver and is added to its global database
# of names. At the end, the database is dumped into the FileGroup so it can
# resolve references.
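  # Illustrative sketch (hypothetical names): when an include registered under
  # the name `shared` defines a struct `Foo`, `add/3` stores it under the
  # qualified key :"shared.Foo"; enum values additionally get entries such as
  # :"Color.RED" mapped to their integer value.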
alias Thrift.AST.TEnum
def add(state, name, schema) do
state
|> update(name, schema.constants)
|> update(name, schema.services)
|> update(name, schema.structs)
|> update(name, schema.exceptions)
|> update(name, schema.unions)
|> update(name, schema.enums)
|> update(name, schema.typedefs)
end
defp update(%{} = resolutions, include_name, %{} = local_mappings) do
new_type_mappings =
Map.new(local_mappings, fn
{name, val} when is_atom(val) or is_tuple(val) ->
{:"#{include_name}.#{name}", val}
{name, val} when is_map(val) ->
{:"#{include_name}.#{name}", Map.put(val, :name, :"#{include_name}.#{name}")}
end)
new_value_mappings =
Enum.reduce(local_mappings, %{}, fn
{_, %TEnum{name: enum_name, values: values}}, acc ->
Enum.reduce(values, acc, fn
{value_name, value}, acc ->
Map.put(acc, :"#{enum_name}.#{value_name}", value)
end)
_, acc ->
acc
end)
resolutions
|> Map.merge(new_type_mappings)
|> Map.merge(new_value_mappings)
end
end
| 29.55102 | 87 | 0.618094 |
f7db85f9589a305ca27dd28891dbd4a8663fe699 | 157 | ex | Elixir | lib/remote_retro_web/views/retro_view.ex | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | 523 | 2017-03-15T15:21:11.000Z | 2022-03-14T03:04:18.000Z | lib/remote_retro_web/views/retro_view.ex | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | 524 | 2017-03-16T18:31:09.000Z | 2022-02-26T10:02:06.000Z | lib/remote_retro_web/views/retro_view.ex | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | 60 | 2017-05-01T18:02:28.000Z | 2022-03-04T21:04:56.000Z | defmodule RemoteRetroWeb.RetroView do
use RemoteRetroWeb, :view
import RemoteRetroWeb.IdeaView
import RemoteRetroWeb.LayoutView, only: [app_js: 1]
end
| 26.166667 | 53 | 0.808917 |
f7db9fb2759229b282c3a07f097283c285d9f516 | 1,524 | exs | Elixir | mix.exs | LaErika/phone | fc63a32dfd156b63b5c95ed59bd160046bae5dd3 | [
"Apache-2.0"
] | null | null | null | mix.exs | LaErika/phone | fc63a32dfd156b63b5c95ed59bd160046bae5dd3 | [
"Apache-2.0"
] | null | null | null | mix.exs | LaErika/phone | fc63a32dfd156b63b5c95ed59bd160046bae5dd3 | [
"Apache-2.0"
] | null | null | null | defmodule Phone.Mixfile do
use Mix.Project
@source_url "https://github.com/fcevado/phone"
@version "0.5.5"
def project do
[
app: :phone,
version: @version,
elixir: ">= 1.1.0",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
package: package(),
deps: deps(),
docs: docs()
] ++ coverage()
end
def application do
[applications: []]
end
defp package do
[
description: """
      Phone number parser for telephone numbers in the international standard
      format, or with a missing international country code.
""",
maintainers: ["Flávio M.V."],
licenses: ["Apache-2.0"],
links: %{
"Changelog" => "https://hexdocs.pm/phone/changelog.html",
"GitHub" => @source_url
}
]
end
defp deps do
[
{:excoveralls, "0.13.4", only: :test, runtime: false},
{:credo, "1.5.4", only: :dev, runtime: false},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
{:benchfella, "0.3.5", only: :dev}
]
end
defp docs do
[
extras: ["CHANGELOG.md", "README.md"],
main: "readme",
formatters: ["html"],
api_reference: false,
skip_undefined_reference_warnings_on: ["CHANGELOG.md"]
]
end
defp coverage do
[
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
]
]
end
end
| 21.771429 | 76 | 0.55315 |
f7dc1ad3ca7b2164d4c68731e9af0bea8ea2daeb | 1,776 | exs | Elixir | test/exshome/pub_sub_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | 2 | 2021-12-21T16:32:56.000Z | 2022-02-22T17:06:39.000Z | test/exshome/pub_sub_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | null | null | null | test/exshome/pub_sub_test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | null | null | null | defmodule ExshomeTest.PubSubTest do
use ExUnit.Case, async: true
alias Exshome.PubSub
import ExshomeTest.Fixtures
alias ExshomeTest.TestRegistry
describe "with registry" do
setup do
TestRegistry.allow(self(), self())
topic = "topic_#{unique_integer()}"
PubSub.subscribe(topic)
%{topic: topic}
end
test "topic name is tied to an owner process in tests", %{topic: topic} do
assert PubSub.topic_name(topic) == ExshomeTest.Hooks.PubSub.topic_name(topic)
end
test "broadcast works fine for the same process", %{topic: topic} do
data = random_data()
PubSub.broadcast(topic, data)
assert_received(^data)
end
test "unsubscribe works", %{topic: topic} do
data = random_data()
PubSub.unsubscribe(topic)
PubSub.broadcast(topic, data)
refute_received(^data)
PubSub.subscribe(topic)
new_data = random_data()
PubSub.broadcast(topic, new_data)
assert_received(^new_data)
end
test "broadcast from other process", %{topic: topic} do
data = random_data()
test_pid = self()
{:ok, pid} =
Task.start_link(fn ->
TestRegistry.allow(test_pid, self())
PubSub.broadcast(topic, data)
end)
ref = Process.monitor(pid)
assert_receive {:DOWN, ^ref, :process, ^pid, _reason}
assert_received(^data)
end
defp random_data, do: %{data: unique_integer()}
end
describe "without registry" do
test "pubsub does not work in tests if there is no subscription" do
topic = "topic_#{unique_integer()}"
assert_raise(MatchError, fn ->
PubSub.subscribe(topic)
end)
TestRegistry.allow(self(), self())
PubSub.subscribe(topic)
end
end
end
| 25.371429 | 83 | 0.643018 |
f7dc3ef3b1a6f50c64173d809615ee9f82fddb0d | 558 | exs | Elixir | config/dev.exs | jhonndabi/speedrun_blogengine | 3a4ae736be1a3e21634376cff1c2e2a60750dcba | [
"Apache-2.0"
] | 11 | 2021-04-12T18:32:30.000Z | 2021-04-23T04:29:48.000Z | config/dev.exs | jhonndabi/speedrun_blogengine | 3a4ae736be1a3e21634376cff1c2e2a60750dcba | [
"Apache-2.0"
] | 1 | 2021-09-18T01:14:50.000Z | 2021-09-18T01:14:50.000Z | config/dev.exs | jhonndabi/speedrun_blogengine | 3a4ae736be1a3e21634376cff1c2e2a60750dcba | [
"Apache-2.0"
] | 11 | 2021-04-13T15:01:36.000Z | 2021-04-19T19:04:47.000Z | import Config
config :speedrun_blogengine, SpeedrunBlogengine.Repo,
username: "postgres",
password: "postgres",
database: "speedrun_blogengine_dev",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 10
config :speedrun_blogengine_web, SpeedrunBlogengineWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
config :logger, :console, format: "[$level] $message\n"
config :phoenix, :plug_init_mode, :runtime
config :phoenix, :stacktrace_depth, 20
| 24.26087 | 64 | 0.759857 |
f7dc82937bb457052f15886c76064e9ee4249104 | 972 | ex | Elixir | apps/gitgud_web/test/support/conn_case.ex | rogervezaro/gitgud | 6656f8c2df16817a6c5325fb4c18b03f9d3f7140 | [
"MIT"
] | 449 | 2018-03-06T01:05:55.000Z | 2022-03-23T21:03:56.000Z | apps/gitgud_web/test/support/conn_case.ex | rogervezaro/gitgud | 6656f8c2df16817a6c5325fb4c18b03f9d3f7140 | [
"MIT"
] | 69 | 2018-03-06T09:26:41.000Z | 2022-03-21T22:43:09.000Z | apps/gitgud_web/test/support/conn_case.ex | rogervezaro/gitgud | 6656f8c2df16817a6c5325fb4c18b03f9d3f7140 | [
"MIT"
] | 41 | 2018-03-06T01:06:07.000Z | 2021-11-21T17:55:04.000Z | defmodule GitGud.Web.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
  to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
import Plug.Conn
import Phoenix.ConnTest
alias GitGud.Web.Router.Helpers, as: Routes
@endpoint GitGud.Web.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(GitGud.DB)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(GitGud.DB, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 24.923077 | 66 | 0.710905 |
f7dc8af4934f9176b2060a341993e1fec5673689 | 417 | exs | Elixir | test/banchan_web/views/error_view_test.exs | tacohole/banchan | 04c9f2fd5464e697d9b69e4bc524ace5f6487487 | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | test/banchan_web/views/error_view_test.exs | tacohole/banchan | 04c9f2fd5464e697d9b69e4bc524ace5f6487487 | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | test/banchan_web/views/error_view_test.exs | tacohole/banchan | 04c9f2fd5464e697d9b69e4bc524ace5f6487487 | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | defmodule BanchanWeb.ErrorViewTest do
use BanchanWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(BanchanWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(BanchanWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 27.8 | 92 | 0.733813 |
f7dca3cd2c86b0b4ddd30afd3d3c94a4762339af | 1,929 | ex | Elixir | lib/elxpro_blog_web.ex | AkioCode/elxpro-blog | 236984915851b91058e091414deb70c5e8fed72a | [
"MIT"
] | null | null | null | lib/elxpro_blog_web.ex | AkioCode/elxpro-blog | 236984915851b91058e091414deb70c5e8fed72a | [
"MIT"
] | 4 | 2021-08-11T03:19:33.000Z | 2021-09-26T01:29:58.000Z | lib/elxpro_blog_web.ex | AkioCode/elxpro-blog | 236984915851b91058e091414deb70c5e8fed72a | [
"MIT"
] | null | null | null | defmodule ElxproBlogWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use ElxproBlogWeb, :controller
use ElxproBlogWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: ElxproBlogWeb
import Plug.Conn
import ElxproBlogWeb.Gettext
alias ElxproBlogWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/elxpro_blog_web/templates",
namespace: ElxproBlogWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import ElxproBlogWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import ElxproBlogWeb.ErrorHelpers
import ElxproBlogWeb.Gettext
alias ElxproBlogWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 23.52439 | 76 | 0.69155 |
f7dcd3f6f5a4ad1a081a6dd462e2ff4ff6fc1d9a | 2,239 | ex | Elixir | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/connection.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/connection.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/connection.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdSenseHost.V41.Connection do
@moduledoc """
Handle Tesla connections for GoogleApi.AdSenseHost.V41.
"""
use Tesla
# Add any middleware here (authentication)
plug Tesla.Middleware.BaseUrl, "https://www.googleapis.com/adsensehost/v4.1"
plug Tesla.Middleware.Headers, %{"User-Agent" => "Elixir"}
plug Tesla.Middleware.EncodeJson
@scopes [
"https://www.googleapis.com/auth/adsensehost" # View and manage your AdSense host data and associated accounts
]
@doc """
Configure a client connection using a provided OAuth2 token as a Bearer token
## Parameters
- token (String): Bearer token
## Returns
Tesla.Env.client
"""
@spec new(String.t) :: Tesla.Env.client
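  # Illustrative call (the token below is a placeholder, not a real credential):
  #
  #     conn = GoogleApi.AdSenseHost.V41.Connection.new("ya29.example-access-token")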
def new(token) when is_binary(token) do
Tesla.build_client([
{Tesla.Middleware.Headers, %{"Authorization" => "Bearer #{token}"}}
])
end
@doc """
Configure a client connection using a function which yields a Bearer token.
## Parameters
- token_fetcher (function arity of 1): Callback which provides an OAuth2 token
given a list of scopes
## Returns
Tesla.Env.client
"""
@spec new(((list(String.t)) -> String.t)) :: Tesla.Env.client
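  # Illustrative call (a sketch — `MyApp.TokenFetcher.fetch/1` is a hypothetical
  # function that returns an OAuth2 access token for the requested scopes):
  #
  #     conn = GoogleApi.AdSenseHost.V41.Connection.new(&MyApp.TokenFetcher.fetch/1)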
def new(token_fetcher) when is_function(token_fetcher) do
token_fetcher.(@scopes)
|> new
end
@doc """
Configure an authless client connection
  ## Returns
Tesla.Env.client
"""
@spec new() :: Tesla.Env.client
def new do
Tesla.build_client([])
end
end
| 26.975904 | 114 | 0.713265 |