hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e5b36e77ec8f3980582c589e4c486d165dd0471 | 24,205 | exs | Elixir | lib/mix/test/mix/tasks/deps_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | 1 | 2018-10-02T13:55:29.000Z | 2018-10-02T13:55:29.000Z | lib/mix/test/mix/tasks/deps_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/deps_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | 1 | 2021-09-30T01:21:02.000Z | 2021-09-30T01:21:02.000Z | Code.require_file("../../test_helper.exs", __DIR__)
defmodule Mix.Tasks.DepsTest do
use MixTest.Case
# Fixture project whose deps cover every failure mode reported by `mix deps`:
# unavailable git dep, invalid version, invalid app file, missing app file,
# and a non-SemVer version.
defmodule DepsApp do
def project do
[
app: :deps,
version: "0.1.0",
deps: [
{:ok, "0.1.0", github: "elixir-lang/ok"},
{:invalidvsn, "0.2.0", path: "deps/invalidvsn"},
{:invalidapp, "0.1.0", path: "deps/invalidapp"},
{:noappfile, "0.1.0", path: "deps/noappfile"},
{:nosemver, "~> 0.1", path: "deps/nosemver"}
]
]
end
end
# Fixture project with a single healthy path dependency; used by tests that
# exercise the success paths of deps.compile/loadpaths.
defmodule SuccessfulDepsApp do
def project do
[
app: :sample,
version: "0.1.0",
deps: [
{:ok, "0.1.0", path: "deps/ok"}
]
]
end
end
# Fixture project exercising requirement mismatches (:ok needs >= 2.0.0 but
# the fixture is 0.1.0), `app: false`, and a custom `:app` path.
defmodule ReqDepsApp do
def project do
[
app: :req_deps,
version: "0.1.0",
deps: [
{:ok, ">= 2.0.0", path: "deps/ok"},
{:noappfile, path: "deps/noappfile", app: false},
{:apppath, path: "deps/noappfile", app: "../deps/ok/ebin/ok.app"}
]
]
end
end
## deps
# Runs `mix deps` against DepsApp and asserts one status line per broken dep.
# Messages arrive as {:mix_shell, :info, [line]} in the test process mailbox.
test "prints list of dependencies and their status" do
Mix.Project.push(DepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* ok (https://github.com/elixir-lang/ok.git) (mix)"]}
msg = " the dependency is not available, run \"mix deps.get\""
assert_received {:mix_shell, :info, [^msg]}
assert_received {:mix_shell, :info, ["* invalidvsn (deps/invalidvsn)"]}
assert_received {:mix_shell, :info, [" the app file contains an invalid version: :ok"]}
assert_received {:mix_shell, :info, ["* invalidapp (deps/invalidapp) (mix)"]}
msg = " the app file at \"_build/dev/lib/invalidapp/ebin/invalidapp.app\" is invalid"
assert_received {:mix_shell, :info, [^msg]}
assert_received {:mix_shell, :info, ["* noappfile (deps/noappfile)"]}
assert_received {:mix_shell, :info, [" could not find an app file at" <> _]}
assert_received {:mix_shell, :info, ["* nosemver (deps/nosemver)"]}
assert_received {:mix_shell, :info, [" the app file specified a non-Semantic" <> _]}
end)
end
# Requirement mismatch is reported, while deps with app: false or a custom
# :app path must NOT trigger the "could not find app file" message.
test "prints list of dependencies and their status, including req mismatches and custom apps" do
Mix.Project.push(ReqDepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* ok (deps/ok) (mix)"]}
msg = " the dependency does not match the requirement \">= 2.0.0\", got \"0.1.0\""
assert_received {:mix_shell, :info, [^msg]}
assert_received {:mix_shell, :info, ["* apppath (deps/noappfile)"]}
refute_received {:mix_shell, :info, [" could not find app file at " <> _]}
assert_received {:mix_shell, :info, ["* noappfile (deps/noappfile)"]}
refute_received {:mix_shell, :info, [" could not find app file at " <> _]}
end)
end
# Writes a mix.exs requiring Elixir ~> 0.1.0 into the fixture dep, then
# expects a warning naming the current System.version() on compile.
test "prints Elixir req mismatches" do
Mix.Project.push(ReqDepsApp)
in_fixture("deps_status", fn ->
File.write!("deps/ok/mix.exs", """
defmodule Deps.OkApp do
use Mix.Project
def project do
[elixir: "~> 0.1.0", app: :ok, version: "2.0.0"]
end
end
""")
Mix.Tasks.Deps.Compile.run([:ok])
msg =
"warning: the dependency :ok requires Elixir \"~> 0.1.0\" " <>
"but you are running on v#{System.version()}"
assert_received {:mix_shell, :error, [^msg]}
Mix.Tasks.Deps.Compile.run([])
end)
end
# Walks through the three lock states: unlocked, lock mismatch (sha differs
# from checkout), and lock outdated (lock URL differs from mix.exs options).
test "prints list of dependencies and their lock status" do
Mix.Project.push(DepsApp)
in_fixture("deps_status", fn ->
File.cd!("deps/ok", fn -> System.cmd("git", ["init"]) end)
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* ok (https://github.com/elixir-lang/ok.git) (mix)"]}
msg =
" the dependency is not locked. To generate the \"mix.lock\" file run \"mix deps.get\""
assert_received {:mix_shell, :info, [^msg]}
Mix.Dep.Lock.write(%{ok: {:git, "https://github.com/elixir-lang/ok.git", "abcdefghi", []}})
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* ok (https://github.com/elixir-lang/ok.git) (mix)"]}
assert_received {:mix_shell, :info, [" locked at abcdefg"]}
msg =
" lock mismatch: the dependency is out of date. To fetch locked version run \"mix deps.get\""
assert_received {:mix_shell, :info, [^msg]}
Mix.Dep.Lock.write(%{
ok: {:git, "git://github.com/elixir-lang/another.git", "abcdefghi", []}
})
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* ok (https://github.com/elixir-lang/ok.git) (mix)"]}
msg =
" lock outdated: the lock is outdated compared to the options in your mix.exs. To fetch locked version run \"mix deps.get\""
assert_received {:mix_shell, :info, [^msg]}
end)
end
# A marker file placed in the dep's build dir must be wiped by --force,
# proving the build directory was cleaned before recompiling.
test "cleans and recompiles artifacts if --force given" do
Mix.Project.push(SuccessfulDepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Compile.run([])
File.touch!("_build/dev/lib/ok/clean-me")
Mix.Tasks.Deps.Compile.run(["--force"])
refute File.exists?("_build/dev/lib/ok/clean-me")
end)
end
## deps.loadpaths
# Healthy deps: loadpaths runs without raising or printing errors.
test "checks list of dependencies and their status with success" do
Mix.Project.push(SuccessfulDepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Loadpaths.run([])
end)
end
# Broken deps make loadpaths raise Mix.Error; each problem is echoed on the
# :error channel (unlike `mix deps`, which uses :info).
test "checks list of dependencies and their status on failure" do
Mix.Project.push(DepsApp)
in_fixture("deps_status", fn ->
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Loadpaths.run([])
end
assert_received {:mix_shell, :error, ["* ok (https://github.com/elixir-lang/ok.git)"]}
msg = " the dependency is not available, run \"mix deps.get\""
assert_received {:mix_shell, :error, [^msg]}
assert_received {:mix_shell, :error, ["* invalidvsn (deps/invalidvsn)"]}
assert_received {:mix_shell, :error, [" the app file contains an invalid version: :ok"]}
assert_received {:mix_shell, :error, ["* invalidapp (deps/invalidapp)"]}
msg = " the app file at \"_build/dev/lib/invalidapp/ebin/invalidapp.app\" is invalid"
assert_received {:mix_shell, :error, [^msg]}
# This one is compiled automatically
refute_received {:mix_shell, :error, ["* noappfile (deps/noappfile)"]}
refute_received {:mix_shell, :error, [" could not find an app file at " <> _]}
end)
end
# Verifies pruning semantics: removing deps with a shared :build_path only
# drops them from the code path; without :build_path their build is deleted.
test "compiles and prunes builds per environment" do
Mix.Project.push(SuccessfulDepsApp)
in_fixture("deps_status", fn ->
# Start from scratch!
File.rm_rf("_build")
Mix.Tasks.Deps.Compile.run([])
Mix.Tasks.Deps.Loadpaths.run([])
assert File.exists?("_build/dev/lib/ok/ebin/ok.app")
assert File.exists?("_build/dev/lib/ok/priv/sample")
Mix.Tasks.Compile.run([])
assert to_charlist(Path.expand("_build/dev/lib/ok/ebin/")) in :code.get_path()
assert File.exists?("_build/dev/lib/sample/ebin/sample.app")
# Remove the deps but set build_path, deps won't be pruned, but load paths are
Mix.ProjectStack.post_config(deps: [], build_path: "_build")
Mix.ProjectStack.clear_cache()
Mix.Project.pop()
Mix.Project.push(SuccessfulDepsApp)
Mix.Tasks.Deps.Loadpaths.run([])
refute to_charlist(Path.expand("_build/dev/lib/ok/ebin/")) in :code.get_path()
assert File.exists?("_build/dev/lib/ok/ebin/ok.app")
assert File.exists?("_build/dev/lib/sample/ebin/sample.app")
# Remove the deps without build_path, deps will be pruned
Mix.ProjectStack.post_config(deps: [])
Mix.ProjectStack.clear_cache()
Mix.Project.pop()
Mix.Project.push(SuccessfulDepsApp)
Mix.Tasks.Deps.Loadpaths.run([])
refute File.exists?("_build/dev/lib/ok/ebin/ok.app")
assert File.exists?("_build/dev/lib/sample/ebin/sample.app")
end)
end
## deps.unlock
# --all empties the lock file entirely.
test "unlocks all deps", context do
Mix.Project.push(DepsApp)
in_tmp(context.test, fn ->
Mix.Dep.Lock.write(%{git_repo: "abcdef"})
assert Mix.Dep.Lock.read() == %{git_repo: "abcdef"}
Mix.Tasks.Deps.Unlock.run(["--all"])
assert Mix.Dep.Lock.read() == %{}
end)
end
# --unused drops lock entries for deps no longer declared in the project
# (:whatever), keeping declared ones (:ok).
test "unlocks unused deps", context do
Mix.Project.push(DepsApp)
in_tmp(context.test, fn ->
Mix.Dep.Lock.write(%{whatever: "abcdef", ok: "abcdef"})
assert Mix.Dep.Lock.read() == %{whatever: "abcdef", ok: "abcdef"}
Mix.Tasks.Deps.Unlock.run(["--unused"])
assert Mix.Dep.Lock.read() == %{ok: "abcdef"}
end)
end
# Named deps are removed from the lock; names with no lock entry produce a
# warning on the :error channel instead of failing.
test "unlocks specific deps", context do
Mix.Project.push(DepsApp)
in_tmp(context.test, fn ->
Mix.Dep.Lock.write(%{git_repo: "abcdef", another: "hash"})
Mix.Tasks.Deps.Unlock.run(["git_repo", "unknown"])
assert Mix.Dep.Lock.read() == %{another: "hash"}
error = "warning: unknown dependency is not locked"
assert_received {:mix_shell, :error, [^error]}
end)
end
# --filter unlocks every dep whose name contains the substring and prints
# the list of names it removed.
test "unlocks filtered deps", context do
Mix.Project.push(DepsApp)
in_tmp(context.test, fn ->
Mix.Dep.Lock.write(%{git_repo: "abcdef", another: "hash", another_one: "hash"})
Mix.Tasks.Deps.Unlock.run(["--filter", "another"])
assert Mix.Dep.Lock.read() == %{git_repo: "abcdef"}
output = """
Unlocked deps:
* another
* another_one
"""
assert_received {:mix_shell, :info, [^output]}
end)
end
# Calling deps.unlock with no arguments and no flag is an error.
test "fails with message on missing dependencies" do
Mix.Project.push(DepsApp)
assert_raise Mix.Error, ~r/"mix deps\.unlock" expects dependencies as arguments/, fn ->
Mix.Tasks.Deps.Unlock.run([])
end
end
## Deps environment
# Fixture with a path dep carrying no :env option — deps default to :prod.
defmodule DepsEnvApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:raw_repo, "0.1.0", path: "custom/raw_repo"}
]
]
end
end
# Same dep but with an explicit `env: :dev` override.
defmodule CustomDepsEnvApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:raw_repo, "0.1.0", path: "custom/raw_repo", env: :dev}
]
]
end
end
# The fixture dep prints its Mix env, which the test inspects via the shell.
test "sets deps env to prod by default" do
Mix.Project.push(DepsEnvApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Update.run(["--all"])
assert_received {:mix_shell, :info, [":raw_repo env is prod"]}
end)
end
# The `env: :dev` option must propagate to the dependency's build.
test "can customize environment" do
Mix.Project.push(CustomDepsEnvApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Update.run(["--all"])
assert_received {:mix_shell, :info, [":raw_repo env is dev"]}
end)
end
## Nested dependencies
# git_repo is declared at the top level while bad_deps_repo brings in a
# conflicting child git_repo — triggers the "overriding" diverged error.
defmodule ConflictDepsApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:git_repo, "0.1.0", path: "custom/raw_repo"},
{:bad_deps_repo, "0.1.0", path: "custom/bad_deps_repo"}
]
]
end
end
# Two children (deps_repo and bad_deps_repo) each pull git_repo with
# different specs — triggers the "different specs" diverged error.
defmodule DivergedDepsApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:bad_deps_repo, "0.1.0", path: "custom/bad_deps_repo"}
]
]
end
end
# Top-level git_repo requirement is compatible with the child's, so the
# converger can unify them.
defmodule ConvergedDepsApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:deps_repo, "0.1.0", path: "custom/deps_repo"},
{:git_repo, ">= 0.1.0", git: MixTest.Case.fixture_path("git_repo")}
]
]
end
end
# `override: true` lets the top-level git_repo win over the child's spec.
defmodule OverriddenDepsApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:bad_deps_repo, "0.1.0", path: "custom/bad_deps_repo"},
{:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo"), override: true}
]
]
end
end
# Identical to OverriddenDepsApp minus `override: true` — must error.
defmodule NonOverriddenDepsApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:bad_deps_repo, "0.1.0", path: "custom/bad_deps_repo"},
{:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo")}
]
]
end
end
# Updating a dependency name that isn't declared raises with a clear error.
test "fails on missing dependencies" do
Mix.Project.push(SuccessfulDepsApp)
in_fixture("deps_status", fn ->
assert_raise Mix.Error, ~r/Unknown dependency invalid for environment dev/, fn ->
Mix.Tasks.Deps.Update.run(["invalid"])
end
end)
end
# NOTE: @overriding_msg is intentionally redefined before the "even when
# optional" test below; each `test` macro expands against the value in
# effect at its definition point.
@overriding_msg " the dependency git_repo in mix.exs is overriding a child dependency"
# loadpaths, get and update must all fail the same way on the conflict.
test "fails on diverged dependencies on get/update" do
Mix.Project.push(ConflictDepsApp)
in_fixture("deps_status", fn ->
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Loadpaths.run([])
end
assert_received {:mix_shell, :error, [@overriding_msg <> _]}
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Get.run([])
end
assert_received {:mix_shell, :error, [@overriding_msg <> _]}
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Update.run(["--all"])
end
assert_received {:mix_shell, :error, [@overriding_msg <> _]}
end)
end
# Diverged specs are reported with the offending mix.exs and the full spec.
test "fails on diverged dependencies on check" do
Mix.Project.push(DivergedDepsApp)
in_fixture("deps_status", fn ->
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Loadpaths.run([])
end
assert_received {:mix_shell, :error, [" different specs were given" <> _ = received_msg]}
assert received_msg =~ "In custom/deps_repo/mix.exs:"
assert received_msg =~
"{:git_repo, \"0.1.0\", [env: :prod, git: #{inspect(fixture_path("git_repo"))}]}"
end)
end
# Rewrites the child's mix.exs so its git_repo requirement (0.2.0) can no
# longer converge with the top level's (0.1.0 checkout).
test "fails on diverged dependencies by requirement" do
Mix.Project.push(ConvergedDepsApp)
in_fixture("deps_status", fn ->
File.write!("custom/deps_repo/mix.exs", """
defmodule DepsRepo do
use Mix.Project
def project do
[
app: :deps_repo,
version: "0.1.0",
deps: [
{:git_repo, "0.2.0", git: MixTest.Case.fixture_path("git_repo")}
]
]
end
end
""")
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Get.run([])
Mix.Tasks.Deps.Loadpaths.run([])
end
assert_received {:mix_shell, :error, [" the dependency git_repo 0.1.0" <> _ = msg]}
assert msg =~ "In custom/deps_repo/mix.exs:"
assert msg =~
"{:git_repo, \"0.2.0\", [env: :prod, git: #{inspect(fixture_path("git_repo"))}]}"
end)
end
# Redefinition: shorter prefix so it still matches the optional-dep error.
@overriding_msg " the dependency git_repo in mix.exs is overriding"
# A child dep pointing at a different git source diverges even without a
# version requirement.
test "fails on diverged dependencies even when optional" do
Mix.Project.push(ConvergedDepsApp)
in_fixture("deps_status", fn ->
File.write!("custom/deps_repo/mix.exs", """
defmodule DepsRepo do
use Mix.Project
def project do
[
app: :deps_repo,
version: "0.1.0",
deps: [
{:git_repo, git: MixTest.Case.fixture_path("bad_git_repo"), branch: "omg"}
]
]
end
end
""")
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Get.run([])
Mix.Tasks.Deps.Loadpaths.run([])
end
assert_received {:mix_shell, :error, [@overriding_msg <> _]}
end)
end
# Converged deps are fetched exactly once (the converger dedupes); purge in
# `after` removes fixture modules loaded during the run.
test "works with converged dependencies" do
Mix.Project.push(ConvergedDepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Get.run([])
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
# Make sure retriever uses converger,
# so the message appears just once
refute_received {:mix_shell, :info, [^message]}
Mix.Task.clear()
Mix.Tasks.Deps.Update.run(["--all"])
message = "* Updating git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
end)
after
purge([GitRepo, GitRepo.MixProject])
end
# Compiling with a deleted dep fails, but --no-deps-check skips the check.
test "does not check dependencies if --no-deps-check is provided" do
Mix.Project.push(SuccessfulDepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Get.run([])
File.rm_rf!("deps/ok")
assert_raise Mix.Error, fn ->
Mix.Tasks.Compile.run([])
end
Mix.Tasks.Compile.run(["--no-deps-check"])
end)
end
# With `override: true` the otherwise-conflicting dep fetches cleanly, and
# again only once thanks to the converger.
test "works with overridden dependencies" do
Mix.Project.push(OverriddenDepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Get.run([])
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
# Make sure retriever uses converger,
# so the message appears just once
refute_received {:mix_shell, :info, [^message]}
Mix.Task.clear()
Mix.Tasks.Deps.Update.run(["--all"])
message = "* Updating git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
end)
after
purge([GitRepo, GitRepo.MixProject])
end
# Without override, loadpaths raises and the error names the top-level
# mix.exs plus the conflicting spec; a raw receive gives a hard failure
# (flunk) when no message arrived.
test "converged dependencies errors if not overriding" do
Mix.Project.push(NonOverriddenDepsApp)
in_fixture("deps_status", fn ->
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Loadpaths.run([])
end
receive do
{:mix_shell, :error, [" the dependency git_repo in mix.exs" <> _ = msg]} ->
assert msg =~ "In mix.exs:"
assert msg =~
"{:git_repo, \"0.1.0\", [env: :prod, git: #{inspect(fixture_path("git_repo"))}]}"
after
0 -> flunk("expected overriding error message")
end
end)
after
purge([GitRepo, GitRepo.MixProject])
end
# Forges a compile.elixir_scm manifest claiming a bogus Elixir version
# ("the_future"); `mix deps` reports it, and deps.loadpaths recompiles.
test "checks if dependencies are using old Elixir version" do
Mix.Project.push(SuccessfulDepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Compile.run([])
Mix.Tasks.Deps.Loadpaths.run([])
File.mkdir_p!("_build/dev/lib/ok/ebin")
File.mkdir_p!("_build/dev/lib/ok/.mix")
manifest_data = :erlang.term_to_binary({:v1, "the_future", :scm})
File.write!("_build/dev/lib/ok/.mix/compile.elixir_scm", manifest_data)
Mix.Task.clear()
msg =
" the dependency was built with an out-of-date Elixir version, run \"mix deps.compile\""
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, [^msg]}
# deps.loadpaths will automatically recompile it
Mix.Tasks.Deps.Loadpaths.run([])
Mix.Tasks.Deps.run([])
refute_received {:mix_shell, :info, [^msg]}
end)
end
# Same trick with a manifest recording a mismatched SCM (the atom :scm
# instead of the real source manager); loadpaths again heals the build.
test "checks if dependencies are using old scm version" do
Mix.Project.push(SuccessfulDepsApp)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Compile.run([])
Mix.Tasks.Deps.Loadpaths.run([])
File.mkdir_p!("_build/dev/lib/ok/ebin")
File.mkdir_p!("_build/dev/lib/ok/.mix")
manifest_data =
:erlang.term_to_binary({1, {System.version(), :erlang.system_info(:otp_release)}, :scm})
File.write!("_build/dev/lib/ok/.mix/compile.elixir_scm", manifest_data)
Mix.Task.clear()
msg = " the dependency was built with another SCM, run \"mix deps.compile\""
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, [^msg]}
# deps.loadpaths will automatically recompile it
Mix.Tasks.Deps.Loadpaths.run([])
Mix.Tasks.Deps.run([])
refute_received {:mix_shell, :info, [^msg]}
end)
end
# Fixture dep declared with `compile: false` — deps.compile must skip it.
defmodule NonCompilingDeps do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:git_repo, "0.1.0", git: MixTest.Case.fixture_path("git_repo"), compile: false}
]
]
end
end
# Absence of the "==> git_repo" banner proves the dep was not compiled.
test "does not compile deps that have explicit flag" do
Mix.Project.push(NonCompilingDeps)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.Compile.run([])
refute_received {:mix_shell, :info, ["==> git_repo"]}
end)
end
# Fixture with the same dep listed twice, as happens when umbrella apps'
# deps are merged.
defmodule DupDeps do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
# Simulate dependencies gathered together from umbrella
{:ok, "0.1.0", path: "deps/ok"},
{:ok, "0.1.0", path: "deps/ok"}
]
]
end
end
# A duplicate warning is emitted once and the dep is listed only once.
test "warns and converges duplicated deps at the same level" do
Mix.Project.push(DupDeps)
in_fixture("deps_status", fn ->
Mix.Tasks.Deps.run([])
msg =
"warning: the dependency :ok is duplicated at the top level, please remove one of them"
assert_received {:mix_shell, :error, [^msg]}
msg = "* ok 0.1.0 (deps/ok) (mix)"
assert_received {:mix_shell, :info, [^msg]}
refute_received {:mix_shell, :info, [^msg]}
end)
end
## deps.clean
# Fixture with one git dep and one path dep, used by all cleaning tests.
defmodule CleanDepsApp do
def project do
[
app: :raw_sample,
version: "0.1.0",
deps: [
{:git_repo, ">= 0.1.0", git: MixTest.Case.fixture_path("git_repo")},
{:ok, ">= 2.0.0", path: "deps/ok"}
]
]
end
end
# Bare deps.clean is an error; --only scopes cleaning to one env; --all
# cleans every env but never touches the project's own build (raw_sample).
test "cleans dependencies" do
Mix.Project.push(CleanDepsApp)
in_fixture("deps_status", fn ->
File.mkdir_p!("_build/dev/lib/raw_sample")
File.mkdir_p!("_build/dev/lib/git_repo")
File.mkdir_p!("_build/test/lib/git_repo")
File.mkdir_p!("_build/dev/lib/ok")
File.mkdir_p!("_build/test/lib/ok")
message =
"\"mix deps.clean\" expects dependencies as arguments or " <>
"a flag indicating which dependencies to clean. " <>
"The --all option will clean all dependencies while " <>
"the --unused option cleans unused dependencies"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Deps.Clean.run([])
end
Mix.Tasks.Deps.Clean.run(["--only", "dev", "--all"])
refute File.exists?("_build/dev/lib/git_repo")
refute File.exists?("_build/dev/lib/ok")
assert File.exists?("_build/test/lib/git_repo")
assert File.exists?("_build/dev/lib/raw_sample")
Mix.Tasks.Deps.Clean.run(["--all"])
refute File.exists?("_build/dev/lib/git_repo")
refute File.exists?("_build/test/lib/git_repo")
assert File.exists?("_build/dev/lib/raw_sample")
end)
end
# --unused removes both source and build of deps not declared anymore,
# keeping declared deps and the project's own build intact.
test "cleans unused dependencies" do
Mix.Project.push(CleanDepsApp)
in_fixture("deps_status", fn ->
File.mkdir_p!("_build/dev/lib/raw_sample")
File.mkdir_p!("deps/git_repo")
File.mkdir_p!("_build/dev/lib/git_repo")
File.mkdir_p!("deps/git_repo_unused")
File.mkdir_p!("_build/dev/lib/git_repo_unused")
Mix.Tasks.Deps.Clean.run(["--unused"])
assert File.exists?("deps/git_repo")
assert File.exists?("_build/dev/lib/git_repo")
refute File.exists?("deps/git_repo_unused")
refute File.exists?("_build/dev/lib/git_repo_unused")
assert File.exists?("_build/dev/lib/raw_sample")
end)
end
# --build removes only the build artifacts, never the fetched source.
test "cleans dependencies build" do
Mix.Project.push(CleanDepsApp)
in_fixture("deps_status", fn ->
File.mkdir_p!("deps/raw_sample")
File.mkdir_p!("_build/dev/lib/raw_sample")
Mix.Tasks.Deps.Clean.run(["raw_sample", "--build"])
assert File.exists?("deps/raw_sample")
refute File.exists?("_build/dev/lib/raw_sample")
end)
end
# Cleaning a name that has no build directory warns instead of deleting.
test "warns on invalid path on clean dependencies" do
Mix.Project.push(CleanDepsApp)
in_fixture("deps_status", fn ->
File.mkdir_p!("deps/raw_sample")
File.mkdir_p!("_build/dev/lib/raw_sample")
Mix.Tasks.Deps.Clean.run(["raw_sample_with_a_typo"])
assert File.exists?("deps/raw_sample")
msg = "warning: the dependency raw_sample_with_a_typo is not present in the build directory"
assert_received {:mix_shell, :error, [^msg]}
end)
end
# Path deps live in the user's tree: cleaning must drop builds only and
# leave deps/ok (the source checkout) alone.
test "does not remove dependency source when using :path" do
Mix.Project.push(CleanDepsApp)
in_fixture("deps_status", fn ->
assert File.exists?("deps/ok")
Mix.Tasks.Deps.Clean.run(["raw_sample", "--all"])
refute File.exists?("_build/dev/lib/ok")
refute File.exists?("_build/test/lib/ok")
assert File.exists?("deps/ok")
end)
end
end
| 29.162651 | 133 | 0.60471 |
9e5b5cfae19fe10708cf990d5370815495487696 | 669 | ex | Elixir | solutions/infinite-house-of-pancakes/elixir/approach4.ex | EdwinFajardoBarrera/google-code-jam | 3bc2979c726a3a40d1a82888439534ea429c2327 | [
"MIT"
] | 15 | 2015-02-27T01:34:41.000Z | 2022-02-03T23:16:47.000Z | solutions/infinite-house-of-pancakes/elixir/approach4.ex | EdwinFajardoBarrera/google-code-jam | 3bc2979c726a3a40d1a82888439534ea429c2327 | [
"MIT"
] | 35 | 2015-02-17T15:55:33.000Z | 2021-05-10T16:52:46.000Z | solutions/infinite-house-of-pancakes/elixir/approach4.ex | EdwinFajardoBarrera/google-code-jam | 3bc2979c726a3a40d1a82888439534ea429c2327 | [
"MIT"
] | 163 | 2015-02-10T19:19:51.000Z | 2021-10-19T04:20:15.000Z |
defmodule Pancakes do
  @moduledoc """
  Solver for Google Code Jam "Infinite House of Pancakes".

  Reads the number of test cases from stdin; for each case reads the count
  of non-empty plates and the plate sizes, then prints the minimum number
  of minutes required to finish all pancakes.
  """

  # Reads all test cases from stdin and prints one answer per case.
  # Fixes vs. the previous version: removed the module-level debug
  # `IO.puts("hola")` (executed at compile time), silenced unused-variable
  # warnings, and replaced float division + trunc with integer `div/2`.
  def minutes do
    {tests, _rest} = Integer.parse(IO.gets(""))

    for _case <- 1..tests do
      # The non-empty plate count is read only to consume the input line;
      # the plate list on the next line already carries that information.
      {_non_empty, _rest} = Integer.parse(IO.gets(""))

      plates =
        IO.gets("")
        |> String.trim()
        |> String.split(" ")
        |> Enum.map(fn token -> elem(Integer.parse(token), 0) end)

      IO.puts(best_time(plates))
    end
  end

  # Minimum minutes for one case: for each candidate cap j on the largest
  # plate, splitting a plate of size p into chunks of at most j costs
  # ceil(p / j) - 1 == div(p - 1, j) special minutes, plus j minutes of
  # plain eating. Take the best j.
  defp best_time(plates) do
    maximum = Enum.max(plates)

    1..(maximum + 1)
    |> Enum.map(fn j ->
      j + Enum.sum(Enum.map(plates, fn p -> div(p - 1, j) end))
    end)
    |> Enum.min()
  end
end
pancakes = Pancakes
pancakes.minutes() | 23.892857 | 83 | 0.530643 |
9e5b61479a9a1eba039c1d5c07ab9e121caaab36 | 362 | ex | Elixir | lib/class_roll/service/date.ex | mazurka/example-class-roll | 50e55eca6e0e3dce5214d1bb9b03a0421c0ed879 | [
"MIT"
] | null | null | null | lib/class_roll/service/date.ex | mazurka/example-class-roll | 50e55eca6e0e3dce5214d1bb9b03a0421c0ed879 | [
"MIT"
] | null | null | null | lib/class_roll/service/date.ex | mazurka/example-class-roll | 50e55eca6e0e3dce5214d1bb9b03a0421c0ed879 | [
"MIT"
] | null | null | null | defmodule ClassRoll.Service.Date do
# Stub service for class dates. Every function returns a canned value in
# the {:ok, result} tagged-tuple convention. Unused parameters are
# underscore-prefixed to silence compiler warnings while keeping arities.

# Lists date ids for a class (stubbed as 1..5; the class is ignored).
def list(_class) do
  {:ok, 1..5}
end

# Fetches one date by id; echoes the id back with a fixed timestamp.
def get(id) do
  {:ok, %{id: id, date: "2015-08-22T07:18:52Z"}}
end

# Lists date ids a member attended in a class (stubbed as 1..15).
def list_by_member(_class, _member) do
  {:ok, 1..15}
end

# Creates a date for a class; returns a canned record with a fixed id.
def create(_class, _date) do
  {:ok, %{id: 1524, date: "2015-08-22T07:18:52Z"}}
end

# Updates a date with the given params; the stub always reports success.
def update(_date, _params) do
  {:ok, true}
end
end | 17.238095 | 52 | 0.588398 |
9e5b7db8995aeb5962f10df707b6742b8efe3877 | 430 | ex | Elixir | lib/cloudevents/format/v_0_2/encoder/avro.ex | mmacai/cloudevents-ex | ef7ab9e39019112e0bec2058c3611ebcd04db605 | [
"Apache-2.0"
] | 7 | 2020-03-17T22:54:51.000Z | 2021-09-10T17:33:03.000Z | lib/cloudevents/format/v_0_2/encoder/avro.ex | mmacai/cloudevents-ex | ef7ab9e39019112e0bec2058c3611ebcd04db605 | [
"Apache-2.0"
] | 2 | 2020-07-13T23:01:52.000Z | 2020-07-16T17:05:56.000Z | lib/cloudevents/format/v_0_2/encoder/avro.ex | mmacai/cloudevents-ex | ef7ab9e39019112e0bec2058c3611ebcd04db605 | [
"Apache-2.0"
] | 1 | 2020-07-07T19:49:25.000Z | 2020-07-07T19:49:25.000Z | defimpl Cloudevents.Format.Encoder.Avro, for: Cloudevents.Format.V_0_2.Event do
@moduledoc false
# Encodes a v0.2 event with Avrora under the configured schema name, or
# returns a tagged error when no schema name has been configured.
def encode(event) do
  schema_name = Cloudevents.Config.avro_event_schema_name()

  if schema_name == nil do
    {:error, "The name of the Avro-schema used to encode events is not set"}
  else
    event
    |> Cloudevents.Format.Encoder.Map.convert()
    |> Avrora.encode(schema_name: schema_name)
  end
end
end
| 26.875 | 80 | 0.676744 |
9e5b81f788a5659a0537abe9cd4cfe274f019327 | 2,352 | ex | Elixir | clients/double_click_search/lib/google_api/double_click_search/v2/model/report_request_time_range.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/double_click_search/lib/google_api/double_click_search/v2/model/report_request_time_range.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/double_click_search/lib/google_api/double_click_search/v2/model/report_request_time_range.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DoubleClickSearch.V2.Model.ReportRequestTimeRange do
@moduledoc """
If metrics are requested in a report, this argument will be used to restrict the metrics to a specific time range.
## Attributes
* `changedAttributesSinceTimestamp` (*type:* `DateTime.t`, *default:* `nil`) - Inclusive UTC timestamp in RFC format, e.g., 2013-07-16T10:16:23.555Z. See additional references on how changed attribute reports work.
* `changedMetricsSinceTimestamp` (*type:* `DateTime.t`, *default:* `nil`) - Inclusive UTC timestamp in RFC format, e.g., 2013-07-16T10:16:23.555Z. See additional references on how changed metrics reports work.
* `endDate` (*type:* `String.t`, *default:* `nil`) - Inclusive date in YYYY-MM-DD format.
* `startDate` (*type:* `String.t`, *default:* `nil`) - Inclusive date in YYYY-MM-DD format.
"""
# `use` injects the struct/field machinery; the `field/1,2` macro below
# presumably comes from GoogleApi.Gax.ModelBase — confirm against that module.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:changedAttributesSinceTimestamp => DateTime.t(),
:changedMetricsSinceTimestamp => DateTime.t(),
:endDate => String.t(),
:startDate => String.t()
}
# Field declarations mirror the @type above; DateTime fields are coerced
# via the `as: DateTime` option.
field(:changedAttributesSinceTimestamp, as: DateTime)
field(:changedMetricsSinceTimestamp, as: DateTime)
field(:endDate)
field(:startDate)
end
# Auto-generated Poison protocol implementations: decoding delegates to the
# model's own decode/2; encoding delegates to the shared Gax encoder.
defimpl Poison.Decoder, for: GoogleApi.DoubleClickSearch.V2.Model.ReportRequestTimeRange do
def decode(value, options) do
GoogleApi.DoubleClickSearch.V2.Model.ReportRequestTimeRange.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DoubleClickSearch.V2.Model.ReportRequestTimeRange do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42 | 218 | 0.734269 |
9e5bd6c94f98d8922160811c93da6829da115e08 | 300 | ex | Elixir | web/controllers/page_controller.ex | seansu4you87/betazoids | a8aac9074f5efaad4cd88ffdf7cdef53d4beb5cd | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | seansu4you87/betazoids | a8aac9074f5efaad4cd88ffdf7cdef53d4beb5cd | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | seansu4you87/betazoids | a8aac9074f5efaad4cd88ffdf7cdef53d4beb5cd | [
"MIT"
] | null | null | null | defmodule Betazoids.PageController do
use Betazoids.Web, :controller
# Renders the landing page after attaching two demo flash messages.
def index(conn, _params) do
  conn_with_info = put_flash(conn, :info, "Welcome to Phoenix, from flash info!")
  conn_with_error = put_flash(conn_with_info, :error, "Let's pretend we have an error.")
  # put_layout(false) was disabled here; the default layout still renders.
  render(conn_with_error, "index.html")
end
end
| 25 | 63 | 0.666667 |
9e5c04b5a8dc44364ee1e20d7de75618d23d141a | 4,928 | ex | Elixir | lib/json/encoder/default_implementations.ex | ryosan-470/elixir-json | 2a7080440814401b16529eb5e12425d656c9219e | [
"BSD-3-Clause"
] | null | null | null | lib/json/encoder/default_implementations.ex | ryosan-470/elixir-json | 2a7080440814401b16529eb5e12425d656c9219e | [
"BSD-3-Clause"
] | null | null | null | lib/json/encoder/default_implementations.ex | ryosan-470/elixir-json | 2a7080440814401b16529eb5e12425d656c9219e | [
"BSD-3-Clause"
] | null | null | null | defimpl JSON.Encoder, for: Tuple do
@doc """
Encodes a tuple by first converting it to a list, producing a JSON array.
"""
def encode(tuple) do
  tuple
  |> Tuple.to_list()
  |> JSON.Encoder.Helpers.enum_encode()
end

@doc """
Tuples always map to the JSON `:array` type.
"""
def typeof(_tuple), do: :array
end
defimpl JSON.Encoder, for: HashDict do
  @doc """
  Serializes a HashDict's key/value pairs as a JSON object.
  """
  def encode(dict) do
    JSON.Encoder.Helpers.dict_encode(dict)
  end

  @doc """
  A HashDict always maps to the JSON `:object` type.
  """
  def typeof(_dict), do: :object
end
defimpl JSON.Encoder, for: List do
  @doc """
  Encodes a list as a JSON array, or as a JSON object when the list is a
  keyword list. The empty list is always the empty array.
  """
  def encode([]), do: {:ok, "[]"}

  def encode(list) do
    case Keyword.keyword?(list) do
      true -> JSON.Encoder.Helpers.dict_encode(list)
      false -> JSON.Encoder.Helpers.enum_encode(list)
    end
  end

  @doc """
  Returns `:object` for keyword lists and `:array` otherwise.
  """
  # The empty-list clause must come first: Keyword.keyword?([]) is true,
  # yet [] is reported as an array.
  def typeof([]), do: :array

  def typeof(list) do
    case Keyword.keyword?(list) do
      true -> :object
      false -> :array
    end
  end
end
defimpl JSON.Encoder, for: [Integer, Float] do
  @doc """
  Converts Elixir integers and floats into JSON numbers.
  """
  # The compiler has already normalized octal/hex literals, so the string
  # representation is always plain decimal.
  def encode(number), do: {:ok, to_string(number)}

  @doc """
  Numbers always map to the JSON `:number` type.
  """
  def typeof(_number), do: :number
end
defimpl JSON.Encoder, for: Atom do
  @doc """
  Converts atoms to their JSON equivalents: `nil`/`true`/`false` become the
  JSON literals, every other atom is encoded as its string representation.
  """
  def encode(nil), do: {:ok, "null"}
  def encode(false), do: {:ok, "false"}
  def encode(true), do: {:ok, "true"}

  def encode(atom) when is_atom(atom) do
    atom
    |> Atom.to_string()
    |> JSON.Encoder.encode()
  end

  @doc """
  Returns an atom that represents the JSON type for the term
  """
  # Clause order matters: true, false and nil are themselves atoms, so the
  # more specific clauses must be tried before the is_atom catch-all.
  def typeof(boolean) when is_boolean(boolean), do: :boolean
  def typeof(nil), do: :null
  def typeof(atom) when is_atom(atom), do: :string
end
defimpl JSON.Encoder, for: BitString do
# 32 = ascii space, cleaner than using "? ", I think
@acii_space 32
@doc """
Converts Elixir String into JSON String
"""
# Wraps the escaped body in double quotes; escaping is done one UTF-8
# codepoint at a time below.
def encode(bitstring), do: {:ok, <<?">> <> encode_binary_recursive(bitstring, []) <> <<?">>}
# Walks the binary codepoint by codepoint, prepending escaped output onto
# the accumulator (hence the reverse at the end).
defp encode_binary_recursive(<<head::utf8, tail::binary>>, acc) do
encode_binary_recursive(tail, encode_binary_character(head, acc))
end
# stop cond
defp encode_binary_recursive(<<>>, acc), do: acc |> Enum.reverse() |> to_string
# Characters are pushed in reverse order ([char, escape | acc]) because the
# whole accumulator is reversed once at the end.
defp encode_binary_character(?", acc), do: [?", ?\\ | acc]
defp encode_binary_character(?\b, acc), do: [?b, ?\\ | acc]
defp encode_binary_character(?\f, acc), do: [?f, ?\\ | acc]
defp encode_binary_character(?\n, acc), do: [?n, ?\\ | acc]
defp encode_binary_character(?\r, acc), do: [?r, ?\\ | acc]
defp encode_binary_character(?\t, acc), do: [?t, ?\\ | acc]
defp encode_binary_character(?\\, acc), do: [?\\, ?\\ | acc]
# Remaining control characters (below ASCII space) become \uXXXX escapes.
defp encode_binary_character(char, acc) when is_number(char) and char < @acii_space do
encode_hexadecimal_unicode_control_character(char, [?u, ?\\ | acc])
end
# anything else besides these control characters, just let it through
defp encode_binary_character(char, acc) when is_number(char), do: [char | acc]
# Renders the codepoint as uppercase hex, zero-padded to four digits, and
# prepends it (reversed) onto the accumulator.
defp encode_hexadecimal_unicode_control_character(char, acc) when is_number(char) do
[
char
|> Integer.to_charlist(16)
|> zeropad_hexadecimal_unicode_control_character
|> Enum.reverse()
| acc
]
end
# Left-pads the hex digits to exactly four characters for \uXXXX form.
defp zeropad_hexadecimal_unicode_control_character([a, b, c]), do: [?0, a, b, c]
defp zeropad_hexadecimal_unicode_control_character([a, b]), do: [?0, ?0, a, b]
defp zeropad_hexadecimal_unicode_control_character([a]), do: [?0, ?0, ?0, a]
defp zeropad_hexadecimal_unicode_control_character(iolist) when is_list(iolist), do: iolist
@doc """
Returns an atom that represents the JSON type for the term
"""
def typeof(_), do: :string
end
defimpl JSON.Encoder, for: Record do
  @doc """
  Encodes an Elixir record into a JSON object via its keyword representation.
  """
  def encode(record) do
    JSON.Encoder.Helpers.dict_encode(record.to_keywords)
  end

  @doc """
  Returns `:object`, the JSON type used for records.
  """
  def typeof(_record), do: :object
end
defimpl JSON.Encoder, for: Map do
  @doc """
  Encodes an Elixir map into a JSON object.
  """
  def encode(map), do: JSON.Encoder.Helpers.dict_encode(map)

  @doc """
  Returns `:object`, the JSON type used for maps.
  """
  def typeof(_map), do: :object
end
defimpl JSON.Encoder, for: Any do
  @moduledoc """
  Fallback implementation used for values without a dedicated encoder.
  """

  @doc """
  Encodes structs (and any other map) as JSON objects; every other term is
  encoded as the JSON string produced by `Kernel.inspect/2`.
  """
  # Clause order matters: the map clause must come before the catch-all.
  def encode(%{} = struct) do
    JSON.Encoder.Helpers.dict_encode(Map.to_list(struct))
  end

  def encode(term) do
    JSON.Encoder.encode(Kernel.inspect(term))
  end

  @doc """
  Returns `:object` for maps/structs and `:string` for everything else.
  """
  def typeof(struct) when is_map(struct), do: :object
  def typeof(_), do: :string
end
| 26.212766 | 94 | 0.657468 |
9e5c248ecaa50cb5f89827313ea9e65f62f52b81 | 8,141 | exs | Elixir | test/goth/config_test.exs | ananthakumaran/goth | 9b40537ae98dd425f06c4210f5c7965c3c5ac9aa | [
"MIT"
] | 1 | 2019-08-24T20:56:59.000Z | 2019-08-24T20:56:59.000Z | test/goth/config_test.exs | ananthakumaran/goth | 9b40537ae98dd425f06c4210f5c7965c3c5ac9aa | [
"MIT"
] | null | null | null | test/goth/config_test.exs | ananthakumaran/goth | 9b40537ae98dd425f06c4210f5c7965c3c5ac9aa | [
"MIT"
] | 1 | 2021-09-22T20:18:04.000Z | 2021-09-22T20:18:04.000Z | defmodule Goth.ConfigTest do
use ExUnit.Case
alias Goth.Config
def check_config(map) do
check_config(map, fn key -> Config.get(key) end)
end
def check_config(map, get_config) do
map
|> Map.keys()
|> Enum.each(fn key ->
assert {:ok, map[key]} == get_config.(key)
end)
end
  # Each test gets a fresh Bypass server standing in for the GCE metadata
  # endpoint; its URL is injected via the :metadata_url application env.
  setup do
    bypass = Bypass.open()
    bypass_url = "http://localhost:#{bypass.port}"
    Application.put_env(:goth, :metadata_url, bypass_url)
    {:ok, bypass: bypass}
  end
  test "setting and retrieving value" do
    Config.set(:key, "123")
    assert {:ok, "123"} == Config.get(:key)
  end
  # Keys are normalized internally, so atom and string forms are
  # interchangeable in both directions.
  test "setting a value by atom can be retrieved by string" do
    Config.set(:random, "value")
    assert {:ok, "value"} == Config.get("random")
  end
  test "setting a value by string can be retrieved by atom" do
    Config.set("totally", "cool")
    assert {:ok, "cool"} == Config.get(:totally)
  end
  test "the initial state is what's passed in from the app config" do
    "test/data/test-credentials.json"
    |> Path.expand()
    |> File.read!()
    |> Jason.decode!()
    |> check_config(fn key -> Config.get(key) end)
  end
  test "dynamically add configs without interfering with existing accounts" do
    original_config = "test/data/test-credentials.json"
    |> Path.expand()
    |> File.read!()
    |> Jason.decode!()
    dynamic_config = "test/data/test-credentials-2.json"
    |> Path.expand()
    |> File.read!()
    |> Jason.decode!()
    Config.add_config(dynamic_config)
    # The original account must still resolve by bare key; the added one is
    # scoped by its client_email.
    check_config(original_config)
    check_config(dynamic_config, fn key -> Config.get(dynamic_config["client_email"], key) end)
  end
  test "the initial state has the token_source set to oauth_jwt" do
    assert {:ok, :oauth_jwt} == Config.get(:token_source)
  end
  test "Config can start up with no config when disabled" do
    saved_config = Application.get_all_env(:goth)
    try do
      [:json, :metadata_url, :config_root_dir]
      |> Enum.each(&Application.delete_env(:goth, &1))
      Application.put_env(:goth, :disabled, true, persistent: true)
      {:ok, pid} = GenServer.start_link(Goth.Config, :ok)
      assert Process.alive?(pid)
    after
      # Always restore the env we deleted, even if the assertion fails.
      Application.delete_env(:goth, :disabled)
      Enum.each(saved_config, fn {k, v} ->
        Application.put_env(:goth, k, v, persistent: true)
      end)
    end
  end
test "Goth correctly retrieves project IDs from metadata", %{bypass: bypass} do
# The test configuration sets an example JSON blob. We override it briefly
# during this test.
current_json = Application.get_env(:goth, :json)
Application.put_env(:goth, :json, nil, persistent: true)
Application.stop(:goth)
# Fake project response
project = "test-project"
Bypass.expect(bypass, fn conn ->
uri = "/computeMetadata/v1/project/project-id"
assert(conn.request_path == uri, "Goth should ask for project ID")
Plug.Conn.resp(conn, 200, project)
end)
Application.start(:goth)
assert(
{:ok, :metadata} == Config.get(:token_source),
"Token source should be Google Cloud metadata"
)
assert(
{:ok, "test-project"} == Config.get(:project_id),
"Config should return project from metadata"
)
# Restore original config
Application.put_env(:goth, :json, current_json, persistent: true)
Application.stop(:goth)
Application.start(:goth)
end
test "GOOGLE_APPLICATION_CREDENTIALS is read" do
# The test configuration sets an example JSON blob. We override it briefly
# during this test.
current_json = Application.get_env(:goth, :json)
Application.put_env(:goth, :json, nil, persistent: true)
System.put_env("GOOGLE_APPLICATION_CREDENTIALS", "test/data/test-credentials-2.json")
Application.stop(:goth)
Application.start(:goth)
state =
"test/data/test-credentials-2.json"
|> Path.expand()
|> File.read!()
|> Jason.decode!()
|> Config.map_config()
Enum.each(state, fn {_, config} ->
Enum.each(config, fn {key, _} ->
assert {:ok, config[key]} == Config.get(key)
end)
end)
assert {:ok, :oauth_jwt} == Config.get(:token_source)
# Restore original config
Application.put_env(:goth, :json, current_json, persistent: true)
System.delete_env("GOOGLE_APPLICATION_CREDENTIALS")
Application.stop(:goth)
Application.start(:goth)
end
test "multiple credentials are parsed correctly" do
# The test configuration sets an example JSON blob. We override it briefly
# during this test.
current_json = Application.get_env(:goth, :json)
new_json = "test/data/test-multicredentials.json" |> Path.expand() |> File.read!()
Application.put_env(:goth, :json, new_json, persistent: true)
Application.stop(:goth)
Application.start(:goth)
state =
"test/data/test-multicredentials.json"
|> Path.expand()
|> File.read!()
|> Jason.decode!()
|> Config.map_config()
Enum.each(state, fn {account, config} ->
Enum.each(config, fn {key, _} ->
assert {:ok, config[key]} == Config.get(account, key)
end)
assert {:ok, :oauth_jwt} == Config.get(account, :token_source)
end)
# Restore original config
Application.put_env(:goth, :json, current_json, persistent: true)
System.delete_env("GOOGLE_APPLICATION_CREDENTIALS")
Application.stop(:goth)
Application.start(:goth)
end
test "gcloud default credentials are found", %{bypass: bypass} do
# The test configuration sets an example JSON blob. We override it briefly
# during this test.
current_json = Application.get_env(:goth, :json)
current_home = Application.get_env(:goth, :config_root_dir)
Application.put_env(:goth, :json, nil, persistent: true)
Application.put_env(:goth, :config_root_dir, "test/data/home", persistent: true)
Application.stop(:goth)
# Fake project response because the ADC doesn't embed a project.
project = "test-project"
Bypass.expect(bypass, fn conn ->
uri = "/computeMetadata/v1/project/project-id"
assert(conn.request_path == uri, "Goth should ask for project ID")
Plug.Conn.resp(conn, 200, project)
end)
Application.start(:goth)
state =
"test/data/home/gcloud/application_default_credentials.json"
|> Path.expand()
|> File.read!()
|> Jason.decode!()
check_config(state)
assert {:ok, :oauth_refresh} == Config.get(:token_source)
# Restore original config
Application.put_env(:goth, :json, current_json, persistent: true)
Application.put_env(:goth, :config_root_dir, current_home, persistent: true)
Application.stop(:goth)
Application.start(:goth)
end
test "project_id can be overridden in config" do
project = "different"
Application.put_env(:goth, :project_id, project, persistent: true)
Application.stop(:goth)
Application.start(:goth)
assert {:ok, project} == Config.get(:project_id)
Application.put_env(:goth, :project_id, nil, persistent: true)
Application.stop(:goth)
Application.start(:goth)
end
test "project_id can be overridden by environment variables" do
project_from_env = "different1"
project_from_devshell = "different2"
System.put_env("DEVSHELL_PROJECT_ID", project_from_devshell)
Application.stop(:goth)
Application.start(:goth)
assert {:ok, project_from_devshell} == Config.get(:project_id)
System.put_env("GOOGLE_CLOUD_PROJECT", project_from_env)
Application.stop(:goth)
Application.start(:goth)
assert {:ok, project_from_env} == Config.get(:project_id)
System.delete_env("DEVSHELL_PROJECT_ID")
System.delete_env("GOOGLE_CLOUD_PROJECT")
Application.stop(:goth)
Application.start(:goth)
end
test "the config_module is allowed to override config" do
Application.put_env(:goth, :config_module, Goth.TestConfigMod)
Application.stop(:goth)
Application.start(:goth)
assert {:ok, :val} == Goth.Config.get(:actor_email)
Application.delete_env(:goth, :config_module)
Application.stop(:goth)
Application.start(:goth)
end
end
| 30.605263 | 95 | 0.672153 |
9e5c4ba6fbd83094ad6b2211e37e47c7e36eabbc | 1,125 | exs | Elixir | config/config.exs | jordan0day/packet-api-elixir | 069e96888f1d5858586bff224f09408e671924d3 | [
"Unlicense"
] | 1 | 2020-01-27T00:49:16.000Z | 2020-01-27T00:49:16.000Z | config/config.exs | jordan0day/packet-api-elixir | 069e96888f1d5858586bff224f09408e671924d3 | [
"Unlicense"
] | null | null | null | config/config.exs | jordan0day/packet-api-elixir | 069e96888f1d5858586bff224f09408e671924d3 | [
"Unlicense"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favour of
# `import Config`; kept as-is to match the project's current Elixir
# requirement.
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
#     config :packet_api, key: :value
#
# and access this configuration in your application as:
#
#     Application.get_env(:packet_api, :key)
#
# You can also configure a 3rd-party app:
#
#     config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. Per-environment configuration lives in dev.exs, test.exs and
# such; configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
import_config "#{Mix.env()}.exs"
9e5ca34576acea4fdea5297abd6980c4ca93ad30 | 4,513 | ex | Elixir | clients/elixir/generated/lib/swaggy_jenkins/request_builder.ex | cliffano/jenkins-api-clients-generator | 522d02b3a130a29471df5ec1d3d22c822b3d0813 | [
"MIT"
] | null | null | null | clients/elixir/generated/lib/swaggy_jenkins/request_builder.ex | cliffano/jenkins-api-clients-generator | 522d02b3a130a29471df5ec1d3d22c822b3d0813 | [
"MIT"
] | null | null | null | clients/elixir/generated/lib/swaggy_jenkins/request_builder.ex | cliffano/jenkins-api-clients-generator | 522d02b3a130a29471df5ec1d3d22c822b3d0813 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule SwaggyJenkins.RequestBuilder do
@moduledoc """
Helper functions for building Tesla requests
"""
@doc """
Specify the request method when building a request
## Parameters
- request (Map) - Collected request options
- m (atom) - Request method
## Returns
Map
"""
@spec method(map(), atom) :: map()
def method(request, m) do
Map.put_new(request, :method, m)
end
@doc """
Specify the request method when building a request
## Parameters
- request (Map) - Collected request options
- u (String) - Request URL
## Returns
Map
"""
@spec url(map(), String.t) :: map()
def url(request, u) do
Map.put_new(request, :url, u)
end
@doc """
Add optional parameters to the request
## Parameters
- request (Map) - Collected request options
- definitions (Map) - Map of parameter name to parameter location.
- options (KeywordList) - The provided optional parameters
## Returns
Map
"""
@spec add_optional_params(map(), %{optional(atom) => atom}, keyword()) :: map()
def add_optional_params(request, _, []), do: request
def add_optional_params(request, definitions, [{key, value} | tail]) do
case definitions do
%{^key => location} ->
request
|> add_param(location, key, value)
|> add_optional_params(definitions, tail)
_ ->
add_optional_params(request, definitions, tail)
end
end
@doc """
Add optional parameters to the request
## Parameters
- request (Map) - Collected request options
- location (atom) - Where to put the parameter
- key (atom) - The name of the parameter
- value (any) - The value of the parameter
## Returns
Map
"""
@spec add_param(map(), atom, atom, any()) :: map()
def add_param(request, :body, :body, value), do: Map.put(request, :body, value)
def add_param(request, :body, key, value) do
request
|> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
|> Map.update!(:body, &(Tesla.Multipart.add_field(&1, key, Poison.encode!(value), headers: [{:"Content-Type", "application/json"}])))
end
def add_param(request, :headers, key, value) do
request
|> Tesla.put_header(key, value)
end
def add_param(request, :file, name, path) do
request
|> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
|> Map.update!(:body, &(Tesla.Multipart.add_file(&1, path, name: name)))
end
def add_param(request, :form, name, value) do
request
|> Map.update(:body, %{name => value}, &(Map.put(&1, name, value)))
end
def add_param(request, location, key, value) do
Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
end
@doc """
Due to a bug in httpc, POST, PATCH and PUT requests will fail, if the body is empty
This function will ensure, that the body param is always set
## Parameters
- request (Map) - Collected request options
## Returns
Map
"""
@spec ensure_body(map()) :: map()
def ensure_body(%{body: nil} = request) do
%{request | body: ""}
end
def ensure_body(request) do
Map.put_new(request, :body, "")
end
@doc """
Handle the response for a Tesla request
## Parameters
- arg1 (Tesla.Env.t | term) - The response object
- arg2 (:false | struct | [struct]) - The shape of the struct to deserialize into
## Returns
{:ok, struct} on success
{:error, term} on failure
"""
@spec decode(Tesla.Env.t() | term(), false | struct() | [struct()]) ::
{:ok, struct()} | {:ok, Tesla.Env.t()} | {:error, any}
def decode(%Tesla.Env{} = env, false), do: {:ok, env}
def decode(%Tesla.Env{body: body}, struct), do: Poison.decode(body, as: struct)
def evaluate_response({:ok, %Tesla.Env{} = env}, mapping) do
resolve_mapping(env, mapping)
end
def evaluate_response({:error, _} = error, _), do: error
def resolve_mapping(env, mapping, default \\ nil)
def resolve_mapping(%Tesla.Env{status: status} = env, [{mapping_status, struct} | _], _)
when status == mapping_status do
decode(env, struct)
end
def resolve_mapping(env, [{:default, struct} | tail], _), do: resolve_mapping(env, tail, struct)
def resolve_mapping(env, [_ | tail], struct), do: resolve_mapping(env, tail, struct)
def resolve_mapping(env, [], nil), do: {:error, env}
def resolve_mapping(env, [], struct), do: decode(env, struct)
end
| 27.186747 | 137 | 0.646577 |
9e5ca7c98ad048d11669e54903c6ee3443241fb9 | 1,828 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/advanced_machine_features.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/advanced_machine_features.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/advanced_machine_features.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.AdvancedMachineFeatures do
  @moduledoc """
  Specifies options for controlling advanced machine features. Options that would traditionally be configured in a BIOS belong here. Features that require operating system support may have corresponding entries in the GuestOsFeatures of an Image (e.g., whether or not the OS in the Image supports nested virtualization being enabled or disabled).
  ## Attributes
  * `enableNestedVirtualization` (*type:* `boolean()`, *default:* `nil`) - Whether to enable nested virtualization or not (default is false).
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :enableNestedVirtualization => boolean() | nil
        }
  # field/1 is a macro provided by GoogleApi.Gax.ModelBase that registers the
  # attribute for (de)serialization.
  field(:enableNestedVirtualization)
end
# Delegates Poison decoding to the model's generated decode/2 helper.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.AdvancedMachineFeatures do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.AdvancedMachineFeatures.decode(value, options)
  end
end
# Encodes the model with the generic ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.AdvancedMachineFeatures do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.893617 | 346 | 0.764223 |
9e5cc4495cd0ae886080ce0281fc25cb03f52f30 | 548 | exs | Elixir | mix.exs | IncludeSecurity/safeurl-elixir | 507f5aa0f92709bbbfa250dac156f4d56b4bf02b | [
"MIT"
] | 4 | 2021-05-04T15:02:28.000Z | 2021-08-10T23:13:34.000Z | mix.exs | IncludeSecurity/safeurl-elixir | 507f5aa0f92709bbbfa250dac156f4d56b4bf02b | [
"MIT"
] | 2 | 2021-05-02T06:44:13.000Z | 2021-05-03T13:54:02.000Z | mix.exs | IncludeSecurity/elixir-safeurl | 507f5aa0f92709bbbfa250dac156f4d56b4bf02b | [
"MIT"
] | 1 | 2021-05-02T04:24:38.000Z | 2021-05-02T04:24:38.000Z | defmodule SafeURL.MixProject do
use Mix.Project
def project do
[
app: :safeurl,
version: "0.1.0",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps(),
]
end
def application do
[env: default_configs()]
end
defp default_configs do
[
schemes: ~w[http https],
blacklist_reserved: true,
blacklist: [],
whitelist: [],
]
end
defp deps do
[
{:httpoison, "~> 1.8"},
{:inet_cidr, "~> 1.0"},
{:dns, "~> 2.2"},
]
end
end
| 15.657143 | 42 | 0.509124 |
9e5d2e61272fe5e6f77960585efd9fb7d77a216b | 445 | exs | Elixir | config/config.exs | ohyecloudy/bitcoin_price_scraper | 888eca9641c555e961739f9292d0ddd3d3172b92 | [
"MIT"
] | 1 | 2021-09-06T06:09:11.000Z | 2021-09-06T06:09:11.000Z | config/config.exs | data-miner00/bitcoin_price_scraper | 888eca9641c555e961739f9292d0ddd3d3172b92 | [
"MIT"
] | null | null | null | config/config.exs | data-miner00/bitcoin_price_scraper | 888eca9641c555e961739f9292d0ddd3d3172b92 | [
"MIT"
] | 2 | 2021-06-27T03:35:27.000Z | 2022-03-28T17:38:51.000Z | import Config
# Placeholder credentials — real keys are expected to come from the
# per-environment secret file imported below.
config :bitcoin_price_scraper,
  upbit_access_key: "SUPER_ACCESS_KEY",
  upbit_secret_key: "SUPER_SECRET_KEY",
  # presumably the number of days of history to scrape (~4 years) — confirm
  scrap_days: 4 * 365
# https://github.com/deadtrickster/prometheus-httpd/blob/master/doc/prometheus_httpd.md
config :prometheus, :prometheus_http,
  path: String.to_charlist("/metrics"),
  format: :auto,
  port: 8081
# Optional per-environment secrets (e.g. config/dev.secret.exs).
if File.exists?("config/#{Mix.env()}.secret.exs") do
  import_config "#{Mix.env()}.secret.exs"
end
| 26.176471 | 87 | 0.748315 |
9e5d32aba2668d47deaf6616959bf02ebae14f19 | 7,524 | exs | Elixir | config/config.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | config/config.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | config/config.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | use Mix.Config
import Cog.Config.Helpers
# ========================================================================
# Cog Telemetry - This has been turned off for now, as it is no longer
# a running service to send telemetry data to.
# ========================================================================
config :cog, :telemetry, false
# ========================================================================
# Set this to :unenforcing to globally disable all access rules.
# NOTE: This is a global setting.
# ========================================================================
if System.get_env("DISABLE_RULE_ENFORCEMENT") do
  config :cog, :access_rules, :unenforcing
else
  config :cog, :access_rules, :enforcing
end
# ========================================================================
# Embedded Command Bundle Version (for built-in commands)
# NOTE: Do not change this value unless you know what you're doing.
# ========================================================================
config :cog, :embedded_bundle_version, "0.18.2"
# ========================================================================
# Chat Adapters
# ========================================================================
config :cog, Cog.Chat.Http.Provider, []
config :cog, Cog.Chat.Adapter,
  providers: provider_list(),
  chat: enabled_chat_provider()
config :cog, :enable_spoken_commands, ensure_boolean(System.get_env("ENABLE_SPOKEN_COMMANDS")) || true
config :cog, :message_bus,
  host: System.get_env("COG_MQTT_HOST") || "127.0.0.1",
  port: ensure_integer(System.get_env("COG_MQTT_PORT")) || 1883
# Uncomment the next three lines and edit ssl_cert and ssl_key
# to point to your SSL certificate and key files.
# config :cog, :message_bus,
#   ssl_cert: "public.crt",
#   ssl_key: "secret.key"
# Chat provider APIs may be slow to respond to requests in some cases
# so we set a generous timeout.
config :httpotion, :default_timeout, 30000
# ========================================================================
# Commands, Bundles, and Services
config :cog, :command_prefix, System.get_env("COG_COMMAND_PREFIX") || "!"
config :cog, :previous_command_token, System.get_env("COG_PREVIOUS_COMMAND_TOKEN") || "!!"
config :cog, Cog.Bundle.BundleSup,
  bundle_root: Path.join([File.cwd!, "bundles"])
config :cog, Cog.Command.Pipeline,
  interactive_timeout: {String.to_integer(System.get_env("COG_PIPELINE_TIMEOUT") || "60"), :sec},
  trigger_timeout: {300, :sec}
config :cog, Cog.Command.Service,
  data_path: data_dir("service_data")
config :cog, :custom_template_dir, System.get_env("COG_CUSTOM_TEMPLATE_DIR")
# Set these to zero (0) to disable caching
config :cog, :command_cache_ttl, {60, :sec}
config :cog, :command_rule_ttl, {10, :sec}
config :cog, :template_cache_ttl, {60, :sec}
config :cog, :user_perms_ttl, {10, :sec}
# Enable/disable user self-registration
config :cog, :self_registration, System.get_env("COG_ALLOW_SELF_REGISTRATION") != nil || false
config :cog, :emqttc,
  log_level: :info
# How many levels deep do we expand aliases, '0' disables aliases
config :cog, :max_alias_expansion, 5
# ========================================================================
# Logging
common_metadata = [:module, :line]
common_log_format = "$dateT$time $metadata[$level] $levelpad$message\n"
# NOTE(review): the file backend is registered as :flywheel_log but the
# per-backend block below configures :cog_log — one of these names looks
# stale; confirm which backend id is intended.
config :logger,
  utc_log: true,
  level: :info,
  backends: [:console,
             {LoggerFileBackend, :flywheel_log}]
config :logger, :console,
  metadata: common_metadata,
  format: common_log_format
config :logger, :cog_log,
  metadata: common_metadata,
  format: common_log_format,
  path: data_dir("cog.log")
if System.get_env("COG_SASL_LOG") != nil do
  config :logger,
    handle_sasl_reports: true
end
config :lager, :error_logger_redirect, false
config :lager, :error_logger_whitelist, [Logger.ErrorHandler]
config :lager, :crash_log, false
config :probe, log_directory: data_dir("audit_logs")
# ========================================================================
# Database Setup
config :cog, ecto_repos: [Cog.Repo]
config :cog, Cog.Repo,
  adapter: Ecto.Adapters.Postgres,
  url: (case System.get_env("DATABASE_URL") do
          nil -> "ecto://#{System.get_env("USER")}@localhost/cog_#{Mix.env}"
          url -> url
        end),
  pool_size: ensure_integer(System.get_env("COG_DB_POOL_SIZE")) || 10,
  pool_timeout: ensure_integer(System.get_env("COG_DB_POOL_TIMEOUT")) || 15000,
  timeout: ensure_integer(System.get_env("COG_DB_TIMEOUT")) || 15000,
  parameters: [timezone: 'UTC'],
  loggers: [{Cog.Util.EctoLogger, :log, []}],
  ssl: ensure_boolean(System.get_env("COG_DB_SSL")) || false
# ========================================================================
config :cog, Carrier.Messaging.Connection,
  host: System.get_env("COG_MQTT_HOST") || "127.0.0.1",
  port: ensure_integer(System.get_env("COG_MQTT_PORT")) || 1883
# Uncomment the next three lines and edit ssl_cert to point to your
# SSL certificate.
# Note: SSL certification verification can be disabled by setting
# "ssl: :no_verify". We strongly recommend disabling verification for
# development or debugging ONLY.
#config :cog, Carrier.Messaging.Connection,
#  ssl: true,
#  ssl_cert: "server.crt"
# ========================================================================
# Web Endpoints
config :cog, Cog.Endpoint,
  http: [port: System.get_env("COG_API_PORT") || 4000],
  url: gen_public_url_config(Cog.Endpoint),
  root: Path.dirname(__DIR__),
  debug_errors: false,
  cache_static_lookup: false,
  check_origin: true,
  render_errors: [accepts: ~w(json)],
  pubsub: [name: Carrier.Messaging.Connection,
           adapter: Phoenix.PubSub.PG2]
config :cog, Cog.TriggerEndpoint,
  http: [port: System.get_env("COG_TRIGGER_PORT") || 4001],
  url: gen_public_url_config(Cog.TriggerEndpoint),
  root: Path.dirname(__DIR__),
  debug_errors: false,
  cache_static_lookup: false,
  check_origin: true,
  render_errors: [accepts: ~w(json)]
config :cog, Cog.ServiceEndpoint,
  http: [port: System.get_env("COG_SERVICE_PORT") || 4002],
  url: gen_public_url_config(Cog.ServiceEndpoint),
  root: Path.dirname(__DIR__),
  debug_errors: false,
  cache_static_lookup: false,
  check_origin: true,
  render_errors: [accepts: ~w(json)]
config :cog, :token_lifetime, {1, :week}
config :cog, :token_reap_period, {1, :day}
# Trigger timeouts are defined according to the needs of the
# requestor, which includes network roundtrip time, as well as Cog's
# internal processing. Cog itself can't wait that long to respond, as
# that'll be guaranteed to exceed the HTTP requestor's timeout. As
# such, we'll incorporate a buffer into our internal timeout. Defined
# as seconds
config :cog, :trigger_timeout_buffer, (System.get_env("COG_TRIGGER_TIMEOUT_BUFFER") || 2)
# ========================================================================
# Emails
config :cog, Cog.Mailer,
  adapter: Bamboo.SMTPAdapter,
  server: System.get_env("COG_SMTP_SERVER"),
  port: ensure_integer(System.get_env("COG_SMTP_PORT")),
  username: System.get_env("COG_SMTP_USERNAME"),
  password: System.get_env("COG_SMTP_PASSWORD"),
  tls: :if_available, # can be `:always` or `:never`
  ssl: (ensure_boolean(System.get_env("COG_SMTP_SSL")) || false),
  retries: (System.get_env("COG_SMTP_RETRIES") || 1)
config :cog, :email_from, System.get_env("COG_EMAIL_FROM")
config :cog, :password_reset_base_url, System.get_env("COG_PASSWORD_RESET_BASE_URL")
# Provider- and environment-specific settings are layered on last so they
# can override the defaults above.
import_config "slack.exs"
import_config "hipchat.exs"
import_config "#{Mix.env}.exs"
| 34.833333 | 102 | 0.6353 |
9e5d5f925388af189f6f403257fb3d6627b4ff6a | 1,345 | exs | Elixir | deps/cidr/mix.exs | lgandersen/jocker_dist | b5e676f8d9e60bbc8bc7a82ccd1e05389f2cd5b5 | [
"BSD-2-Clause"
] | null | null | null | deps/cidr/mix.exs | lgandersen/jocker_dist | b5e676f8d9e60bbc8bc7a82ccd1e05389f2cd5b5 | [
"BSD-2-Clause"
] | null | null | null | deps/cidr/mix.exs | lgandersen/jocker_dist | b5e676f8d9e60bbc8bc7a82ccd1e05389f2cd5b5 | [
"BSD-2-Clause"
] | null | null | null | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
defmodule CIDR.Mixfile do
  use Mix.Project
  # Package version, reused in the ExDoc configuration below.
  @version "1.1.0"
  def project do
    [
      app: :cidr,
      elixir: ">= 1.3.0",
      deps: [
        {:credo, "~> 0.4", only: [:dev, :test]},
        {:earmark, "~> 1.0", only: [:dev, :docs]},
        {:ex_doc, "~> 0.13", only: [:dev, :docs]},
        {:excoveralls, "~> 0.5", only: [:dev, :test]},
        {:inch_ex, "~> 0.5", only: :docs}
      ],
      description: "Classless Inter-Domain Routing (CIDR) for Elixir",
      docs: [
        main: "CIDR",
        source_ref: "v#{@version}",
        source_url: "https://github.com/c-rack/cidr-elixir"
      ],
      package: package(),
      test_coverage: [tool: ExCoveralls],
      version: @version
    ]
  end
  # The library starts no OTP applications of its own.
  def application do
    [applications: []]
  end
  # Hex package metadata.
  defp package do
    %{
      maintainers: [
        "Constantin Rack",
        "Laurens Duijvesteijn"
      ],
      licenses: ["Mozilla Public License 2.0"],
      links: %{
        "Changelog" => "https://github.com/c-rack/cidr-elixir/blob/master/CHANGELOG.md",
        "GitHub" => "https://github.com/c-rack/cidr-elixir"
      }
    }
  end
end
| 25.865385 | 88 | 0.537546 |
9e5d653776abcc4239e7cb3c07152fd0d866d82e | 135 | exs | Elixir | 2021/test/aoc001_test.exs | axler8r/aoc | d36e59a60e6062e1c76486034108c7f22ddbd9aa | [
"BSD-3-Clause"
] | null | null | null | 2021/test/aoc001_test.exs | axler8r/aoc | d36e59a60e6062e1c76486034108c7f22ddbd9aa | [
"BSD-3-Clause"
] | null | null | null | 2021/test/aoc001_test.exs | axler8r/aoc | d36e59a60e6062e1c76486034108c7f22ddbd9aa | [
"BSD-3-Clause"
] | null | null | null | defmodule Aoc001Test do
use ExUnit.Case
doctest Aoc001
test "greets the world" do
assert Aoc001.hello() == :world
end
end
| 15 | 35 | 0.703704 |
9e5ddf4b37bc38c56b08f4823e81fc723a341ff7 | 2,001 | exs | Elixir | test/lib/type/g2g_types_test.exs | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:11.000Z | 2020-08-27T18:43:11.000Z | test/lib/type/g2g_types_test.exs | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | null | null | null | test/lib/type/g2g_types_test.exs | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:21.000Z | 2020-08-27T18:43:21.000Z | # Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
defmodule Antikythera.G2gTypesTest do
  use ExUnit.Case
  # G2gRequest.new/1 requires :method and :path; optional fields default to
  # empty values, and invalid/missing fields yield tagged error tuples that
  # name the offending struct field.
  test "G2gRequest: new/1" do
    alias Antikythera.G2gRequest, as: GRq
    assert GRq.new([method: :get, path: "/"]) == {:ok, %GRq{body: "", cookies: %{}, headers: %{}, method: :get, path: "/", query_params: %{}}}
    assert GRq.new([method: :get, path: "/", headers: %{"x" => "y"}]) == {:ok, %GRq{body: "", cookies: %{}, headers: %{"x" => "y"}, method: :get, path: "/", query_params: %{}}}
    assert GRq.new([]) == {:error, {:value_missing, [GRq, {Antikythera.Http.Method , :method }]}}
    assert GRq.new([method: :get]) == {:error, {:value_missing, [GRq, {Antikythera.EncodedPath , :path }]}}
    assert GRq.new([path: "/"]) == {:error, {:value_missing, [GRq, {Antikythera.Http.Method , :method }]}}
    assert GRq.new([method: :get, path: "without_slash"]) == {:error, {:invalid_value, [GRq, {Antikythera.EncodedPath , :path }]}}
    assert GRq.new([method: :get, path: "/", headers: "not map"]) == {:error, {:invalid_value, [GRq, {Antikythera.Http.Headers, :headers}]}}
  end
  # G2gResponse.new/1 requires only :status (an integer); type mismatches are
  # reported as :invalid_value and a missing status as :value_missing.
  test "G2gResponse: new/1" do
    alias Antikythera.G2gResponse, as: GRs
    assert GRs.new([status: 200]) == {:ok, %GRs{status: 200, headers: %{}, cookies: %{}, body: ""}}
    assert GRs.new([status: 200, body: "valid body"]) == {:ok, %GRs{status: 200, headers: %{}, cookies: %{}, body: "valid body", }}
    assert GRs.new([status: 200, headers: "not_map"]) == {:error, {:invalid_value, [GRs, {Antikythera.Http.Headers , :headers}]}}
    assert GRs.new([status: :ok]) == {:error, {:invalid_value, [GRs, {Antikythera.Http.Status.Int, :status }]}}
    assert GRs.new([]) == {:error, {:value_missing, [GRs, {Antikythera.Http.Status.Int, :status }]}}
  end
end
| 76.961538 | 176 | 0.537231 |
9e5de9007b713ab604c18c9433a8d18dc6624a41 | 2,204 | exs | Elixir | config/prod.exs | Yathi/emc-backend-phoenix | a14cfb38a2b698bc5bd7107a6466383a7b91053a | [
"MIT"
] | null | null | null | config/prod.exs | Yathi/emc-backend-phoenix | a14cfb38a2b698bc5bd7107a6466383a7b91053a | [
"MIT"
] | null | null | null | config/prod.exs | Yathi/emc-backend-phoenix | a14cfb38a2b698bc5bd7107a6466383a7b91053a | [
"MIT"
use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# EmcBackendWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
# Production endpoint: actual HTTP settings are resolved at runtime in
# EmcBackendWeb.Endpoint.init/2 because load_from_system_env is true.
# NOTE(review): "example.com" looks like a placeholder host — confirm the
# real production domain is configured before deploying.
config :emc_backend, EmcBackendWeb.Endpoint,
  load_from_system_env: true,
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :emc_backend, EmcBackendWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :emc_backend, EmcBackendWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :emc_backend, EmcBackendWeb.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.907692 | 67 | 0.725499 |
9e5e03e1a8507306b4f6ea6ebebdf0939e88f204 | 1,280 | exs | Elixir | mix.exs | GunnarPDX/keyword_parser | b6013a5c156672761c71c5b927874c3d9b5567c3 | [
"MIT"
] | 1 | 2021-08-17T02:31:07.000Z | 2021-08-17T02:31:07.000Z | mix.exs | GunnarPDX/keyword_parser | b6013a5c156672761c71c5b927874c3d9b5567c3 | [
"MIT"
] | null | null | null | mix.exs | GunnarPDX/keyword_parser | b6013a5c156672761c71c5b927874c3d9b5567c3 | [
"MIT"
defmodule Keywords.MixProject do
  use Mix.Project

  # Mix project definition for the `keywords` package: a keyword parser
  # backed by a Rust crate loaded through Rustler.
  def project do
    [
      app: :keywords,
      version: "0.2.0",
      elixir: "~> 1.12",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: description(),
      package: package(),
      compilers: Mix.compilers(),
      rustler_crates: rustler_crates(),
      name: "keywords",
      source_url: "https://github.com/GunnarPDX/keyword_parser"
    ]
  end

  # OTP application: starts Keywords.Application's supervision tree.
  def application do
    [
      extra_applications: [:logger],
      mod: {Keywords.Application, []}
    ]
  end

  # Runtime and tooling dependencies.
  defp deps do
    [
      {:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
      {:rustler, "~> 0.22.0"}
    ]
  end

  defp description() do
    """
    Parses keywords from strings.
    """
  end

  # Hex package metadata.
  defp package() do
    [
      # These are the default files included in the package
      files: ~w(lib .formatter.exs mix.exs README* LICENSE*),
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/GunnarPDX/keyword_parser"}
    ]
  end

  # The native parser crate; compiled in release mode only for prod builds.
  defp rustler_crates do
    [
      keywords: [path: "native/parser", mode: if(Mix.env() == :prod, do: :release, else: :debug)]
    ]
  end
end
| 22.068966 | 97 | 0.585938 |
9e5e5623ce4ff19342b8ff66ecb0aa3f67f73b2f | 9,160 | ex | Elixir | lib/aws/generated/polly.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/polly.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/polly.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.Polly do
  @moduledoc """
  Amazon Polly is a web service that makes it easy to synthesize speech from text.

  The Amazon Polly service provides API operations for synthesizing high-quality
  speech from plain text and Speech Synthesis Markup Language (SSML), along with
  managing pronunciations lexicons that enable you to get the best results for
  your application domain.
  """

  alias AWS.Client
  alias AWS.Request

  # Static service descriptor consumed by AWS.Request when signing and
  # routing calls.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2016-06-10",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "polly",
      global?: false,
      protocol: "rest-json",
      service_id: "Polly",
      signature_version: "v4",
      signing_name: "polly",
      target_prefix: nil
    }
  end

  @doc """
  Deletes the specified pronunciation lexicon stored in an AWS Region.

  A lexicon which has been deleted is not available for speech synthesis, nor is
  it possible to retrieve it using either the `GetLexicon` or `ListLexicon` APIs.

  For more information, see [Managing Lexicons](https://docs.aws.amazon.com/polly/latest/dg/managing-lexicons.html).
  """
  def delete_lexicon(%Client{} = client, name, input, options \\ []) do
    request(client, :delete, "/v1/lexicons/#{URI.encode(name)}", [], input, options)
  end

  @doc """
  Returns the list of voices that are available for use when requesting speech
  synthesis.

  Each voice speaks a specified language, is either male or female, and is
  identified by an ID, which is the ASCII version of the voice name.

  When synthesizing speech ( `SynthesizeSpeech` ), you provide the voice ID for
  the voice you want from the list of voices returned by `DescribeVoices`.

  For example, you want your news reader application to read news in a specific
  language, but giving a user the option to choose the voice. Using the
  `DescribeVoices` operation you can provide the user with a list of available
  voices to select from.

  You can optionally specify a language code to filter the available voices. For
  example, if you specify `en-US`, the operation returns a list of all available
  US English voices.

  This operation requires permissions to perform the `polly:DescribeVoices`
  action.
  """
  def describe_voices(
        %Client{} = client,
        engine \\ nil,
        include_additional_language_codes \\ nil,
        language_code \\ nil,
        next_token \\ nil,
        options \\ []
      ) do
    query_params =
      []
      |> maybe_add_param("NextToken", next_token)
      |> maybe_add_param("LanguageCode", language_code)
      |> maybe_add_param("IncludeAdditionalLanguageCodes", include_additional_language_codes)
      |> maybe_add_param("Engine", engine)

    request(client, :get, "/v1/voices", query_params, nil, options)
  end

  @doc """
  Returns the content of the specified pronunciation lexicon stored in an AWS
  Region.

  For more information, see [Managing Lexicons](https://docs.aws.amazon.com/polly/latest/dg/managing-lexicons.html).
  """
  def get_lexicon(%Client{} = client, name, options \\ []) do
    request(client, :get, "/v1/lexicons/#{URI.encode(name)}", [], nil, options)
  end

  @doc """
  Retrieves a specific SpeechSynthesisTask object based on its TaskID.

  This object contains information about the given speech synthesis task,
  including the status of the task, and a link to the S3 bucket containing the
  output of the task.
  """
  def get_speech_synthesis_task(%Client{} = client, task_id, options \\ []) do
    request(client, :get, "/v1/synthesisTasks/#{URI.encode(task_id)}", [], nil, options)
  end

  @doc """
  Returns a list of pronunciation lexicons stored in an AWS Region.

  For more information, see [Managing Lexicons](https://docs.aws.amazon.com/polly/latest/dg/managing-lexicons.html).
  """
  def list_lexicons(%Client{} = client, next_token \\ nil, options \\ []) do
    query_params = maybe_add_param([], "NextToken", next_token)

    request(client, :get, "/v1/lexicons", query_params, nil, options)
  end

  @doc """
  Returns a list of SpeechSynthesisTask objects ordered by their creation date.

  This operation can filter the tasks by their status, for example, allowing users
  to list only tasks that are completed.
  """
  def list_speech_synthesis_tasks(
        %Client{} = client,
        max_results \\ nil,
        next_token \\ nil,
        status \\ nil,
        options \\ []
      ) do
    query_params =
      []
      |> maybe_add_param("Status", status)
      |> maybe_add_param("NextToken", next_token)
      |> maybe_add_param("MaxResults", max_results)

    request(client, :get, "/v1/synthesisTasks", query_params, nil, options)
  end

  @doc """
  Stores a pronunciation lexicon in an AWS Region.

  If a lexicon with the same name already exists in the region, it is overwritten
  by the new lexicon. Lexicon operations have eventual consistency, therefore, it
  might take some time before the lexicon is available to the SynthesizeSpeech
  operation.

  For more information, see [Managing Lexicons](https://docs.aws.amazon.com/polly/latest/dg/managing-lexicons.html).
  """
  def put_lexicon(%Client{} = client, name, input, options \\ []) do
    request(client, :put, "/v1/lexicons/#{URI.encode(name)}", [], input, options)
  end

  @doc """
  Allows the creation of an asynchronous synthesis task, by starting a new
  `SpeechSynthesisTask`.

  This operation requires all the standard information needed for speech
  synthesis, plus the name of an Amazon S3 bucket for the service to store the
  output of the synthesis task and two optional parameters (OutputS3KeyPrefix and
  SnsTopicArn). Once the synthesis task is created, this operation will return a
  SpeechSynthesisTask object, which will include an identifier of this task as
  well as the current status.
  """
  def start_speech_synthesis_task(%Client{} = client, input, options \\ []) do
    request(client, :post, "/v1/synthesisTasks", [], input, options)
  end

  @doc """
  Synthesizes UTF-8 input, plain text or SSML, to a stream of bytes.

  SSML input must be valid, well-formed SSML. Some alphabets might not be
  available with all the voices (for example, Cyrillic might not be read at all by
  English voices) unless phoneme mapping is used. For more information, see [How it
  Works](https://docs.aws.amazon.com/polly/latest/dg/how-text-to-speech-works.html).
  """
  def synthesize_speech(%Client{} = client, input, options \\ []) do
    # Surface these HTTP response headers as named fields on the result.
    options =
      Keyword.put(
        options,
        :response_header_parameters,
        [
          {"Content-Type", "ContentType"},
          {"x-amzn-RequestCharacters", "RequestCharacters"}
        ]
      )

    request(client, :post, "/v1/speech", [], input, options)
  end

  # Every Polly operation calls Request.request_rest/9 with this module's
  # metadata, an empty header list and a 200 success code; that shared
  # plumbing lives here so each operation only states what varies
  # (method, path, query params and body).
  defp request(%Client{} = client, method, url_path, query_params, input, options) do
    Request.request_rest(
      client,
      metadata(),
      method,
      url_path,
      query_params,
      [],
      input,
      options,
      200
    )
  end

  # Prepends {name, value} to a query-parameter list, skipping nil values:
  # optional arguments default to nil and must not be sent at all.
  defp maybe_add_param(query_params, _name, nil), do: query_params
  defp maybe_add_param(query_params, name, value), do: [{name, value} | query_params]
end
| 25.658263 | 116 | 0.641921 |
9e5e88e84bd70ab1cc6f8b49f738ebe704cd7a64 | 1,447 | ex | Elixir | lib/bittrex/order_cancel_result.ex | straw-hat-team/bittrex | 3b6d5c9559d473b685bec7e70d2cd58501805ded | [
"MIT"
] | 9 | 2017-11-17T21:07:50.000Z | 2018-01-19T09:53:03.000Z | lib/bittrex/order_cancel_result.ex | straw-hat-team/bittrex | 3b6d5c9559d473b685bec7e70d2cd58501805ded | [
"MIT"
] | 27 | 2017-12-13T12:21:10.000Z | 2019-11-01T10:25:39.000Z | lib/bittrex/order_cancel_result.ex | straw-hat-llc/elixir_bittrex | 3b6d5c9559d473b685bec7e70d2cd58501805ded | [
"MIT"
defmodule Bittrex.OrderCancelResult do
  @moduledoc """
  A Bittrex Order Cancel Result.
  """

  alias StrawHat.Response

  @typedoc """
  - `id`: unique ID of this order.
  - `fill_quantity`: fill quantity.
  - `commission`: commission.
  - `proceeds`: proceeds.
  - `client_order_id`: client-provided identifier for advanced order tracking.
  - `status`: order status.
  - `updated_at`: timestamp (UTC) of last order update.
  - `closed_at`: timestamp (UTC) when this order was closed.
  """
  @type t :: %__MODULE__{
          id: String.t(),
          fill_quantity: number(),
          commission: number(),
          proceeds: number(),
          client_order_id: String.t(),
          status: String.t(),
          updated_at: NaiveDateTime.t(),
          closed_at: NaiveDateTime.t()
        }

  defstruct [
    :id,
    :fill_quantity,
    :commission,
    :proceeds,
    :client_order_id,
    :status,
    :updated_at,
    :closed_at
  ]

  # Maps the raw camelCase API payload onto the struct, parsing the two
  # timestamp fields through Bittrex.format_datetime/1.
  @doc false
  def new(data) do
    struct(__MODULE__,
      id: data["id"],
      fill_quantity: data["fillQuantity"],
      commission: data["commission"],
      proceeds: data["proceeds"],
      client_order_id: data["clientOrderId"],
      status: data["status"],
      updated_at: Bittrex.format_datetime(data["updatedAt"]),
      closed_at: Bittrex.format_datetime(data["closedAt"])
    )
  end

  # Builds the struct and wraps it in an ok-tagged StrawHat response.
  @doc false
  def transform_response(data) do
    Response.ok(new(data))
  end
end
| 23.721311 | 78 | 0.612301 |
9e5e93c907c951b2ddd54444556bff83a85e9b5a | 92 | ex | Elixir | lib/nexmo/phone.ex | CarouselSMS/nexmo_elixir | f9be807bb5198a165b6453bc0aa5cdb6a3dc1a05 | [
"MIT"
] | null | null | null | lib/nexmo/phone.ex | CarouselSMS/nexmo_elixir | f9be807bb5198a165b6453bc0aa5cdb6a3dc1a05 | [
"MIT"
] | null | null | null | lib/nexmo/phone.ex | CarouselSMS/nexmo_elixir | f9be807bb5198a165b6453bc0aa5cdb6a3dc1a05 | [
"MIT"
defmodule Nexmo.Record do
  require Logger

  # NOTE(review): both functions are empty placeholders (each returns nil),
  # and the module is named Nexmo.Record while the file is phone.ex —
  # confirm which name is intended before implementing.
  def submit, do: nil

  def encode, do: nil
end
| 10.222222 | 25 | 0.717391 |
9e5e95b4bb5df74243631248c5fb6e856479040b | 1,618 | ex | Elixir | lib/dapp_demo/server_registry.ex | arpnetwork/dapp_demo | ff87809625ae6053378ddffd63e27c6225d93370 | [
"Apache-2.0"
] | null | null | null | lib/dapp_demo/server_registry.ex | arpnetwork/dapp_demo | ff87809625ae6053378ddffd63e27c6225d93370 | [
"Apache-2.0"
] | null | null | null | lib/dapp_demo/server_registry.ex | arpnetwork/dapp_demo | ff87809625ae6053378ddffd63e27c6225d93370 | [
"Apache-2.0"
defmodule DappDemo.ServerRegistry do
  @moduledoc false

  use GenServer

  ## Client API

  def start_link(_opts) do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  # Returns {:ok, pid} for the server registered under `address`, starting
  # and registering a new DappDemo.Server first when none exists. Start
  # failures are forwarded as the supervisor's error tuple.
  def create(address, amount) do
    GenServer.call(__MODULE__, {:create, address, amount})
  end

  # Direct read of the public ETS registry table (no GenServer round-trip).
  # Returns {:ok, pid} or :error.
  def lookup(address) do
    case :ets.lookup(__MODULE__, address) do
      [{^address, pid}] -> {:ok, pid}
      [] -> :error
    end
  end

  # Returns every {address, pid} pair currently registered.
  def lookup_all() do
    :ets.match_object(__MODULE__, {:"$1", :"$2"})
  end

  ## Server callbacks

  @impl true
  def init(_opts) do
    server_tab =
      :ets.new(__MODULE__, [
        :named_table,
        :public,
        write_concurrency: true,
        read_concurrency: true
      ])

    # to save all device info
    :ets.new(DappDemo.Device, [
      :named_table,
      :public,
      write_concurrency: true,
      read_concurrency: true
    ])

    # to save all device app package
    :ets.new(DappDemo.DevicePackages, [
      :bag,
      :named_table,
      :public,
      write_concurrency: true,
      read_concurrency: true
    ])

    {:ok, {server_tab}}
  end

  @impl true
  def handle_call({:create, address, amount}, _from, {server_tab} = state) do
    case lookup(address) do
      {:ok, pid} ->
        # Fix: reply with the same {:ok, pid} shape as the newly-started
        # branch below. Previously the bare pid was returned here (the
        # {:ok, data} pattern destructured the tuple), so create/2 gave
        # callers two different shapes depending on registration state.
        {:reply, {:ok, pid}, state}

      :error ->
        case DynamicSupervisor.start_child(
               DappDemo.DSupervisor,
               {DappDemo.Server, [address: address, amount: amount, server_tab: server_tab]}
             ) do
          {:ok, pid} ->
            :ets.insert(server_tab, {address, pid})
            {:reply, {:ok, pid}, state}

          err ->
            # Any start error (e.g. {:error, {:already_started, pid}}) is
            # forwarded to the caller untouched.
            {:reply, err, state}
        end
    end
  end
end
| 21.864865 | 92 | 0.573548 |
9e5ebf2c4538c8c6fae8de167ec5ce644e2aba80 | 1,086 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/crop_hints_params.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/crop_hints_params.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/crop_hints_params.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.CropHintsParams do
  @moduledoc """
  Parameters for crop hints annotation request.
  """

  # Generated Poison-encodable model with a single optional field; the
  # quoted atom keeps the API's camelCase wire name.
  @derive [Poison.Encoder]
  defstruct [
    :"aspectRatios"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.CropHintsParams do
  # No field needs post-decode coercion for this model, so the raw
  # Poison output is returned unchanged.
  def decode(value, _options) do
    value
  end
end
| 29.351351 | 77 | 0.751381 |
9e5f020b6309d0cd7f03642436964859d12cdf03 | 1,122 | exs | Elixir | mix.exs | faithandbrave/elixir-phoenix-chunked-response-example | 3390aba36a9fe9fa55316205021eef1f566b57a6 | [
"MIT"
] | 3 | 2016-12-16T03:00:32.000Z | 2020-09-17T16:04:58.000Z | mix.exs | faithandbrave/elixir-phoenix-chunked-response-example | 3390aba36a9fe9fa55316205021eef1f566b57a6 | [
"MIT"
] | null | null | null | mix.exs | faithandbrave/elixir-phoenix-chunked-response-example | 3390aba36a9fe9fa55316205021eef1f566b57a6 | [
"MIT"
defmodule ChunkServer.Mixfile do
  use Mix.Project

  # Phoenix 1.2-era Mix project for the chunked-response example app.
  def project do
    [
      app: :chunk_server,
      version: "0.0.1",
      elixir: "~> 1.2",
      elixirc_paths: elixirc_paths(Mix.env),
      compilers: [:phoenix, :gettext] ++ Mix.compilers,
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      deps: deps()
    ]
  end

  # OTP application: boots the ChunkServer supervision tree; the explicit
  # :applications list is the pre-Elixir-1.4 style.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {ChunkServer, []},
      applications: [:phoenix, :phoenix_pubsub, :phoenix_html, :cowboy, :logger, :gettext, :porcelain]
    ]
  end

  # Tests also compile the support helpers; all other envs use lib/ and web/.
  defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
  defp elixirc_paths(_), do: ["lib", "web"]

  # Project dependencies; type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.2.1"},
      {:phoenix_pubsub, "~> 1.0"},
      {:phoenix_html, "~> 2.6"},
      {:phoenix_live_reload, "~> 1.0", only: :dev},
      {:gettext, "~> 0.11"},
      {:cowboy, "~> 1.0"},
      {:porcelain, "~> 2.0"}
    ]
  end
end
| 28.05 | 102 | 0.606952 |
9e5f5f139abda7b2f5f3b984ce7edc4c781a5d52 | 868 | ex | Elixir | 2021/elixir/apps/day5/lib/part1.ex | ColdOrange/advent_of_code | 18c4054558fdcaf123c8d8057b6de23894488a73 | [
"MIT"
] | null | null | null | 2021/elixir/apps/day5/lib/part1.ex | ColdOrange/advent_of_code | 18c4054558fdcaf123c8d8057b6de23894488a73 | [
"MIT"
] | null | null | null | 2021/elixir/apps/day5/lib/part1.ex | ColdOrange/advent_of_code | 18c4054558fdcaf123c8d8057b6de23894488a73 | [
"MIT"
defmodule Day5Part1 do
  @moduledoc """
  Day5 - Part1: counts grid points covered by at least two horizontal or
  vertical vent lines; diagonal segments are ignored in part 1.
  """

  @doc """
  Parses the puzzle input (one `"x1,y1 -> x2,y2"` segment per line) and
  returns how many points are covered by two or more segments.
  """
  def solve(input) do
    input
    |> String.split("\n", trim: true)
    |> Enum.flat_map(&segment_points/1)
    |> Enum.reduce(%{}, fn point, counts -> Map.update(counts, point, 1, &(&1 + 1)) end)
    |> Enum.count(fn {_point, hits} -> hits >= 2 end)
  end

  # Turns one "x1,y1 -> x2,y2" line into the list of covered points.
  defp segment_points(line) do
    [from, to] = line |> String.split(" -> ") |> Enum.map(&coords/1)
    expand(from, to)
  end

  # "x,y" -> {x, y}
  defp coords(pair) do
    pair
    |> String.split(",")
    |> Enum.map(&String.to_integer/1)
    |> List.to_tuple()
  end

  # Horizontal/vertical segments expand to every covered point; anything
  # else (a diagonal) contributes nothing in part 1.
  defp expand({x1, y1}, {x2, y2}) when x1 == x2 or y1 == y2 do
    for x <- min(x1, x2)..max(x1, x2),
        y <- min(y1, y2)..max(y1, y2),
        do: {x, y}
  end

  defp expand(_, _), do: []
end
| 22.842105 | 72 | 0.5553 |
9e5f6c7c6c1aa9dd7a19cf99b3f2b3420812a3b0 | 1,574 | ex | Elixir | clients/double_click_search/lib/google_api/double_click_search/v2/model/custom_metric.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/double_click_search/lib/google_api/double_click_search/v2/model/custom_metric.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | null | null | null | clients/double_click_search/lib/google_api/double_click_search/v2/model/custom_metric.ex | hauptbenutzer/elixir-google-api | 7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DoubleClickSearch.V2.Model.CustomMetric do
  @moduledoc """
  A message containing the custom metric.

  ## Attributes

  - name (String.t): Custom metric name. Defaults to: `null`.
  - value (float()): Custom metric numeric value. Defaults to: `null`.
  """

  # Gax model base supplies the field/1 macro plus decode/encode plumbing.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :name => any(),
          :value => any()
        }

  field(:name)
  field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.DoubleClickSearch.V2.Model.CustomMetric do
  # Delegates to the Gax-generated decode/2 on the model module.
  def decode(value, options) do
    GoogleApi.DoubleClickSearch.V2.Model.CustomMetric.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.DoubleClickSearch.V2.Model.CustomMetric do
  # Shared Gax encoder handles field serialization for all models.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.862745 | 81 | 0.733164 |
9e5fa5044fc018c896a340d3aa4b9f9f67424ad3 | 697 | exs | Elixir | test/message_test.exs | gabrielelana/twitter-kata | eb0406a64b208dd4e078a844e51ab719b0944f42 | [
"MIT"
] | 2 | 2015-07-19T21:33:15.000Z | 2015-07-23T10:26:23.000Z | test/message_test.exs | gabrielelana/twitter-kata | eb0406a64b208dd4e078a844e51ab719b0944f42 | [
"MIT"
] | null | null | null | test/message_test.exs | gabrielelana/twitter-kata | eb0406a64b208dd4e078a844e51ab719b0944f42 | [
"MIT"
defmodule Twitter.MessageTest do
  use ExUnit.Case

  alias Twitter.Message
  alias Twitter.Clock

  # Both tests previously duplicated the same fixture inline; it now lives
  # in a single setup: a message from Alice and a reference time three
  # minutes after it was posted.
  setup do
    posted_at = Clock.now
    three_minutes_later = Clock.at(posted_at, {:after, 3, :minutes})
    message = %Message{at: posted_at, from: "Alice", text: "Programming is fun!"}
    {:ok, message: message, now: three_minutes_later}
  end

  test "format", %{message: message, now: now} do
    assert Message.format(message, now) == "Programming is fun! (3 minutes ago)"
  end

  test "format for the wall", %{message: message, now: now} do
    assert Message.format_for_wall(message, now) == "Alice - Programming is fun! (3 minutes ago)"
  end
end
| 34.85 | 109 | 0.69297 |
9e5faa58eb7b58177e7c259e8c0d1177d1fd145b | 1,079 | ex | Elixir | lib/rocketpay/user.ex | leandroslc/rocketpay | 92e807998ca48e842a30f10be681481271905691 | [
"MIT"
] | null | null | null | lib/rocketpay/user.ex | leandroslc/rocketpay | 92e807998ca48e842a30f10be681481271905691 | [
"MIT"
] | null | null | null | lib/rocketpay/user.ex | leandroslc/rocketpay | 92e807998ca48e842a30f10be681481271905691 | [
"MIT"
defmodule Rocketpay.User do
  use Ecto.Schema

  import Ecto.Changeset

  alias Ecto.Changeset
  alias Rocketpay.Account

  @primary_key {:id, :binary_id, autogenerate: true}
  @required_params [:name, :age, :email, :password, :nickname]

  schema "users" do
    field :name, :string
    field :age, :integer
    field :email, :string
    # Plain-text password is virtual only: it is hashed into
    # :password_hash and never persisted itself.
    field :password, :string, virtual: true
    field :password_hash, :string
    field :nickname, :string

    has_one :account, Account

    timestamps()
  end

  @doc """
  Builds a creation changeset: every field is required, passwords must be
  at least 6 characters, users must be 18+, emails must contain "@", and
  email/nickname are unique at the database level.
  """
  def changeset(params) do
    %__MODULE__{}
    |> cast(params, @required_params)
    |> validate_required(@required_params)
    |> validate_length(:password, min: 6)
    |> validate_number(:age, greater_than_or_equal_to: 18)
    |> validate_format(:email, ~r/@/)
    |> unique_constraint([:email])
    |> unique_constraint([:nickname])
    |> put_password_hash()
  end

  # Hashes the virtual password into password_hash, but only when the
  # changeset is valid and the password actually changed.
  defp put_password_hash(%Changeset{valid?: true, changes: %{password: password}} = changeset) do
    change(changeset, Bcrypt.add_hash(password))
  end

  defp put_password_hash(changeset), do: changeset
end
| 25.093023 | 97 | 0.690454 |
9e5fba7caa092a619d2561aed7155eeeda62d03b | 422 | ex | Elixir | lib/philopets/sellers/seller.ex | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
] | null | null | null | lib/philopets/sellers/seller.ex | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
] | null | null | null | lib/philopets/sellers/seller.ex | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
defmodule Philopets.Sellers.Seller do
  use Philopets.Schema

  import Ecto.Changeset

  @required_fields ~w[name]a
  @optional_fields ~w[display_pic]a

  schema "sellers" do
    field :name, :string
    # NOTE(review): display_pic is an :id column — presumably a foreign key
    # to an image/upload record; confirm and consider belongs_to.
    field :display_pic, :id

    timestamps()
  end

  @doc false
  def changeset(profile, attrs) do
    permitted = @required_fields ++ @optional_fields

    profile
    |> cast(attrs, permitted)
    |> validate_required(@required_fields)
  end
end
| 19.181818 | 56 | 0.706161 |
9e5fd89d2b00be476bc54b55dacdbdd502e9c9eb | 1,395 | exs | Elixir | mix.exs | acutario/ravenx_email | ea4f079e5345b54129de7003af396be0f4ce3886 | [
"MIT"
] | null | null | null | mix.exs | acutario/ravenx_email | ea4f079e5345b54129de7003af396be0f4ce3886 | [
"MIT"
] | null | null | null | mix.exs | acutario/ravenx_email | ea4f079e5345b54129de7003af396be0f4ce3886 | [
"MIT"
defmodule RavenxEmail.MixProject do
  use Mix.Project

  # Mix project for the ravenx_email package: a Ravenx notification
  # strategy that delivers over e-mail via Bamboo/SMTP.
  def project do
    [
      app: :ravenx_email,
      version: "0.1.0",
      elixir: "~> 1.4",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      docs: docs()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:ravenx, "~> 1.1 or ~> 2.0"},
      {:bamboo, "~> 0.8 or ~> 1.0"},
      {:bamboo_smtp, "~> 1.4 or ~> 1.5"},
      {:ex_doc, ">= 0.0.0", only: :dev},
      {:mock, "~> 0.3.0", only: :test},
      {:credo, "~> 0.8", only: [:dev, :test], runtime: false}
    ]
  end

  # ExDoc configuration: the README doubles as the docs landing page.
  defp docs do
    [
      main: "readme",
      source_url: "https://github.com/acutario/ravenx_email",
      extras: ["README.md"]
    ]
  end

  defp description do
    """
    Ravenx strategy to send notifications using e-mail.
    """
  end

  # Hex package metadata; these are the default files included in the package.
  defp package do
    [
      name: :ravenx_email,
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Óscar de Arriba"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/acutario/ravenx_email"}
    ]
  end
end
| 22.5 | 70 | 0.546953 |
9e5fdcd1aabba94cff36d38c2d9129e387652c7b | 87 | ex | Elixir | tests/fixtures/elixir.ex | mangs/leasot | e29c4ef7349a310da19c4f51a50f9044985ac49e | [
"MIT"
] | null | null | null | tests/fixtures/elixir.ex | mangs/leasot | e29c4ef7349a310da19c4f51a50f9044985ac49e | [
"MIT"
] | null | null | null | tests/fixtures/elixir.ex | mangs/leasot | e29c4ef7349a310da19c4f51a50f9044985ac49e | [
"MIT"
defmodule Leasot do
  # NOTE(review): this file is a parser test fixture — the task comment
  # below is the data under test, so it is kept verbatim.
  # TODO: add "!"
  def hello, do: IO.inspect("world")
end
| 12.428571 | 23 | 0.62069 |
9e5fe57fb121824853f45e450c5bda4fb0360b8e | 469 | exs | Elixir | config/test.exs | shawnonthenet/taskasync.com | 7ebd8ec1afc9fa5fa8708568b4ce9cf75968ae41 | [
"MIT"
] | null | null | null | config/test.exs | shawnonthenet/taskasync.com | 7ebd8ec1afc9fa5fa8708568b4ce9cf75968ae41 | [
"MIT"
] | null | null | null | config/test.exs | shawnonthenet/taskasync.com | 7ebd8ec1afc9fa5fa8708568b4ce9cf75968ae41 | [
"MIT"
use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :taskasync, TaskasyncWeb.Endpoint,
  http: [port: 4001],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Configure your database
# The SQL sandbox wraps each test in a transaction that is rolled back,
# keeping tests isolated from one another.
config :taskasync, Taskasync.Repo,
  username: "postgres",
  password: "postgres",
  database: "taskasync_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox
| 24.684211 | 56 | 0.73774 |
9e5ffb4e6e2b00cabf3e45d4a4cf72dd9185e00a | 3,082 | ex | Elixir | lib/vutuv_web/controllers/user_controller.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 309 | 2016-05-03T17:16:23.000Z | 2022-03-01T09:30:22.000Z | lib/vutuv_web/controllers/user_controller.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 662 | 2016-04-27T07:45:18.000Z | 2022-01-05T07:29:19.000Z | lib/vutuv_web/controllers/user_controller.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 40 | 2016-04-27T07:46:22.000Z | 2021-12-31T05:54:34.000Z | defmodule VutuvWeb.UserController do
use VutuvWeb, :controller
import VutuvWeb.Authorize
alias Phauxth.Log
alias Vutuv.{UserProfiles, UserProfiles.User, Devices, Publications}
alias VutuvWeb.EmailAddressController
@dialyzer {:nowarn_function, new: 3}
def action(conn, _), do: auth_action_slug(conn, __MODULE__, [:index, :new, :create, :show])
def index(conn, params, _current_user) do
page = UserProfiles.paginate_users(params)
render(conn, "index.html", users: page.entries, page: page)
end
def new(conn, _, %User{} = user) do
redirect(conn, to: Routes.user_path(conn, :show, user))
end
def new(conn, _, _current_user) do
changeset = UserProfiles.change_user(%User{})
render(conn, "new.html", changeset: changeset)
end
def create(conn, %{"user" => user_params}, _current_user) do
user_params =
conn |> get_req_header("accept-language") |> add_accept_language_to_params(user_params)
case UserProfiles.create_user(user_params) do
{:ok, user} ->
Log.info(%Log{user: user.id, message: "user created"})
EmailAddressController.verify_email(conn, user_params, "confirm your account", true)
{:error, %Ecto.Changeset{} = changeset} ->
if Devices.duplicate_email_error?(changeset) do
EmailAddressController.verify_email(conn, user_params, "confirm your account", false)
else
render(conn, "new.html", changeset: changeset)
end
end
end
def show(conn, %{"slug" => slug}, %{slug: slug} = current_user) do
user = UserProfiles.get_user_overview(current_user)
posts = Publications.list_posts(current_user)
render(conn, "show.html", user: user, posts: posts)
end
def show(conn, %{"slug" => slug}, current_user) do
user = %{"slug" => slug} |> UserProfiles.get_user!() |> UserProfiles.get_user_overview()
posts = Publications.list_posts(user, current_user)
render(conn, "show.html", user: user, posts: posts)
end
def edit(conn, _, user) do
changeset = UserProfiles.change_user(user)
render(conn, "edit.html", user: user, changeset: changeset)
end
def update(conn, %{"user" => user_params}, user) do
case UserProfiles.update_user(user, user_params) do
{:ok, user} ->
conn
|> put_flash(:info, gettext("User updated successfully."))
|> redirect(to: Routes.user_path(conn, :show, user))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "edit.html", user: user, changeset: changeset)
end
end
def delete(conn, _, user) do
{:ok, _user} = UserProfiles.delete_user(user)
conn
|> delete_session(:phauxth_session_id)
|> put_flash(:info, gettext("User deleted successfully."))
|> redirect(to: Routes.session_path(conn, :new))
end
defp add_accept_language_to_params(accept_language, %{"user" => _} = user_params) do
al = if accept_language == [], do: "", else: hd(accept_language)
put_in(user_params, ["user", "accept_language"], al)
end
defp add_accept_language_to_params(_, user_params), do: user_params
end
| 33.868132 | 95 | 0.678456 |
9e60019c949ae5f97e9702fec6e08eef78cb90fa | 826 | exs | Elixir | test/ex_aws/cognito_idp_test.exs | Theuns-Botha/ex_aws_cognito | 5f640e53f183e68238dbab78d567238ebd211e9c | [
"MIT"
] | 1 | 2018-08-27T20:55:49.000Z | 2018-08-27T20:55:49.000Z | test/ex_aws/cognito_idp_test.exs | Theuns-Botha/ex_aws_cognito | 5f640e53f183e68238dbab78d567238ebd211e9c | [
"MIT"
] | 2 | 2018-03-17T14:43:18.000Z | 2019-08-04T17:40:20.000Z | test/ex_aws/cognito_idp_test.exs | Theuns-Botha/ex_aws_cognito | 5f640e53f183e68238dbab78d567238ebd211e9c | [
"MIT"
] | 5 | 2018-09-28T12:47:01.000Z | 2020-04-07T14:45:45.000Z | defmodule ExAws.CognitoIdpTest do
use ExUnit.Case, async: true
alias ExAws.CognitoIdp
test "admin_create_user" do
opts = [
desired_delivery_mediums: ["foo", "bar"],
force_alias_creation: true,
message_action: "email",
user_attributes: [%{name: "name", value: "value"}]
]
operation = CognitoIdp.admin_create_user("pool", "name", opts)
assert {"x-amz-target", "AWSCognitoIdentityProviderService.AdminCreateUser"} in operation.headers
assert operation.data == %{
"DesiredDeliveryMediums" => ["foo", "bar"],
"ForceAliasCreation" => true,
"MessageAction" => "email",
"UserAttributes" => [%{"Name" => "name", "Value" => "value"}],
"UserPoolId" => "pool",
"Username" => "name"
}
end
end
| 29.5 | 101 | 0.588378 |
9e60061a91e1c65147bc2b818e3446c9a74b828e | 1,158 | ex | Elixir | clients/workflow_executions/lib/google_api/workflow_executions/v1beta/connection.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/workflow_executions/lib/google_api/workflow_executions/v1beta/connection.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/workflow_executions/lib/google_api/workflow_executions/v1beta/connection.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.WorkflowExecutions.V1beta.Connection do
@moduledoc """
Handle Tesla connections for GoogleApi.WorkflowExecutions.V1beta.
"""
@type t :: Tesla.Env.client()
use GoogleApi.Gax.Connection,
scopes: [
# View and manage your data across Google Cloud Platform services
"https://www.googleapis.com/auth/cloud-platform"
],
otp_app: :google_api_workflow_executions,
base_url: "https://workflowexecutions.googleapis.com/"
end
| 35.090909 | 74 | 0.749568 |
9e600bbc2556cd32c2cff6dddadbe15524ed8908 | 443 | exs | Elixir | test/readme_test.exs | wisq/space_ex | 56a87bdd67af093ad70e2843ff4cf963ddf7018c | [
"Apache-2.0"
] | 7 | 2018-01-30T14:09:13.000Z | 2021-01-22T22:38:32.000Z | test/readme_test.exs | wisq/space_ex | 56a87bdd67af093ad70e2843ff4cf963ddf7018c | [
"Apache-2.0"
] | null | null | null | test/readme_test.exs | wisq/space_ex | 56a87bdd67af093ad70e2843ff4cf963ddf7018c | [
"Apache-2.0"
] | 1 | 2018-01-30T14:09:17.000Z | 2018-01-30T14:09:17.000Z | defmodule ReadmeTest do
use ExUnit.Case, async: true
test "README deps example references current version" do
expected = SpaceEx.Mixfile.project()[:version]
actual = get_readme_version()
assert expected == actual
end
def get_readme_version() do
readme = File.read!("README.md")
case Regex.run(~r/^\s+{:space_ex, "~> ([0-9\.]+)"}\s*$/m, readme) do
[_, version] -> version
nil -> nil
end
end
end
| 23.315789 | 72 | 0.636569 |
9e600d9261f8535bec4e1baa9ecf65ac50c8c159 | 10,102 | ex | Elixir | lib/honeydew/sources/ecto_source.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 717 | 2015-06-15T19:30:54.000Z | 2022-03-22T06:10:09.000Z | lib/honeydew/sources/ecto_source.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 106 | 2015-06-25T05:38:05.000Z | 2021-12-08T23:17:19.000Z | lib/honeydew/sources/ecto_source.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 60 | 2015-06-07T00:48:37.000Z | 2022-03-06T08:20:23.000Z | #
# The goal of this module is to lamprey a queue onto an existing ecto schema with as few requirements and as little
# disruption as possible. It adds two fields to the schema, a "lock" field and a "private" field.
#
# The lock field is an integer overloaded with three jobs:
# 1. Acts as a lock, to ensure that only one worker is processing the job at a time, no matter how many nodes are running
# copies of the queue. It expires after a configurable period of time (the queue process or entire node crashed).
# 2. Indicates the status of the job, it can be either:
# - "ready", between zero and SQL.ready()
# - "delayed", between SQL.ready() and the beginning of the stale window
# - "in progress", from now until now + stale_timeout
# - "stale", within a year ago ago from now
# - "abandoned", -1
# - "finished", nil
# 3. Indicate the order in which jobs should be processed.
#
# unix epoch zero
# |<-------- ~ 24+ yrs ------->|<----- ~ 18 yrs ---->|<--- 5 yrs -->|<------- stale_timeout ------->|
# |<---------- ready ----------|<------ delayed -----| | |
# | | |<--- stale ---|<-------- in progress ---------|
# time -------0----------------------------|------------------------------------|-------------------------------|---->
# ^ ^ ^ ^
# abandoned(-1) SQL.ready() now reserve()
# now - far_in_the_past() now + stale_timeout
#
# The private field is a simple binary field that contains an erlang term, it's used for data that needs to be
# persisted between job attempts, specificaly, it's the "failure_private" contents of the job.
#
#
# As the main objective is to minimize disruption, I wanted the default values for the additional fields to be set
# statically in the migration, rather than possibly interfering with the user's schema validations on save etc...
# The only runtime configuration the user should set is the `stale_timeout`, which should be the maximum expected
# time that a job will take until it should be retried.
#
#
# This module is tested and dialyzed via the included test project in examples/ecto_poll_queue
#
if Code.ensure_loaded?(Ecto) do
defmodule Honeydew.EctoSource do
@moduledoc false
require Logger
alias Honeydew.Job
alias Honeydew.PollQueue
alias Honeydew.PollQueue.State, as: PollQueueState
alias Honeydew.EctoSource.State
alias Honeydew.Queue.State, as: QueueState
@behaviour PollQueue
@reset_stale_interval 5 * 60 * 1_000 # five minutes in msecs
@abandoned -1
def abandoned, do: @abandoned
@impl true
def init(queue, args) do
schema = Keyword.fetch!(args, :schema)
repo = Keyword.fetch!(args, :repo)
sql = Keyword.fetch!(args, :sql)
stale_timeout = args[:stale_timeout] * 1_000
reset_stale_interval = @reset_stale_interval # held in state so tests can change it
table = sql.table_name(schema)
key_fields = schema.__schema__(:primary_key)
task_fn =
schema.__info__(:functions)
|> Enum.member?({:honeydew_task, 2})
|> if do
&schema.honeydew_task/2
else
fn(id, _queue) -> {:run, [id]} end
end
run_if = args[:run_if]
reset_stale(reset_stale_interval)
{:ok, %State{schema: schema,
repo: repo,
sql: sql,
table: table,
key_fields: key_fields,
lock_field: field_name(queue, :lock),
private_field: field_name(queue, :private),
task_fn: task_fn,
queue: queue,
stale_timeout: stale_timeout,
reset_stale_interval: reset_stale_interval,
run_if: run_if}}
end
# lock a row for processing
@impl true
def reserve(%State{queue: queue, schema: schema, repo: repo, sql: sql, private_field: private_field, task_fn: task_fn} = state) do
try do
state
|> sql.reserve
|> repo.query([])
rescue e in DBConnection.ConnectionError ->
{:error, e}
end
|> case do
{:ok, %{num_rows: 1, rows: [[dumped_private | dumped_key_values]]}} ->
# convert key and private_field from db representation to schema's type
private = load_field(schema, repo, private_field, dumped_private)
loaded_keys = load_keys(state, dumped_key_values)
# if there's only one primary key, just pass the value, otherwise pass the list of compound key values
job =
loaded_keys
|> case do
[{_single_key, value}] -> value
other -> other
end
|> task_fn.(queue)
|> Job.new(queue)
|> struct(failure_private: private)
{{:value, {loaded_keys, job}}, state}
{:ok, %{num_rows: 0}} ->
{:empty, state}
{:error, error} ->
Logger.warn("[Honeydew] Ecto queue '#{inspect queue}' couldn't poll for jobs because #{inspect error}")
{:empty, state}
end
end
@impl true
# acked without completing, either moved or abandoned
def ack(%Job{private: key_fields, completed_at: nil}, state) do
finalize(key_fields, @abandoned, nil, state)
end
@impl true
def ack(%Job{private: key_fields}, state) do
finalize(key_fields, nil, nil, state)
end
@impl true
def nack(%Job{private: key_values, failure_private: private, delay_secs: delay_secs}, %State{sql: sql,
repo: repo,
schema: schema,
private_field: private_field} = state) do
dumped_keys = dump_keys(state, key_values)
{:ok, private} = dump_field(schema, repo, private_field, private)
{:ok, %{num_rows: 1}} =
state
|> sql.delay_ready
|> repo.query([delay_secs, private | dumped_keys])
state
end
@impl true
# handles the case where Honeydew.cancel/2 is called with just a single primary key id
def cancel(%Job{private: id}, state) when not is_list(id), do: cancel(%Job{private: [id: id]}, state)
# handles the case where Honeydew.cancel/2 is called with a compound key
def cancel(%Job{private: key_values}, %State{repo: repo, sql: sql} = state) do
dumped_keys = dump_keys(state, key_values)
state
|> sql.cancel
|> repo.query(dumped_keys)
|> case do
{:ok, %{num_rows: 1}} ->
{:ok, state}
{:ok, %{num_rows: 0}} ->
{{:error, :not_found}, state}
end
end
@impl true
def status(%State{repo: repo, sql: sql} = state) do
{:ok, %{num_rows: 1, columns: columns, rows: [values]}} =
state
|> sql.status
|> repo.query([])
columns
|> Enum.map(&String.to_atom/1)
|> Enum.zip(values)
|> Enum.into(%{})
end
@impl true
def filter(%State{repo: repo, sql: sql, queue: queue} = state, filter) do
{:ok, %{rows: rows}} =
state
|> sql.filter(filter)
|> repo.query([])
Enum.map(rows, fn key_field_values ->
%Job{queue: queue,
private: load_keys(state, key_field_values)}
end)
end
@impl true
def handle_info(:__reset_stale__, %QueueState{private: %PollQueueState{source: {__MODULE__, %State{sql: sql,
repo: repo,
reset_stale_interval: reset_stale_interval} = state}}} = queue_state) do
{:ok, _} =
state
|> sql.reset_stale
|> repo.query([])
reset_stale(reset_stale_interval)
{:noreply, queue_state}
end
def handle_info(msg, queue_state) do
Logger.warn("[Honeydew] Queue #{inspect(self())} received unexpected message #{inspect(msg)}")
{:noreply, queue_state}
end
def field_name(queue, name) do
:"honeydew_#{Honeydew.table_name(queue)}_#{name}"
end
defp reset_stale(reset_stale_interval) do
{:ok, _} = :timer.send_after(reset_stale_interval, :__reset_stale__)
end
defp finalize(key_fields, lock, private, state) do
import Ecto.Query
from(s in state.schema,
where: ^key_fields,
update: [set: ^[{state.lock_field, lock}, {state.private_field, private}]])
|> state.repo.update_all([]) # avoids touching auto-generated fields
state
end
defp dump_field(schema, repo, field, value) do
type = schema.__schema__(:type, field)
Ecto.Type.adapter_dump(repo.__adapter__(), type, value)
end
defp load_field(schema, repo, field, dumped_value) do
%^schema{^field => value} = repo.load(schema, %{field => dumped_value})
value
end
defp dump_keys(%State{repo: repo, schema: schema}, key_values) do
Enum.map(key_values, fn {key_field, dumped_value} ->
{:ok, value} = dump_field(schema, repo, key_field, dumped_value)
value
end)
end
defp load_keys(%State{repo: repo, schema: schema, key_fields: key_fields}, key_values) do
key_fields
|> Enum.zip(key_values)
|> Enum.into(%{})
|> Enum.map(fn {key_field, dumped_value} ->
key_value = load_field(schema, repo, key_field, dumped_value)
{key_field, key_value}
end)
end
end
end
| 36.734545 | 175 | 0.551871 |
9e601bedd49d997e8272edbc1139c17aa5072f14 | 2,382 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/update_conditional_format_rule_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/sheets/lib/google_api/sheets/v4/model/update_conditional_format_rule_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/sheets/lib/google_api/sheets/v4/model/update_conditional_format_rule_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse do
@moduledoc """
The result of updating a conditional format rule.
## Attributes
* `newIndex` (*type:* `integer()`, *default:* `nil`) - The index of the new rule.
* `newRule` (*type:* `GoogleApi.Sheets.V4.Model.ConditionalFormatRule.t`, *default:* `nil`) - The new rule that replaced the old rule (if replacing), or the rule that was moved (if moved)
* `oldIndex` (*type:* `integer()`, *default:* `nil`) - The old index of the rule. Not set if a rule was replaced (because it is the same as new_index).
* `oldRule` (*type:* `GoogleApi.Sheets.V4.Model.ConditionalFormatRule.t`, *default:* `nil`) - The old (deleted) rule. Not set if a rule was moved (because it is the same as new_rule).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:newIndex => integer() | nil,
:newRule => GoogleApi.Sheets.V4.Model.ConditionalFormatRule.t() | nil,
:oldIndex => integer() | nil,
:oldRule => GoogleApi.Sheets.V4.Model.ConditionalFormatRule.t() | nil
}
field(:newIndex)
field(:newRule, as: GoogleApi.Sheets.V4.Model.ConditionalFormatRule)
field(:oldIndex)
field(:oldRule, as: GoogleApi.Sheets.V4.Model.ConditionalFormatRule)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.535714 | 191 | 0.725861 |
9e604b6ebc895d9d6b1fd01de8d9bcaa939f0ee7 | 322 | exs | Elixir | test/stix/bundle_test.exs | johnwunder/elixir-stix2 | 2b427694d45206acd556bf464de9fadd56095ba0 | [
"MIT"
] | null | null | null | test/stix/bundle_test.exs | johnwunder/elixir-stix2 | 2b427694d45206acd556bf464de9fadd56095ba0 | [
"MIT"
] | null | null | null | test/stix/bundle_test.exs | johnwunder/elixir-stix2 | 2b427694d45206acd556bf464de9fadd56095ba0 | [
"MIT"
] | null | null | null | defmodule StixBundleTest do
use ExUnit.Case
test "bundle" do
result = Stix.bundle([Stix.object("indicator")])
assert Map.keys(result) == [:id, :objects, :type]
end
test "bundle object" do
result = Stix.bundle(Stix.object("indicator"))
assert Map.keys(result) == [:id, :objects, :type]
end
end
| 21.466667 | 53 | 0.65528 |
9e60577f4e3c74b92fb35a0d1ae0bd2cb7e55913 | 443 | exs | Elixir | hgw/config/dogma.exs | techgaun/dumpster | c2a5394afe759fb99041aea677e9b0bc4bf91aec | [
"Unlicense"
] | 1 | 2019-12-10T22:25:31.000Z | 2019-12-10T22:25:31.000Z | hgw/config/dogma.exs | techgaun/dumpster | c2a5394afe759fb99041aea677e9b0bc4bf91aec | [
"Unlicense"
] | 3 | 2020-10-25T04:40:05.000Z | 2020-10-25T04:48:10.000Z | hgw/config/dogma.exs | techgaun/dumpster | c2a5394afe759fb99041aea677e9b0bc4bf91aec | [
"Unlicense"
] | null | null | null | # dogma config to override some settings
use Mix.Config
alias Dogma.Rule
config :dogma,
rule_set: Dogma.RuleSet.All,
exclude: [
~r(\Apriv/|\Atest/),
],
override: [
%Rule.ModuleDoc{enabled: false},
%Rule.LineLength{enabled: false},
%Rule.ComparisonToBoolean{enabled: false},
%Rule.FunctionArity{enabled: false},
%Rule.InterpolationOnlyString{enabled: false},
%Rule.InfixOperatorPadding{enabled: false}
]
| 23.315789 | 50 | 0.697517 |
9e6058cd47e2d261dd21cc032a6023ccbd03db5c | 2,747 | ex | Elixir | lib/bakeit.ex | efine/bakeit_ex | 3bec62b98e1117bad439ba74cf77cc19571b1092 | [
"BSD-3-Clause"
] | null | null | null | lib/bakeit.ex | efine/bakeit_ex | 3bec62b98e1117bad439ba74cf77cc19571b1092 | [
"BSD-3-Clause"
] | null | null | null | lib/bakeit.ex | efine/bakeit_ex | 3bec62b98e1117bad439ba74cf77cc19571b1092 | [
"BSD-3-Clause"
] | null | null | null | defmodule Bakeit do
@pastery_url "https://www.pastery.net/api/paste/"
def upload(input, opts) do
api_key = Bakeit.INI.read[:api_key]
opts[:debug] && (
IO.write "Opts: #{inspect opts}\n"
IO.write "#{api_key}\n"
)
HTTPoison.start
body = get_body(input)
headers = %{"User-Agent" => "Mozilla/5.0 (Elixir) bakeit library",
"Content-Type" => "application/octet-stream"}
post_opts = [
follow_redirect: true,
params: make_qps(api_key, opts),
ssl: []
]
IO.write "Uploading to Pastery...\n"
{:ok, resp} = HTTPoison.post(@pastery_url, body, headers, post_opts)
opts[:debug] && IO.write("Resp: #{inspect resp}\n")
{:ok, resp_body} = parse_upload_rsp(resp)
{:ok, paste_resp} = Poison.decode(resp_body)
paste_url = paste_resp["url"]
IO.write "Paste URL: #{paste_url}\n"
:ok = maybe_launch_webbrowser(paste_url, opts)
end
defp get_body(""), do: IO.getn("", 1024 * 1024)
defp get_body(fname), do: {:file, fname}
defp maybe_launch_webbrowser(url, cfg) do
(cfg[:open_browser] && launch_webbrowser(url); :ok)
end
defp launch_webbrowser(<<url :: binary>>) do
url |> String.to_char_list |> launch_webbrowser
end
defp launch_webbrowser(url) do
case :webbrowser.open(url) do
:ok ->
true;
{:error, {:not_found, msg}} ->
IO.write "#{msg}\n"
false
end
end
defp title_or_filename(cfg, files) do
case cfg[:title] do
"" ->
get_filename(files)
title ->
title
end
end
defp get_filename([fname|_]), do: Path.basename(fname)
defp get_filename([]), do: []
defp make_qps(api_key, opts) do
[api_key: api_key] ++
opt_qp(:title, opts, &str_nonempty?/1) ++
opt_qp(:language, opts, &str_nonempty?/1) ++
opt_qp(:duration, opts, &non_zero?/1) ++
opt_qp(:max_views, opts, &non_zero?/1)
end
defp opt_qp(key, opts, pred) do
val = opts[key]
pred.(val) && [{key, val}] || []
end
defp str_nonempty?(s), do: s != ""
defp non_zero?(i), do: i != 0
defp parse_upload_rsp(resp) do
case resp.status_code do
n when n in 300..399 ->
msg = "Unexpected redirect: #{n} #"
{:error, msg}
413 ->
{:error, "The chosen file was rejected by the server " <>
"because it was too large, please try a smaller " <>
"file."}
422 ->
{:error, "422"}
n when n in 400..499 ->
{:error, "There was a problem with the request: #{n}"}
n when n >= 500 ->
msg = "There was a server error #{n}, please try again later."
{:error, msg}
_ ->
{:ok, resp.body}
end
end
end
| 26.161905 | 71 | 0.569712 |
9e605dd3d9c0e662fc9480a806f0717562de1334 | 21,756 | exs | Elixir | test/websockex/frame_test.exs | Ma233/gunpowder | c1de28940c42aee6af6fb8aed53afe30bbb2eda9 | [
"MIT"
] | null | null | null | test/websockex/frame_test.exs | Ma233/gunpowder | c1de28940c42aee6af6fb8aed53afe30bbb2eda9 | [
"MIT"
] | 1 | 2021-12-24T06:23:32.000Z | 2021-12-24T06:23:32.000Z | test/websockex/frame_test.exs | Ma233/gunpowder | c1de28940c42aee6af6fb8aed53afe30bbb2eda9 | [
"MIT"
] | 1 | 2021-12-24T06:03:18.000Z | 2021-12-24T06:03:18.000Z | defmodule WebSockex.FrameTest do
use ExUnit.Case, async: true
# Frame: (val::bitsize)
# << fin::1, 0::3, opcode::4, 0::1, payload_len::7 >>
# << 1::1, 0::3, [8,9,10]::4, 0::1, payload_len::7 >>
# << 1::1, 0::3, [8,9,10]::4, 0::1, payload_len::7 >>
# << fin::1, 0::3, [0,1,2]::4, 0::1, payload_len::7 >>
# << fun::1, 0::3, [0,1,2]::4, 0::1, 126::7, payload_len::16 >>
# << fin::1, 0::3, [0,1,2]::4, 0::1, 127::7, payload_len::64 >>
# << fin::1, 0::3, opcode::4, 1::1, payload_len::(7-71), masking_key::32 >>
@close_frame <<1::1, 0::3, 8::4, 0::1, 0::7>>
@ping_frame <<1::1, 0::3, 9::4, 0::1, 0::7>>
@pong_frame <<1::1, 0::3, 10::4, 0::1, 0::7>>
@close_frame_with_payload <<1::1, 0::3, 8::4, 0::1, 7::7, 1000::16, "Hello">>
@ping_frame_with_payload <<1::1, 0::3, 9::4, 0::1, 5::7, "Hello">>
@pong_frame_with_payload <<1::1, 0::3, 10::4, 0::1, 5::7, "Hello">>
@binary :erlang.term_to_binary(:hello)
alias WebSockex.{Frame}
def unmask(key, payload, acc \\ <<>>)
def unmask(_, <<>>, acc), do: acc
for x <- 1..3 do
def unmask(<<key::8*unquote(x), _::binary>>, <<payload::8*unquote(x)>>, acc) do
part = Bitwise.bxor(payload, key)
<<acc::binary, part::8*unquote(x)>>
end
end
def unmask(<<key::8*4>>, <<payload::8*4, rest::binary>>, acc) do
part = Bitwise.bxor(payload, key)
unmask(<<key::8*4>>, rest, <<acc::binary, part::8*4>>)
end
@large_binary <<0::300*8, "Hello">>
describe "parse_frame" do
test "returns incomplete when the frame is less than 16 bits" do
<<part::10, _::bits>> = @ping_frame
assert Frame.parse_frame(<<part>>) == :incomplete
end
test "handles incomplete frames with complete headers" do
frame = <<1::1, 0::3, 1::4, 0::1, 5::7, "Hello"::utf8>>
<<part::bits-size(20), rest::bits>> = frame
assert Frame.parse_frame(part) == :incomplete
assert Frame.parse_frame(<<part::bits, rest::bits>>) == {:ok, {:text, "Hello"}, <<>>}
end
test "handles incomplete continuation large frames" do
len = 0x5555
frame = <<1::1, 0::3, 0::4, 0::1, 126::7, len::16, 0::500*8, "Hello">>
assert Frame.parse_frame(frame) == :incomplete
end
test "handles incomplete continuation very large frame" do
len = 0x5FFFF
frame = <<1::1, 0::3, 0::4, 0::1, 127::7, len::64, 0::1000*8, "Hello">>
assert Frame.parse_frame(frame) == :incomplete
end
test "handles incomplete text large frames" do
len = 0x5555
frame = <<1::1, 0::3, 1::4, 0::1, 126::7, len::16, 0::500*8, "Hello">>
assert Frame.parse_frame(frame) == :incomplete
end
test "handles incomplete text very large frame" do
len = 0x5FFFF
frame = <<1::1, 0::3, 1::4, 0::1, 127::7, len::64, 0::1000*8, "Hello">>
assert Frame.parse_frame(frame) == :incomplete
end
test "returns overflow buffer" do
<<first::bits-size(16), overflow::bits-size(14), rest::bitstring>> = <<@ping_frame, @ping_frame_with_payload>>
payload = <<first::bits, overflow::bits>>
assert Frame.parse_frame(payload) == {:ok, :ping, overflow}
assert Frame.parse_frame(<<overflow::bits, rest::bits>>) == {:ok, {:ping, "Hello"}, <<>>}
end
test "parses a close frame" do
assert Frame.parse_frame(@close_frame) == {:ok, :close, <<>>}
end
test "parses a ping frame" do
assert Frame.parse_frame(@ping_frame) == {:ok, :ping, <<>>}
end
test "parses a pong frame" do
assert Frame.parse_frame(@pong_frame) == {:ok, :pong, <<>>}
end
test "parses a close frame with a payload" do
assert Frame.parse_frame(@close_frame_with_payload) == {:ok, {:close, 1000, "Hello"}, <<>>}
end
test "parses a ping frame with a payload" do
assert Frame.parse_frame(@ping_frame_with_payload) == {:ok, {:ping, "Hello"}, <<>>}
end
test "parses a pong frame with a payload" do
assert Frame.parse_frame(@pong_frame_with_payload) == {:ok, {:pong, "Hello"}, <<>>}
end
test "parses a text frame" do
frame = <<1::1, 0::3, 1::4, 0::1, 5::7, "Hello"::utf8>>
assert Frame.parse_frame(frame) == {:ok, {:text, "Hello"}, <<>>}
end
test "parses a large text frame" do
string = <<0::5000*8, "Hello">>
len = byte_size(string)
frame = <<1::1, 0::3, 1::4, 0::1, 126::7, len::16, string::binary>>
assert Frame.parse_frame(frame) == {:ok, {:text, string}, <<>>}
end
test "parses a very large text frame" do
string = <<0::80_000*8, "Hello">>
len = byte_size(string)
frame = <<1::1, 0::3, 1::4, 0::1, 127::7, len::64, string::binary>>
assert Frame.parse_frame(frame) == {:ok, {:text, string}, <<>>}
end
test "parses a binary frame" do
len = byte_size(@binary)
frame = <<1::1, 0::3, 2::4, 0::1, len::7, @binary::bytes>>
assert Frame.parse_frame(frame) == {:ok, {:binary, @binary}, <<>>}
end
test "parses a large binary frame" do
binary = <<0::5000*8, @binary::binary>>
len = byte_size(binary)
frame = <<1::1, 0::3, 2::4, 0::1, 126::7, len::16, binary::binary>>
assert Frame.parse_frame(frame) == {:ok, {:binary, binary}, <<>>}
end
test "parses a very large binary frame" do
binary = <<0::80_000*8, @binary::binary>>
len = byte_size(binary)
frame = <<1::1, 0::3, 2::4, 0::1, 127::7, len::64, binary::binary>>
assert Frame.parse_frame(frame) == {:ok, {:binary, binary}, <<>>}
end
test "parses a text fragment frame" do
frame = <<0::1, 0::3, 1::4, 0::1, 5::7, "Hello"::utf8>>
assert Frame.parse_frame(frame) == {:ok, {:fragment, :text, "Hello"}, <<>>}
end
test "parses a large text fragment frame" do
string = <<0::5000*8, "Hello">>
len = byte_size(string)
frame = <<0::1, 0::3, 1::4, 0::1, 126::7, len::16, string::binary>>
assert Frame.parse_frame(frame) == {:ok, {:fragment, :text, string}, <<>>}
end
test "parses a very large text fragment frame" do
string = <<0::80_000*8, "Hello">>
len = byte_size(string)
frame = <<0::1, 0::3, 1::4, 0::1, 127::7, len::64, string::binary>>
assert Frame.parse_frame(frame) == {:ok, {:fragment, :text, string}, <<>>}
end
test "parses a binary fragment frame" do
len = byte_size(@binary)
frame = <<0::1, 0::3, 2::4, 0::1, len::7, @binary::bytes>>
assert Frame.parse_frame(frame) == {:ok, {:fragment, :binary, @binary}, <<>>}
end
test "parses a large binary fragment frame" do
binary = <<0::5000*8, @binary::binary>>
len = byte_size(binary)
frame = <<0::1, 0::3, 2::4, 0::1, 126::7, len::16, binary::binary>>
assert Frame.parse_frame(frame) == {:ok, {:fragment, :binary, binary}, <<>>}
end
test "parses a very large binary fragment frame" do
binary = <<0::80_000*8, @binary::binary>>
len = byte_size(binary)
frame = <<0::1, 0::3, 2::4, 0::1, 127::7, len::64, binary::binary>>
assert Frame.parse_frame(frame) == {:ok, {:fragment, :binary, binary}, <<>>}
end
test "parses a continuation frame in a fragmented segment" do
frame = <<0::1, 0::3, 0::4, 0::1, 5::7, "Hello"::utf8>>
assert Frame.parse_frame(frame) == {:ok, {:continuation, "Hello"}, <<>>}
end
test "parses a large continuation frame in a fragmented segment" do
string = <<0::5000*8, "Hello">>
len = byte_size(string)
frame = <<0::1, 0::3, 0::4, 0::1, 126::7, len::16, string::binary>>
assert Frame.parse_frame(frame) == {:ok, {:continuation, string}, <<>>}
end
test "parses a very large continuation frame in a fragmented segment" do
string = <<0::80_000*8, "Hello">>
len = byte_size(string)
frame = <<0::1, 0::3, 0::4, 0::1, 127::7, len::64, string::binary>>
assert Frame.parse_frame(frame) == {:ok, {:continuation, string}, <<>>}
end
test "parses a finish frame in a fragmented segment" do
frame = <<1::1, 0::3, 0::4, 0::1, 5::7, "Hello"::utf8>>
assert Frame.parse_frame(frame) == {:ok, {:finish, "Hello"}, <<>>}
end
test "parses a large finish frame in a fragmented segment" do
string = <<0::5000*8, "Hello">>
len = byte_size(string)
frame = <<1::1, 0::3, 0::4, 0::1, 126::7, len::16, string::binary>>
assert Frame.parse_frame(frame) == {:ok, {:finish, string}, <<>>}
end
test "parses a very large finish frame in a fragmented segment" do
string = <<0::80_000*8, "Hello">>
len = byte_size(string)
frame = <<1::1, 0::3, 0::4, 0::1, 127::7, len::64, string::binary>>
assert Frame.parse_frame(frame) == {:ok, {:finish, string}, <<>>}
end
test "nonfin control frame returns an error" do
frame = <<0::1, 0::3, 9::4, 0::1, 0::7>>
assert Frame.parse_frame(frame) ==
{:error, %WebSockex.FrameError{reason: :nonfin_control_frame, opcode: :ping, buffer: frame}}
end
test "large control frames return an error" do
error = %WebSockex.FrameError{reason: :control_frame_too_large, opcode: :ping}
frame = <<1::1, 0::3, 9::4, 0::1, 126::7>>
assert Frame.parse_frame(frame) == {:error, %{error | buffer: frame}}
frame = <<1::1, 0::3, 9::4, 0::1, 127::7>>
assert Frame.parse_frame(frame) == {:error, %{error | buffer: frame}}
end
test "close frames with data must have atleast 2 bytes of data" do
frame = <<1::1, 0::3, 8::4, 0::1, 1::7, 0::8>>
assert Frame.parse_frame(frame) ==
{:error,
%WebSockex.FrameError{
reason: :close_with_single_byte_payload,
opcode: :close,
buffer: frame
}}
end
test "Close Frames with improper close codes return an error" do
frame = <<1::1, 0::3, 8::4, 0::1, 7::7, 5000::16, "Hello">>
assert Frame.parse_frame(frame) ==
{:error, %WebSockex.FrameError{reason: :invalid_close_code, opcode: :close, buffer: frame}}
end
test "Text Frames check for valid UTF-8" do
frame = <<1::1, 0::3, 1::4, 0::1, 7::7, 0xFFFF::16, "Hello"::utf8>>
assert Frame.parse_frame(frame) ==
{:error, %WebSockex.FrameError{reason: :invalid_utf8, opcode: :text, buffer: frame}}
end
test "Close Frames with payloads check for valid UTF-8" do
frame = <<1::1, 0::3, 8::4, 0::1, 9::7, 1000::16, 0xFFFF::16, "Hello"::utf8>>
assert Frame.parse_frame(frame) ==
{:error, %WebSockex.FrameError{reason: :invalid_utf8, opcode: :close, buffer: frame}}
end
end
# Tests for Frame.parse_fragment/2, which stitches :continuation/:finish
# frames onto an in-progress {:fragment, type, payload} accumulator.
describe "parse_fragment" do
  test "Errors with two fragment starts" do
    # Opening a second fragment before the first finishes is a protocol error.
    frame0 = {:fragment, :text, "Hello"}
    frame1 = {:fragment, :text, "Goodbye"}

    assert Frame.parse_fragment(frame0, frame1) ==
             {:error,
              %WebSockex.FragmentParseError{
                reason: :two_start_frames,
                fragment: frame0,
                continuation: frame1
              }}
  end

  test "Applies continuation to a text fragment" do
    # The split point falls mid-payload, so the partial data is not valid
    # UTF-8 yet — validation is deferred until the fragment finishes.
    frame = <<0xFFFF::16, "Hello"::utf8>>
    <<part::binary-size(4), rest::binary>> = frame

    assert Frame.parse_fragment({:fragment, :text, part}, {:continuation, rest}) ==
             {:ok, {:fragment, :text, frame}}
  end

  test "Finishes a text fragment" do
    frame0 = {:fragment, :text, "Hel"}
    frame1 = {:finish, "lo"}
    assert Frame.parse_fragment(frame0, frame1) == {:ok, {:text, "Hello"}}
  end

  test "Errors with invalid utf-8 in a text fragment" do
    # 0xFFFF is not valid UTF-8, so finishing the fragment must fail even
    # though the continuation step above accepted the same bytes.
    frame = <<0xFFFF::16, "Hello"::utf8>>
    <<part::binary-size(4), rest::binary>> = frame

    assert Frame.parse_fragment({:fragment, :text, part}, {:finish, rest}) ==
             {:error, %WebSockex.FrameError{reason: :invalid_utf8, opcode: :text, buffer: frame}}
  end

  test "Applies a continuation to a binary fragment" do
    <<part::binary-size(3), rest::binary>> = @binary

    assert Frame.parse_fragment({:fragment, :binary, part}, {:continuation, rest}) ==
             {:ok, {:fragment, :binary, @binary}}
  end

  test "Finishes a binary fragment" do
    # Binary fragments carry no UTF-8 requirement, unlike text fragments.
    <<part::binary-size(3), rest::binary>> = @binary
    assert Frame.parse_fragment({:fragment, :binary, part}, {:finish, rest}) == {:ok, {:binary, @binary}}
  end
end
describe "encode_frame" do
test "encodes a ping frame" do
assert {:ok, <<1::1, 0::3, 9::4, 1::1, 0::7, _::32>>} = Frame.encode_frame(:ping)
end
test "encodes a ping frame with a payload" do
payload = "A longer but different string."
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 9::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary-size(len)>>} =
Frame.encode_frame({:ping, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a pong frame" do
assert {:ok, <<1::1, 0::3, 10::4, 1::1, 0::7, _::32>>} = Frame.encode_frame(:pong)
end
test "encodes a pong frame with a payload" do
payload = "No"
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 10::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary-size(len)>>} =
Frame.encode_frame({:pong, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a close frame" do
assert {:ok, <<1::1, 0::3, 8::4, 1::1, 0::7, _::32>>} = Frame.encode_frame(:close)
end
test "encodes a close frame with a payload" do
payload = "Hello"
len = byte_size(<<1000::16, payload::binary>>)
assert {:ok, <<1::1, 0::3, 8::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary-size(len)>>} =
Frame.encode_frame({:close, 1000, payload})
assert unmask(mask, masked_payload) == <<1000::16, payload::binary>>
end
test "returns an error with large ping frame" do
assert Frame.encode_frame({:ping, @large_binary}) ==
{:error,
%WebSockex.FrameEncodeError{
reason: :control_frame_too_large,
frame_type: :ping,
frame_payload: @large_binary
}}
end
test "returns an error with large pong frame" do
assert Frame.encode_frame({:pong, @large_binary}) ==
{:error,
%WebSockex.FrameEncodeError{
reason: :control_frame_too_large,
frame_type: :pong,
frame_payload: @large_binary
}}
end
test "returns an error with large close frame" do
assert Frame.encode_frame({:close, 1000, @large_binary}) ==
{:error,
%WebSockex.FrameEncodeError{
reason: :control_frame_too_large,
frame_type: :close,
frame_payload: @large_binary,
close_code: 1000
}}
end
test "returns an error with close code out of range" do
assert Frame.encode_frame({:close, 5838, "Hello"}) ==
{:error,
%WebSockex.FrameEncodeError{
reason: :close_code_out_of_range,
frame_type: :close,
frame_payload: "Hello",
close_code: 5838
}}
end
test "encodes a text frame" do
payload = "Lemon Pies are Pies."
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 1::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:text, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a large text frame" do
payload = <<0::300*8, "Lemon Pies are Pies.">>
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 1::4, 1::1, 126::7, ^len::16, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:text, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a very large text frame" do
payload = <<0::0xFFFFF*8, "Lemon Pies are Pies.">>
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 1::4, 1::1, 127::7, ^len::64, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:text, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a binary frame" do
payload = @binary
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 2::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:binary, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a large binary frame" do
payload = <<0::300*8, @binary::binary>>
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 2::4, 1::1, 126::7, ^len::16, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:binary, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a very large binary frame" do
payload = <<0::0xFFFFF*8, @binary::binary>>
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 2::4, 1::1, 127::7, ^len::64, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:binary, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a text fragment frame" do
payload = "Lemon Pies are Pies."
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 1::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:fragment, :text, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a large text fragment frame" do
payload = <<0::300*8, "Lemon Pies are Pies.">>
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 1::4, 1::1, 126::7, ^len::16, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:fragment, :text, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a very large text fragment frame" do
payload = <<0::0xFFFFF*8, "Lemon Pies are Pies.">>
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 1::4, 1::1, 127::7, ^len::64, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:fragment, :text, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a binary fragment frame" do
payload = @binary
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 2::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:fragment, :binary, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a large binary fragment frame" do
payload = <<0::300*8, @binary::binary>>
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 2::4, 1::1, 126::7, ^len::16, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:fragment, :binary, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a very large binary fragment frame" do
payload = <<0::0xFFFFF*8, @binary::binary>>
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 2::4, 1::1, 127::7, ^len::64, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:fragment, :binary, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a continuation frame" do
payload = "Lemon Pies are Pies."
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 0::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:continuation, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a large continuation frame" do
payload = <<0::300*8, "Lemon Pies are Pies.">>
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 0::4, 1::1, 126::7, ^len::16, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:continuation, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a very large continuation frame" do
payload = <<0::0xFFFFF*8, "Lemon Pies are Pies.">>
len = byte_size(payload)
assert {:ok, <<0::1, 0::3, 0::4, 1::1, 127::7, ^len::64, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:continuation, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a finish to a fragmented segment" do
payload = "Lemon Pies are Pies."
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 0::4, 1::1, ^len::7, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:finish, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a large finish to a fragmented segment" do
payload = <<0::300*8, "Lemon Pies are Pies.">>
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 0::4, 1::1, 126::7, ^len::16, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:finish, payload})
assert unmask(mask, masked_payload) == payload
end
test "encodes a very large finish to a fragmented segment" do
payload = <<0::0xFFFFF*8, "Lemon Pies are Pies.">>
len = byte_size(payload)
assert {:ok, <<1::1, 0::3, 0::4, 1::1, 127::7, ^len::64, mask::bytes-size(4), masked_payload::binary>>} =
Frame.encode_frame({:finish, payload})
assert unmask(mask, masked_payload) == payload
end
end
end
| 36.688027 | 116 | 0.56913 |
9e60ad04fdd14b2f8468f70e6aff8674fd0816ba | 1,109 | ex | Elixir | languages/elixir/exercises/concept/community-garden/.meta/example.ex | AlexLeSang/v3 | 3d35961a961b5a2129b1d42f1d118972d9665357 | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | languages/elixir/exercises/concept/community-garden/.meta/example.ex | AlexLeSang/v3 | 3d35961a961b5a2129b1d42f1d118972d9665357 | [
"MIT"
] | 1,938 | 2019-12-12T08:07:10.000Z | 2021-01-29T12:56:13.000Z | languages/elixir/exercises/concept/community-garden/.meta/example.ex | AlexLeSang/v3 | 3d35961a961b5a2129b1d42f1d118972d9665357 | [
"MIT"
defmodule Plot do
  # A single garden plot: `plot_id` is the agent-assigned integer id and
  # `registered_to` identifies who the plot is registered to. Both keys
  # are mandatory when building the struct.
  @enforce_keys [:plot_id, :registered_to]
  defstruct [:plot_id, :registered_to]
end

defmodule CommunityGarden do
  @moduledoc """
  Agent-backed registry of community-garden plots.

  Agent state shape: `%{registry: %{plot_id => %Plot{}}, next_id: integer}`,
  where `next_id` is the id handed out to the next registration.
  """

  # Starts the registry agent with an empty registry; ids start at 1.
  def start(opts \\ []) do
    initial_state = %{registry: %{}, next_id: 1}
    Agent.start(fn -> initial_state end, opts)
  end

  # Returns every registered plot (ordering is not guaranteed).
  def list_registrations(pid) do
    Agent.get(pid, fn %{registry: registry} -> Map.values(registry) end)
  end

  # Registers a new plot to `register_to` and returns the created %Plot{}.
  def register(pid, register_to) do
    Agent.get_and_update(pid, fn state ->
      plot = %Plot{plot_id: state.next_id, registered_to: register_to}

      new_state = %{
        state
        | registry: Map.put(state.registry, state.next_id, plot),
          next_id: state.next_id + 1
      }

      {plot, new_state}
    end)
  end

  # Removes the plot with `plot_id`; releasing an unknown id is a no-op.
  def release(pid, plot_id) do
    Agent.update(pid, fn state ->
      %{state | registry: Map.delete(state.registry, plot_id)}
    end)
  end

  # Fetches a plot by id, or a {:not_found, _} tuple when unregistered.
  def get_registration(pid, plot_id) do
    Agent.get(pid, fn %{registry: registry} ->
      case Map.fetch(registry, plot_id) do
        {:ok, plot} -> plot
        :error -> {:not_found, "plot is unregistered"}
      end
    end)
  end
end
| 25.204545 | 83 | 0.649234 |
9e60f743e51a80489e4b9efdbc9fc6ad9cfec1a6 | 26,598 | ex | Elixir | lib/teiserver/tcp/tcp_server.ex | marseel/teiserver | 7e085ae7853205d217183737d3eb69a4941bbe7e | [
"MIT"
] | 1 | 2021-02-23T22:34:12.000Z | 2021-02-23T22:34:12.000Z | lib/teiserver/tcp/tcp_server.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | null | null | null | lib/teiserver/tcp/tcp_server.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | null | null | null | defmodule Teiserver.TcpServer do
@moduledoc false
use GenServer
require Logger
alias Teiserver.{User, Client}
alias Teiserver.Tcp.{TcpLobby}
# Duration refers to how long it will track commands for
# Limit is the number of commands that can be sent in that time
@cmd_flood_duration 10
@cmd_flood_limit 20
@behaviour :ranch_protocol
@spec get_ssl_opts :: [
        {:cacertfile, String.t()} | {:certfile, String.t()} | {:keyfile, String.t()}
      ]
def get_ssl_opts() do
  # Pull the TLS certificate paths from the application environment once
  # and return them in the keyword shape ranch_ssl expects.
  certs = Application.get_env(:central, Teiserver)[:certs]

  [
    certfile: certs[:certfile],
    cacertfile: certs[:cacertfile],
    keyfile: certs[:keyfile]
  ]
end
# Called at startup
# Boots the ranch listener for this server, either plain TCP or TLS
# depending on `opts[:ssl]`. The listen port comes from application
# config; each accepted connection is handed to start_link/4 below.
def start_link(opts) do
  ports = Application.get_env(:central, Teiserver)[:ports]

  # start_listener(Ref, Transport, TransOpts0, Protocol, ProtoOpts)
  if opts[:ssl] do
    :ranch.start_listener(
      make_ref(),
      :ranch_ssl,
      get_ssl_opts() ++ [{:port, ports[:tls]}],
      __MODULE__,
      []
    )
  else
    :ranch.start_listener(
      make_ref(),
      :ranch_tcp,
      [{:port, ports[:tcp]}],
      __MODULE__,
      []
    )
  end
end
# Called on new connection
# :ranch_protocol callback — ranch invokes this for every accepted socket.
# Spawns the connection process via :proc_lib so it can later enter the
# GenServer loop with :gen_server.enter_loop/3 (see init/3).
def start_link(ref, socket, transport, _opts) do
  pid = :proc_lib.spawn_link(__MODULE__, :init, [ref, socket, transport])
  {:ok, pid}
end
# Per-connection bootstrap: resolves the peer IP, acks the socket with
# ranch, switches it to active mode, schedules the heartbeat tick (when
# configured), builds the initial connection state and enters the
# GenServer loop.
def init(ref, socket, transport) do
  {:ok, {ip, _}} = transport.peername(socket)

  # Render the IP tuple (e.g. {127, 0, 0, 1}) as a dotted string.
  ip =
    ip
    |> Tuple.to_list()
    |> Enum.join(".")

  :ranch.accept_ack(ref)
  transport.setopts(socket, [{:active, true}])

  heartbeat = Application.get_env(:central, Teiserver)[:heartbeat_interval]

  if heartbeat do
    :timer.send_interval(heartbeat, self(), :heartbeat)
  end

  state = %{
    # Connection state
    message_part: "",
    last_msg: System.system_time(:second),
    socket: socket,
    transport: transport,
    protocol_in: Application.get_env(:central, Teiserver)[:default_protocol].protocol_in(),
    protocol_out: Application.get_env(:central, Teiserver)[:default_protocol].protocol_out(),
    ip: ip,

    # Client state
    userid: nil,
    username: nil,
    lobby_host: false,
    user: nil,
    queues: [],
    ready_queue_id: nil,

    # Connection microstate
    msg_id: nil,
    lobby_id: nil,
    room_member_cache: %{},
    known_users: %{},
    known_battles: [],
    print_client_messages: false,
    print_server_messages: false,
    script_password: nil,
    # NOTE(review): connections start exempt from the command-rate
    # throttle — presumably downgraded after login; confirm where this
    # flag is flipped elsewhere in the module.
    exempt_from_cmd_throttle: true,
    cmd_timestamps: []
  }

  send(self(), {:action, {:welcome, nil}})
  :gen_server.enter_loop(__MODULE__, [], state)
end
# Standard GenServer init/1 to satisfy the behaviour. Connections are
# normally started via init/3 + :gen_server.enter_loop/3 instead.
def init(init_arg) do
  {:ok, init_arg}
end
# Introspection: fetch the entire connection state map.
def handle_call(:get_state, _from, state) do
  {:reply, state, state}
end

# Introspection: fetch a single key from the connection state.
def handle_call({:get, key}, _from, state) do
  {:reply, Map.get(state, key), state}
end

# Fire-and-forget overwrite of a single connection-state key.
def handle_info({:put, key, value}, state) do
  new_state = Map.put(state, key, value)
  {:noreply, new_state}
end
# If Ctrl + C is sent through it kills the connection, makes telnet debugging easier
# (<<255, 244, 255, 253, 6>> is the telnet interrupt/EXIT byte sequence.)
def handle_info({_, _socket, <<255, 244, 255, 253, 6>>}, state) do
  Client.disconnect(state.userid, "Spring EXIT command")
  send(self(), :terminate)
  {:noreply, state}
end

# Main source of data ingress
# Throttled path: each received chunk that contains a newline counts as
# one command toward a sliding @cmd_flood_duration-second window. Going
# over @cmd_flood_limit raises the user's flood level and disconnects.
def handle_info({:tcp, _socket, data}, %{exempt_from_cmd_throttle: false} = state) do
  data = to_string(data)

  cmd_timestamps = if String.contains?(data, "\n") do
    now = System.system_time(:second)
    limiter = now - @cmd_flood_duration

    # Keep only timestamps still inside the flood window, plus this one.
    cmd_timestamps = [now | state.cmd_timestamps]
    |> Enum.filter(fn cmd_ts -> cmd_ts > limiter end)

    if Enum.count(cmd_timestamps) > @cmd_flood_limit do
      User.set_flood_level(state.userid, 10)
      Client.disconnect(state.userid, :flood)
      Logger.error("Command overflow from #{state.username}/#{state.userid} with #{Enum.count(cmd_timestamps)} commands. Disconnected and flood protection engaged.")
    end

    cmd_timestamps
  else
    state.cmd_timestamps
  end

  # NOTE(review): the chunk is still handed to the protocol even after a
  # flood disconnect above — confirm this is intended.
  new_state = state.protocol_in.data_in(data, state)
  {:noreply, %{new_state | cmd_timestamps: cmd_timestamps}}
end

# Unthrottled path for exempt connections: straight to the protocol.
def handle_info({:tcp, _socket, data}, %{exempt_from_cmd_throttle: true} = state) do
  new_state = state.protocol_in.data_in(to_string(data), state)
  {:noreply, new_state}
end

# TLS ingress; note this path applies no flood throttle.
def handle_info({:ssl, _socket, data}, state) do
  new_state = state.protocol_in.data_in(to_string(data), state)
  {:noreply, new_state}
end
# Email, when an email is sent we get a message, we don't care about that for the most part (yet)
def handle_info({:delivered_email, _email}, state) do
{:noreply, state}
end
# Heartbeat allows us to kill stale connections
# Stops the process normally when no message has been received within the
# configured :heartbeat_timeout. Unauthenticated connections time out
# silently (no log line).
def handle_info(:heartbeat, state) do
  diff = System.system_time(:second) - state.last_msg

  if diff > Application.get_env(:central, Teiserver)[:heartbeat_timeout] do
    if state.username do
      Logger.error("Heartbeat timeout for #{state.username}")
    end

    {:stop, :normal, state}
  else
    {:noreply, state}
  end
end
def handle_info({:action, {action_type, data}}, state) do
new_state = do_action(action_type, data, state)
{:noreply, new_state}
end
# Client channel messages
def handle_info({:client_message, :direct_message, _userid, {_from_id, _message}}, state) do
# TODO: Currently we seem to subscribe to multiple channels at once
# so if we uncomment this we get messages double up
# new_state = new_chat_message(:direct_message, from_id, nil, message, state)
# {:noreply, new_state}
{:noreply, state}
end
def handle_info({:client_message, :matchmaking, _userid, data}, state) do
{:noreply, matchmaking_update(data, state)}
end
def handle_info({:client_message, :lobby, userid, data}, state) do
{:noreply, TcpLobby.handle_info({:client_message, :lobby, userid, data}, state)}
end
def handle_info({:global_battle_lobby, action, lobby_id}, state) do
{:noreply, TcpLobby.handle_info({:global_battle_lobby, action, lobby_id}, state)}
end
def handle_info({:client_message, topic, _userid, _data}, state) do
Logger.warn("No tcp_server handler for :client_message with topic #{topic}")
{:noreply, state}
end
# teiserver_lobby_updates:#{lobby_id}
# Client updates
def handle_info({:user_logged_in, nil}, state), do: {:noreply, state}
def handle_info({:user_logged_in, userid}, state) do
new_state = user_logged_in(userid, state)
{:noreply, new_state}
end
# Lobby chat
# def handle_info({:lobby_chat, action, lobby_id, userid, msg}, state) do
# {:noreply, TcpChat.do_handle({action, lobby_id, userid, msg}, state)}
# end
# Some logic because if we're the one logged out we need to disconnect
def handle_info({:user_logged_out, userid, username}, state) do
if state.userid == userid do
{:stop, :normal, state}
else
new_state = user_logged_out(userid, username, state)
{:noreply, new_state}
end
end
def handle_info({:updated_client, new_client, reason}, state) do
new_state =
case reason do
:client_updated_status ->
client_status_update(new_client, state)
:client_updated_battlestatus ->
client_battlestatus_update(new_client, state)
end
{:noreply, new_state}
end
# User
def handle_info({:this_user_updated, fields}, state) do
new_state = user_updated(fields, state)
{:noreply, new_state}
end
# Matchmaking
# def handle_info({:matchmaking, data}, state) do
# new_state = matchmaking_update(data, state)
# {:noreply, new_state}
# end
# Chat
def handle_info({:direct_message, from, msg}, state) do
new_state = new_chat_message(:direct_message, from, nil, msg, state)
{:noreply, new_state}
end
def handle_info({:new_message, from, room_name, msg}, state) do
new_state = new_chat_message(:chat_message, from, room_name, msg, state)
{:noreply, new_state}
end
def handle_info({:new_message_ex, from, room_name, msg}, state) do
new_state = new_chat_message(:chat_message_ex, from, room_name, msg, state)
{:noreply, new_state}
end
def handle_info({:add_user_to_room, userid, room_name}, state) do
new_state = user_join_chat_room(userid, room_name, state)
{:noreply, new_state}
end
def handle_info({:remove_user_from_room, userid, room_name}, state) do
new_state = user_leave_chat_room(userid, room_name, state)
{:noreply, new_state}
end
# Battles
def handle_info({:battle_updated, _lobby_id, data, reason}, state) do
new_state = battle_update(data, reason, state)
{:noreply, new_state}
end
def handle_info({:global_battle_updated, lobby_id, reason}, state) do
new_state = global_battle_update(lobby_id, reason, state)
{:noreply, new_state}
end
def handle_info({:request_user_join_battle, userid}, state) do
new_state = request_user_join_battle(userid, state)
{:noreply, new_state}
end
def handle_info({:join_battle_request_response, lobby_id, response, reason}, state) do
new_state = join_battle_request_response(lobby_id, response, reason, state)
{:noreply, new_state}
end
# --- GenServer handlers: battle routing and socket lifecycle ---

# Server forces this connection into a lobby (e.g. after matchmaking).
def handle_info({:force_join_battle, lobby_id, script_password}, state) do
  new_state = force_join_battle(lobby_id, script_password, state)
  {:noreply, new_state}
end

# A user (possibly this one) joined a lobby; reconcile our view of them.
def handle_info({:add_user_to_battle, userid, lobby_id, script_password}, state) do
  new_state = user_join_battle(userid, lobby_id, script_password, state)
  {:noreply, new_state}
end

# A user left a lobby.
def handle_info({:remove_user_from_battle, userid, lobby_id}, state) do
  new_state = user_leave_battle(userid, lobby_id, state)
  {:noreply, new_state}
end

# A user was kicked from a lobby (handled like a leave, plus a notice when
# the kicked user is the one on this connection).
def handle_info({:kick_user_from_battle, userid, lobby_id}, state) do
  new_state = user_kicked_from_battle(userid, lobby_id, state)
  {:noreply, new_state}
end

# Timeout error
# Peer stopped responding: close the socket, mark the client disconnected,
# and stop. userid is cleared so terminate/2 does not disconnect twice.
def handle_info({:tcp_error, _port, :etimedout}, %{socket: socket, transport: transport} = state) do
  transport.close(socket)
  Client.disconnect(state.userid, ":tcp_closed with tcp_error :etimedout")
  {:stop, :normal, %{state | userid: nil}}
end

def handle_info({:tcp_error, _port, :ehostunreach}, %{socket: socket, transport: transport} = state) do
  transport.close(socket)
  Client.disconnect(state.userid, ":tcp_closed with tcp_error :ehostunreach")
  {:stop, :normal, %{state | userid: nil}}
end

# Connection
# Normal TCP close; first clause handles a state that still holds a socket,
# the second is the fallback when no socket is present in the state.
def handle_info({:tcp_closed, _socket}, %{socket: socket, transport: transport} = state) do
  transport.close(socket)
  Client.disconnect(state.userid, ":tcp_closed with socket")
  {:stop, :normal, %{state | userid: nil}}
end

def handle_info({:tcp_closed, _socket}, state) do
  Client.disconnect(state.userid, ":tcp_closed no socket")
  {:stop, :normal, %{state | userid: nil}}
end

# SSL close; the first clause pins the closed socket to the one in our state.
def handle_info({:ssl_closed, socket}, %{socket: socket, transport: transport} = state) do
  transport.close(socket)
  Client.disconnect(state.userid, ":ssl_closed with socket")
  {:stop, :normal, %{state | userid: nil}}
end

def handle_info({:ssl_closed, _socket}, state) do
  Client.disconnect(state.userid, ":ssl_closed no socket")
  {:stop, :normal, %{state | userid: nil}}
end

# Internal request to shut this connection process down.
def handle_info(:terminate, state) do
  Client.disconnect(state.userid, "tcp_server :terminate")
  {:stop, :normal, %{state | userid: nil}}
end

# GenServer terminate callback: best-effort disconnect of the client record.
# The stop paths above set userid to nil first, so this may be called with
# a nil userid.
def terminate(_reason, state) do
  Client.disconnect(state.userid, "tcp_server terminate")
end
# #############################
# Internal functions
# #############################

# User updates
# Ensures this connection has been told that `userid` is logged in.
# Sends :user_logged_in (and caches a blank user record) only the first
# time we see them; users without a client record are ignored for now.
defp user_logged_in(userid, state) do
  known_users =
    case state.known_users[userid] do
      nil ->
        case Client.get_client_by_id(userid) do
          nil ->
            # No client record available; leave the cache untouched
            state.known_users
          client ->
            state.protocol_out.reply(:user_logged_in, client, nil, state)
            Map.put(state.known_users, userid, _blank_user(userid))
        end
      _ ->
        # Already known; nothing to send
        state.known_users
    end
  %{state | known_users: known_users}
end
# Forwards a logout notification for `userid` and drops them from this
# connection's cache. If the user was never known here, nothing is sent
# and the state is returned unchanged. (Cached entries are always maps
# built by _blank_user/2, so a truthiness check is equivalent to the
# nil check.)
defp user_logged_out(userid, username, state) do
  if state.known_users[userid] do
    state.protocol_out.reply(:user_logged_out, {userid, username}, nil, state)
    %{state | known_users: Map.delete(state.known_users, userid)}
  else
    state
  end
end
# Reloads this connection's user record and re-sends the user-owned lists
# named in `fields` (:friends, :friend_requests, :ignored). Unknown fields
# are logged. Returns the state with the refreshed user struct; the reply
# calls are built from the incoming `state` (same protocol_out) but always
# carry the freshly loaded `new_user`.
defp user_updated(fields, state) do
  new_user = User.get_user_by_id(state.userid)
  new_state = %{state | user: new_user}
  fields
  |> Enum.each(fn field ->
    case field do
      :friends ->
        state.protocol_out.reply(:friendlist, new_user, nil, state)
      :friend_requests ->
        state.protocol_out.reply(:friendlist_request, new_user, nil, state)
      :ignored ->
        state.protocol_out.reply(:ignorelist, new_user, nil, state)
      _ ->
        Logger.error("No handler in tcp_server:user_updated with field #{field}")
    end
  end)
  new_state
end
# Client updates
# Forwards a client status change to this connection unconditionally.
# The connection state itself is not modified.
defp client_status_update(new_client, state) do
  state.protocol_out.reply(:client_status, new_client, nil, state)
  state
end

# Forwards a battle-status change only when the updated client is in the
# same lobby as this connection; otherwise it is silently dropped.
defp client_battlestatus_update(new_client, state) do
  if state.lobby_id != nil and state.lobby_id == new_client.lobby_id do
    state.protocol_out.reply(:client_battlestatus, new_client, nil, state)
  end
  state
end
# Matchmaking
# Handles matchmaking queue events for this connection:
#   :match_ready  - notify the client and remember which queue became ready
#   :match_cancel - clear the pending ready-queue marker
#   :join_lobby   - put the client straight into the matched lobby
#   :dequeue      - no connection-state change required
defp matchmaking_update({cmd, data}, state) do
  case cmd do
    :match_ready ->
      # NOTE(review): this reply takes five arguments, unlike the reply/4
      # calls elsewhere in this module - presumably a reply/5 variant; confirm.
      state.protocol_out.reply(:matchmaking, :match_ready, data, nil, state)
      %{state | ready_queue_id: data}
    :match_cancel ->
      %{state | ready_queue_id: nil}
    :join_lobby ->
      # TODO: Make it so we know what the script password is because normally it's sent
      # by the client, maybe update the MM protocol so when you join a queue it's there?
      state.protocol_out.do_join_battle(state, data, state.script_password)
    :dequeue ->
      state
  end
end
# Battle updates
# Translates an in-lobby update `reason` into the protocol reply forwarded
# to this client. Chat-style reasons map onto different reply atoms
# (:say -> :battle_message, :sayex -> :battle_message_ex); unknown reasons
# are logged and the state is returned unchanged.
# TODO: Check we can't get an out of sync server-client state
# with the bot commands
defp battle_update(data, reason, state) do
  reply_for_reason = %{
    add_start_rectangle: :add_start_rectangle,
    remove_start_rectangle: :remove_start_rectangle,
    add_script_tags: :add_script_tags,
    remove_script_tags: :remove_script_tags,
    enable_all_units: :enable_all_units,
    enable_units: :enable_units,
    disable_units: :disable_units,
    say: :battle_message,
    sayex: :battle_message_ex,
    add_bot_to_battle: :add_bot_to_battle,
    update_bot: :update_bot,
    remove_bot_from_battle: :remove_bot_from_battle
  }

  case Map.fetch(reply_for_reason, reason) do
    {:ok, reply_atom} ->
      state.protocol_out.reply(reply_atom, data, nil, state)

    :error ->
      Logger.error("No handler in tcp_server:battle_update with reason #{reason}")
      state
  end
end
# Handles lobby-list level events for this connection, maintaining
# `known_battles` so open/close messages are sent at most once per lobby.
# NOTE(review): the reply's return value is used as the new state in the
# opened/closed branches - presumably reply/4 returns the state; confirm.
defp global_battle_update(lobby_id, reason, state) do
  case reason do
    :update_battle_info ->
      state.protocol_out.reply(:update_battle, lobby_id, nil, state)
    :battle_opened ->
      # Don't announce the lobby this connection itself is hosting
      if state.lobby_host == false or state.lobby_id != lobby_id do
        new_known_battles = [lobby_id | state.known_battles]
        new_state = %{state | known_battles: new_known_battles}
        new_state.protocol_out.reply(:battle_opened, lobby_id, nil, new_state)
      else
        state
      end
    :battle_closed ->
      # Only close battles the client was previously told exist
      if Enum.member?(state.known_battles, lobby_id) do
        new_known_battles = List.delete(state.known_battles, lobby_id)
        new_state = %{state | known_battles: new_known_battles}
        new_state.protocol_out.reply(:battle_closed, lobby_id, nil, new_state)
      else
        state
      end
    _ ->
      Logger.error("No handler in tcp_server:global_battle_update with reason #{reason}")
      state
  end
end
# This is the server asking the host if a client can join the battle
# the client is expected to reply with a yes or no
# NOTE(review): the reply's return value is this function's return value -
# presumably reply/4 returns the state; confirm.
defp request_user_join_battle(userid, state) do
  state.protocol_out.reply(:request_user_join_battle, userid, nil, state)
end
# This is the result of the host responding to the server asking if the client
# can join the battle
# No lobby id: tell the client the join failed outright.
defp join_battle_request_response(nil, _, _, state) do
  state.protocol_out.reply(:join_battle_failure, "No battle", nil, state)
end

# Host accepted or denied; either perform the join or forward the denial
# reason to the client.
defp join_battle_request_response(lobby_id, response, reason, state) do
  case response do
    :accept ->
      state.protocol_out.do_join_battle(state, lobby_id, state.script_password)
    :deny ->
      state.protocol_out.reply(:join_battle_failure, reason, nil, state)
  end
end
# This is the result of being forced to join a battle
# Leaves the current lobby (if any), records the new lobby and script
# password on the state, then performs the join.
defp force_join_battle(lobby_id, script_password, state) do
  new_state = state.protocol_out.do_leave_battle(state, state.lobby_id)
  new_state = %{new_state | lobby_id: lobby_id, script_password: script_password}
  # state.protocol_out and new_state.protocol_out are the same module here
  state.protocol_out.do_join_battle(new_state, lobby_id, script_password)
end
# Depending on our current understanding of where the user is
# we will send a selection of commands on the assumption this
# genserver is incorrect and needs to alter its state accordingly
defp user_join_battle(userid, lobby_id, script_password, state) do
  # Only reveal the script password to the lobby host (for their own lobby)
  # and to the joining user themselves; everyone else receives nil.
  script_password =
    cond do
      state.lobby_host and state.lobby_id == lobby_id -> script_password
      state.userid == userid -> script_password
      true -> nil
    end
  # Compute the cached record for this user; branch order matters since
  # the later clauses dereference state.known_users[userid].
  new_user =
    cond do
      state.userid == userid ->
        # It's this connection's own user; just record the lobby
        _blank_user(userid, %{lobby_id: lobby_id})
      # User isn't known about so we say they've logged in
      # Then we add them to the battle
      state.known_users[userid] == nil ->
        client = Client.get_client_by_id(userid)
        state.protocol_out.reply(:user_logged_in, client, nil, state)
        state.protocol_out.reply(
          :add_user_to_battle,
          {userid, lobby_id, script_password},
          nil,
          state
        )
        _blank_user(userid, %{lobby_id: lobby_id})
      # User is known about and not in a battle, this is the ideal
      # state
      state.known_users[userid].lobby_id == nil ->
        state.protocol_out.reply(
          :add_user_to_battle,
          {userid, lobby_id, script_password},
          nil,
          state
        )
        %{state.known_users[userid] | lobby_id: lobby_id}
      # User is known about but already in a battle
      state.known_users[userid].lobby_id != lobby_id ->
        # If we don't know about the battle we don't need to remove the user from it first
        if Enum.member?(state.known_battles, state.known_users[userid].lobby_id) do
          state.protocol_out.reply(
            :remove_user_from_battle,
            {userid, state.known_users[userid].lobby_id},
            nil,
            state
          )
        end
        state.protocol_out.reply(
          :add_user_to_battle,
          {userid, lobby_id, script_password},
          nil,
          state
        )
        %{state.known_users[userid] | lobby_id: lobby_id}
      # User is known about and in this battle already, no change
      state.known_users[userid].lobby_id == lobby_id ->
        state.known_users[userid]
    end
  new_knowns = Map.put(state.known_users, userid, new_user)
  %{state | known_users: new_knowns}
end
# Reconciles this connection's view when `userid` leaves `lobby_id`,
# sending :remove_user_from_battle (or first :user_logged_in) as needed
# and updating the known_users cache.
defp user_leave_battle(userid, lobby_id, state) do
  # If they are kicked then it's possible they won't be unsubbed
  if userid == state.userid do
    Phoenix.PubSub.unsubscribe(Central.PubSub, "legacy_battle_updates:#{lobby_id}")
  end
  # Do they know about the battle?
  # NOTE(review): the reply's return value becomes the new state here -
  # presumably reply/4 returns the state; confirm.
  state = if not Enum.member?(state.known_battles, lobby_id) do
    state.protocol_out.reply(:battle_opened, lobby_id, nil, state)
  else
    state
  end
  # Now the user
  new_user =
    cond do
      Enum.member?(state.known_battles, lobby_id) == false ->
        # Battle still unknown after the reply above; keep whatever record
        # we have. NOTE(review): this can be nil, which Map.put below will
        # store as a nil entry - confirm that is intended.
        state.known_users[userid]
      state.known_users[userid] == nil ->
        client = Client.get_client_by_id(userid)
        state.protocol_out.reply(:user_logged_in, client, nil, state)
        _blank_user(userid)
      state.known_users[userid].lobby_id == nil ->
        # No change
        state.known_users[userid]
      true ->
        # We don't care which battle we thought they are in, they're no longer in it
        state.protocol_out.reply(
          :remove_user_from_battle,
          {userid, state.known_users[userid].lobby_id},
          nil,
          state
        )
        %{state.known_users[userid] | lobby_id: nil}
    end
  new_knowns = Map.put(state.known_users, userid, new_user)
  %{state | known_users: new_knowns}
end
# A kick is handled as a leave, except that when the kicked user is the one
# on this connection we first send :forcequit_battle and clear our lobby_id.
defp user_kicked_from_battle(userid, lobby_id, state) do
  # If it's the user, we need to tell them the bad news
  state =
    case state.userid do
      ^userid ->
        state.protocol_out.reply(:forcequit_battle, nil, nil, state)
        %{state | lobby_id: nil}

      _ ->
        state
    end

  user_leave_battle(userid, lobby_id, state)
end
# Chat
# Routes an incoming chat message (direct, room, or room-ex) to this client.
#
# If the sender is not yet known to this connection we first announce their
# login and cache them, so the client always has the sender's details before
# the message arrives. Messages from senders without a client record are
# dropped. Returns the (possibly updated) connection state.
#
# NOTE(review): the previous implementation rebound `state` inside the
# `case` clause and then returned the outer `state` - case-clause bindings
# do not leak in Elixir, so the known_users update was silently discarded.
# The updated state is now returned from the matching branch (matching the
# pattern used by user_join_chat_room/3).
defp new_chat_message(type, from, room_name, msg, state) do
  case Client.get_client_by_id(from) do
    nil ->
      # No client? Ignore them
      state

    client ->
      # Make sure this client knows who the sender is before delivery
      state =
        if Map.has_key?(state.known_users, from) do
          state
        else
          state.protocol_out.reply(:user_logged_in, client, nil, state)
          %{state | known_users: Map.put(state.known_users, from, _blank_user(from))}
        end

      case type do
        :direct_message ->
          state.protocol_out.reply(:direct_message, {from, msg, state.user}, nil, state)

        :chat_message ->
          state.protocol_out.reply(:chat_message, {from, room_name, msg, state.user}, nil, state)

        :chat_message_ex ->
          state.protocol_out.reply(:chat_message_ex, {from, room_name, msg, state.user}, nil, state)
      end

      state
  end
end
# Handles `userid` joining `room_name`: announces the user's login if this
# connection has never seen them, then sends :add_user_to_room (once per
# user per room, tracked via room_member_cache). Users without a client
# record are ignored. Returns the updated state.
defp user_join_chat_room(userid, room_name, state) do
  case Client.get_client_by_id(userid) do
    nil ->
      # No client? Ignore them
      state
    client ->
      # Do they know about the user?
      state =
        case Map.has_key?(state.known_users, userid) do
          false ->
            state.protocol_out.reply(:user_logged_in, client, nil, state)
            %{state | known_users: Map.put(state.known_users, userid, _blank_user(userid))}
          true ->
            state
        end
      # Only announce the room join once; cache defaults to [] per room
      new_members =
        if not Enum.member?(state.room_member_cache[room_name] || [], userid) do
          state.protocol_out.reply(:add_user_to_room, {userid, room_name}, nil, state)
          [userid | (state.room_member_cache[room_name] || [])]
        else
          state.room_member_cache[room_name] || []
        end
      new_cache = Map.put(state.room_member_cache, room_name, new_members)
      %{state | room_member_cache: new_cache}
  end
end
# Handles `userid` leaving `room_name`: sends :remove_user_from_room only
# when this connection both knows the user and had them cached as a member
# of that room, and prunes them from room_member_cache. Returns the
# (possibly unchanged) state.
defp user_leave_chat_room(userid, room_name, state) do
  case Map.has_key?(state.known_users, userid) do
    false ->
      # We don't know who they are, we don't care they've left the chat room
      state
    true ->
      new_members =
        if Enum.member?(state.room_member_cache[room_name] || [], userid) do
          state.protocol_out.reply(:remove_user_from_room, {userid, room_name}, nil, state)
          # member? was true, so the cache entry is a non-nil list here
          state.room_member_cache[room_name] |> Enum.filter(fn m -> m != userid end)
        else
          state.room_member_cache[room_name] || []
        end
      new_cache = Map.put(state.room_member_cache, room_name, new_members)
      %{state | room_member_cache: new_cache}
  end
end
# Actions
# One-off server-initiated actions pushed to this client. Each clause sends
# the corresponding protocol reply; the connection state is never modified.
defp do_action(:ring, data, state) do
  state.protocol_out.reply(:ring, {data, state.userid}, nil, state)
  state
end

defp do_action(:welcome, _data, state) do
  state.protocol_out.reply(:welcome, nil, nil, state)
  state
end

defp do_action(:login_end, _data, state) do
  state.protocol_out.reply(:login_end, nil, nil, state)
  state
end

# Unknown actions are logged and otherwise ignored.
defp do_action(action_type, _data, state) do
  Logger.error("No handler in tcp_server:do_action with action #{action_type}")
  state
end
# Example of how gen-smtp handles upgrading the connection
# https://github.com/gen-smtp/gen_smtp/blob/master/src/gen_smtp_server_session.erl#L683-L720
@doc """
Upgrades this connection's plain TCP socket to TLS in place.

The socket is switched to passive mode for the duration of the SSL
handshake and back to active mode on success, at which point the state's
`socket`/`transport` are replaced with their SSL equivalents. On failure
the error is logged and the original state is returned unchanged (the
connection stays plain TCP).
"""
# NOTE(review): the spec previously used Map.t(), which is not a defined
# type (Map does not export t/0); map() is the built-in type.
@spec upgrade_connection(map()) :: map()
def upgrade_connection(state) do
  # The handshake requires a passive socket
  :ok = state.transport.setopts(state.socket, [{:active, false}])

  ssl_opts =
    get_ssl_opts() ++
      [
        {:packet, :line},
        {:mode, :list},
        {:verify, :verify_none},
        {:ssl_imp, :new}
      ]

  case :ranch_ssl.handshake(state.socket, ssl_opts, 5000) do
    {:ok, new_socket} ->
      :ok = :ranch_ssl.setopts(new_socket, [{:active, true}])
      %{state | socket: new_socket, transport: :ranch_ssl}

    err ->
      Logger.error(
        "Error upgrading connection\nError: #{inspect(err)}\nssl_opts: #{inspect(ssl_opts)}"
      )

      state
  end
end
# Other functions
# Builds the minimal per-user record cached in `state.known_users`.
# `defaults` may override or extend the base keys (e.g. `%{lobby_id: id}`).
def _blank_user(userid, defaults \\ %{}) do
  %{userid: userid, lobby_id: nil}
  |> Map.merge(defaults)
end
end
| 30.642857 | 167 | 0.647041 |
9e61224e5ed39d9988c18521dde85649b1b9dd31 | 21,201 | exs | Elixir | lib/elixir/test/elixir/macro_test.exs | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/macro_test.exs | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/macro_test.exs | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule Macro.ExternalTest do # helper macros exercised by MacroTest below
  defmacro external do # asserts on the caller's location via hard-coded values
    line = 18 # `external()` must be invoked on line 18 of the file - do not shift lines
    file = __ENV__.file
    ^line = __CALLER__.line # pin: caller's line must equal 18
    ^file = __CALLER__.file # pin: caller's file must equal this file
    ^line = Macro.Env.location(__CALLER__)[:line]
    ^file = Macro.Env.location(__CALLER__)[:file]
  end

  defmacro oror(left, right) do # expands to `left || right`; used by expansion tests
    quote do: unquote(left) || unquote(right)
  end
end
# Test suite for the Macro module (escape, expansion, to_string, traversal,
# naming helpers). Fix in this revision: the two trailing assertions in
# "op precedence to string" had the `==` comparison *inside* the quote, so
# they only asserted that Macro.to_string/1 returns a truthy string.
defmodule MacroTest do
  use ExUnit.Case, async: true

  # Changing the lines above will make compilation
  # fail since we are asserting on the caller lines
  import Macro.ExternalTest

  ## Escape

  test "escape handle tuples with size different than two" do
    assert {:{}, [], [:a]} == Macro.escape({:a})
    assert {:{}, [], [:a, :b, :c]} == Macro.escape({:a, :b, :c})
    assert {:{}, [], [:a, {:{}, [], [1, 2, 3]}, :c]} == Macro.escape({:a, {1, 2, 3}, :c})
  end

  test "escape simply returns tuples with size equal to two" do
    assert {:a, :b} == Macro.escape({:a, :b})
  end

  test "escape simply returns any other structure" do
    assert [1, 2, 3] == Macro.escape([1, 2, 3])
  end

  test "escape handles maps" do
    assert {:%{}, [], [a: 1]} = Macro.escape(%{a: 1})
  end

  test "escape handles bitstring" do
    assert {:<<>>, [], [{:::, [], [1, 4]}, ","]} == Macro.escape(<<300::12>>)
  end

  test "escape works recursively" do
    assert [1, {:{}, [], [:a, :b, :c]}, 3] == Macro.escape([1, {:a, :b, :c}, 3])
  end

  test "escape improper" do
    assert [{:|, [], [1, 2]}] == Macro.escape([1|2])
    assert [1, {:|, [], [2, 3]}] == Macro.escape([1, 2|3])
  end

  test "escape with unquote" do
    contents = quote unquote: false, do: unquote(1)
    assert Macro.escape(contents, unquote: true) == 1

    contents = quote unquote: false, do: unquote(x)
    assert Macro.escape(contents, unquote: true) == {:x, [], MacroTest}
  end

  defp eval_escaped(contents) do
    {eval, []} = Code.eval_quoted(Macro.escape(contents, unquote: true))
    eval
  end

  test "escape with remote unquote" do
    contents = quote unquote: false, do: Kernel.unquote(:is_atom)(:ok)
    assert eval_escaped(contents) == quote(do: Kernel.is_atom(:ok))
  end

  test "escape with nested unquote" do
    contents = quote do
      quote do: unquote(x)
    end
    assert eval_escaped(contents) == quote do: (quote do: unquote(x))
  end

  test "escape with alias or no args remote unquote" do
    contents = quote unquote: false, do: Kernel.unquote(:self)
    assert eval_escaped(contents) == quote(do: Kernel.self())

    contents = quote unquote: false, do: x.unquote(Foo)
    assert eval_escaped(contents) == quote(do: x.unquote(Foo))
  end

  test "escape with splicing" do
    contents = quote unquote: false, do: [1, 2, 3, 4, 5]
    assert Macro.escape(contents, unquote: true) == [1, 2, 3, 4, 5]

    contents = quote unquote: false, do: [1, 2, unquote_splicing([3, 4, 5])]
    assert eval_escaped(contents) == [1, 2, 3, 4, 5]

    contents = quote unquote: false, do: [unquote_splicing([1, 2, 3]), 4, 5]
    assert eval_escaped(contents) == [1, 2, 3, 4, 5]

    contents = quote unquote: false, do: [unquote_splicing([1, 2, 3]), unquote_splicing([4, 5])]
    assert eval_escaped(contents) == [1, 2, 3, 4, 5]

    contents = quote unquote: false, do: [1, unquote_splicing([2]), 3, unquote_splicing([4]), 5]
    assert eval_escaped(contents) == [1, 2, 3, 4, 5]

    contents = quote unquote: false, do: [1, unquote_splicing([2]), 3, unquote_splicing([4])|[5]]
    assert eval_escaped(contents) == [1, 2, 3, 4, 5]
  end

  ## Expansion

  test "expand once" do
    assert {:||, _, _} = Macro.expand_once(quote(do: oror(1, false)), __ENV__)
  end

  test "expand once with raw atom" do
    assert Macro.expand_once(quote(do: :foo), __ENV__) == :foo
  end

  test "expand once with current module" do
    assert Macro.expand_once(quote(do: __MODULE__), __ENV__) == __MODULE__
  end

  test "expand once with main" do
    assert Macro.expand_once(quote(do: Elixir), __ENV__) == Elixir
  end

  test "expand once with simple alias" do
    assert Macro.expand_once(quote(do: Foo), __ENV__) == Foo
  end

  test "expand once with current module plus alias" do
    assert Macro.expand_once(quote(do: __MODULE__.Foo), __ENV__) == __MODULE__.Foo
  end

  test "expand once with main plus alias" do
    assert Macro.expand_once(quote(do: Elixir.Foo), __ENV__) == Foo
  end

  test "expand once with custom alias" do
    alias Foo, as: Bar
    assert Macro.expand_once(quote(do: Bar.Baz), __ENV__) == Foo.Baz
  end

  test "expand once with main plus custom alias" do
    alias Foo, as: Bar, warn: false
    assert Macro.expand_once(quote(do: Elixir.Bar.Baz), __ENV__) == Elixir.Bar.Baz
  end

  test "expand once with op" do
    assert Macro.expand_once(quote(do: Foo.bar.Baz), __ENV__) == (quote do
      Foo.bar.Baz
    end)
  end

  test "expand once with erlang" do
    assert Macro.expand_once(quote(do: :foo), __ENV__) == :foo
  end

  test "expand once env" do
    env = %{__ENV__ | line: 0}
    assert Macro.expand_once(quote(do: __ENV__), env) == {:%{}, [], Map.to_list(env)}
    assert Macro.expand_once(quote(do: __ENV__.file), env) == env.file
    assert Macro.expand_once(quote(do: __ENV__.unknown), env) == quote(do: __ENV__.unknown)
  end

  defmacro local_macro do
    :local_macro
  end

  test "expand once local macro" do
    assert Macro.expand_once(quote(do: local_macro), __ENV__) == :local_macro
  end

  test "expand once checks vars" do
    local_macro = 1
    assert local_macro == 1
    quote = {:local_macro, [], nil}
    assert Macro.expand_once(quote, __ENV__) == quote
  end

  # Expands one step and strips the :counter metadata added by hygiene,
  # so results can be compared structurally.
  defp expand_once_and_clean(quoted, env) do
    cleaner = &Keyword.drop(&1, [:counter])
    quoted
    |> Macro.expand_once(env)
    |> Macro.prewalk(&Macro.update_meta(&1, cleaner))
  end

  test "expand once with imported macro" do
    temp_var = {:x, [], Kernel}
    assert expand_once_and_clean(quote(do: 1 || false), __ENV__) == (quote context: Kernel do
      case 1 do
        unquote(temp_var) when unquote(temp_var) in [false, nil] -> false
        unquote(temp_var) -> unquote(temp_var)
      end
    end)
  end

  test "expand once with require macro" do
    temp_var = {:x, [], Kernel}
    assert expand_once_and_clean(quote(do: Kernel.||(1, false)), __ENV__) == (quote context: Kernel do
      case 1 do
        unquote(temp_var) when unquote(temp_var) in [false, nil] -> false
        unquote(temp_var) -> unquote(temp_var)
      end
    end)
  end

  test "expand once with not expandable expression" do
    expr = quote(do: other(1, 2, 3))
    assert Macro.expand_once(expr, __ENV__) == expr
  end

  @foo 1
  @bar Macro.expand_once(quote(do: @foo), __ENV__)

  test "expand once with module at" do
    assert @bar == 1
  end

  # Fully expands and strips :counter metadata, mirroring
  # expand_once_and_clean/2 above.
  defp expand_and_clean(quoted, env) do
    cleaner = &Keyword.drop(&1, [:counter])
    quoted
    |> Macro.expand(env)
    |> Macro.prewalk(&Macro.update_meta(&1, cleaner))
  end

  test "expand" do
    temp_var = {:x, [], Kernel}
    assert expand_and_clean(quote(do: oror(1, false)), __ENV__) == (quote context: Kernel do
      case 1 do
        unquote(temp_var) when unquote(temp_var) in [false, nil] -> false
        unquote(temp_var) -> unquote(temp_var)
      end
    end)
  end

  test "var" do
    assert Macro.var(:foo, nil) == {:foo, [], nil}
    assert Macro.var(:foo, Other) == {:foo, [], Other}
  end

  ## to_string

  test "var to string" do
    assert Macro.to_string(quote do: foo) == "foo"
  end

  test "local call to string" do
    assert Macro.to_string(quote do: foo(1, 2, 3)) == "foo(1, 2, 3)"
    assert Macro.to_string(quote do: foo([1, 2, 3])) == "foo([1, 2, 3])"
  end

  test "remote call to string" do
    assert Macro.to_string(quote do: foo.bar(1, 2, 3)) == "foo.bar(1, 2, 3)"
    assert Macro.to_string(quote do: foo.bar([1, 2, 3])) == "foo.bar([1, 2, 3])"
  end

  test "low atom remote call to string" do
    assert Macro.to_string(quote do: :foo.bar(1, 2, 3)) == ":foo.bar(1, 2, 3)"
  end

  test "big atom remote call to string" do
    assert Macro.to_string(quote do: Foo.Bar.bar(1, 2, 3)) == "Foo.Bar.bar(1, 2, 3)"
  end

  test "remote and fun call to string" do
    assert Macro.to_string(quote do: foo.bar.(1, 2, 3)) == "foo.bar().(1, 2, 3)"
    assert Macro.to_string(quote do: foo.bar.([1, 2, 3])) == "foo.bar().([1, 2, 3])"
  end

  test "atom call to string" do
    assert Macro.to_string(quote do: :foo.(1, 2, 3)) == ":foo.(1, 2, 3)"
  end

  test "aliases call to string" do
    assert Macro.to_string(quote do: Foo.Bar.baz(1, 2, 3)) == "Foo.Bar.baz(1, 2, 3)"
    assert Macro.to_string(quote do: Foo.Bar.baz([1, 2, 3])) == "Foo.Bar.baz([1, 2, 3])"
  end

  test "sigil call to string" do
    assert Macro.to_string(quote do: ~r"123") == ~s/~r"123"/
    assert Macro.to_string(quote do: ~r"123"u) == ~s/~r"123"u/
    assert Macro.to_string(quote do: ~r"\n123") == ~s/~r"\\\\n123"/

    assert Macro.to_string(quote do: ~r"1#{two}3") == ~S/~r"1#{two}3"/
    assert Macro.to_string(quote do: ~r"1#{two}3"u) == ~S/~r"1#{two}3"u/

    assert Macro.to_string(quote do: ~R"123") == ~s/~R"123"/
    assert Macro.to_string(quote do: ~R"123"u) == ~s/~R"123"u/
    assert Macro.to_string(quote do: ~R"\n123") == ~s/~R"\\\\n123"/
  end

  test "arrow to string" do
    assert Macro.to_string(quote do: foo(1, (2 -> 3))) == "foo(1, (2 -> 3))"
  end

  test "blocks to string" do
    assert Macro.to_string(quote do: (1; 2; (:foo; :bar); 3)) <> "\n" == """
    (
      1
      2
      (
        :foo
        :bar
      )
      3
    )
    """
  end

  test "if else to string" do
    assert Macro.to_string(quote do: (if foo, do: bar, else: baz)) <> "\n" == """
    if(foo) do
      bar
    else
      baz
    end
    """
  end

  test "case to string" do
    assert Macro.to_string(quote do: (case foo do true -> 0; false -> (1; 2) end)) <> "\n" == """
    case(foo) do
      true ->
        0
      false ->
        1
        2
    end
    """
  end

  test "fn to string" do
    assert Macro.to_string(quote do: (fn -> 1 + 2 end)) == "fn -> 1 + 2 end"
    assert Macro.to_string(quote do: (fn(x) -> x + 1 end)) == "fn x -> x + 1 end"

    assert Macro.to_string(quote do: (fn(x) -> y = x + 1; y end)) <> "\n" == """
    fn x ->
      y = x + 1
      y
    end
    """

    assert Macro.to_string(quote do: (fn(x) -> y = x + 1; y; (z) -> z end)) <> "\n" == """
    fn
      x ->
        y = x + 1
        y
      z ->
        z
    end
    """
  end

  test "range to string" do
    assert Macro.to_string(quote do: unquote(-1 .. +2)) == "-1..2"
    assert Macro.to_string(quote do: Foo.integer..3) == "Foo.integer()..3"
  end

  test "when" do
    assert Macro.to_string(quote do: (() -> x)) == "(() -> x)"
    assert Macro.to_string(quote do: (x when y -> z)) == "(x when y -> z)"
    assert Macro.to_string(quote do: (x, y when z -> w)) == "((x, y) when z -> w)"
    assert Macro.to_string(quote do: ((x, y) when z -> w)) == "((x, y) when z -> w)"
  end

  test "nested to string" do
    assert Macro.to_string(quote do: (defmodule Foo do def foo do 1 + 1 end end)) <> "\n" == """
    defmodule(Foo) do
      def(foo) do
        1 + 1
      end
    end
    """
  end

  test "op precedence to string" do
    assert Macro.to_string(quote do: (1 + 2) * (3 - 4)) == "(1 + 2) * (3 - 4)"
    assert Macro.to_string(quote do: ((1 + 2) * 3) - 4) == "(1 + 2) * 3 - 4"
    # Previously the == comparison was inside the quote, making these
    # assertions vacuous (to_string always returns a truthy string).
    assert Macro.to_string(quote do: 1 + 2 + 3) == "1 + 2 + 3"
    assert Macro.to_string(quote do: 1 + 2 - 3) == "1 + 2 - 3"
  end

  test "capture op to string" do
    assert Macro.to_string(quote do: &foo/0) == "&foo/0"
    assert Macro.to_string(quote do: &Foo.foo/0) == "&Foo.foo/0"
    assert Macro.to_string(quote do: & &1 + &2) == "&(&1 + &2)"
  end

  test "containers to string" do
    assert Macro.to_string(quote do: {}) == "{}"
    assert Macro.to_string(quote do: []) == "[]"
    assert Macro.to_string(quote do: {1, 2, 3}) == "{1, 2, 3}"
    assert Macro.to_string(quote do: [ 1, 2, 3 ]) == "[1, 2, 3]"
    assert Macro.to_string(quote do: %{}) == "%{}"
    assert Macro.to_string(quote do: %{:foo => :bar}) == "%{foo: :bar}"
    assert Macro.to_string(quote do: %{{1, 2} => [1, 2, 3]}) == "%{{1, 2} => [1, 2, 3]}"
    assert Macro.to_string(quote do: %{map | "a" => "b"}) == "%{map | \"a\" => \"b\"}"
    assert Macro.to_string(quote do: [ 1, 2, 3 ]) == "[1, 2, 3]"
    assert Macro.to_string(quote do: << 1, 2, 3 >>) == "<<1, 2, 3>>"
    assert Macro.to_string(quote do: << <<1>> >>) == "<< <<1>> >>"
  end

  test "struct to string" do
    assert Macro.to_string(quote do: %Test{}) == "%Test{}"
    assert Macro.to_string(quote do: %Test{foo: 1, bar: 1}) == "%Test{foo: 1, bar: 1}"
    assert Macro.to_string(quote do: %Test{struct | foo: 2}) == "%Test{struct | foo: 2}"
    assert Macro.to_string(quote do: %Test{} + 1) == "%Test{} + 1"
  end

  test "binary ops to string" do
    assert Macro.to_string(quote do: 1 + 2) == "1 + 2"
    assert Macro.to_string(quote do: [ 1, 2 | 3 ]) == "[1, 2 | 3]"
    assert Macro.to_string(quote do: [h|t] = [1, 2, 3]) == "[h | t] = [1, 2, 3]"
    assert Macro.to_string(quote do: (x ++ y) ++ z) == "(x ++ y) ++ z"
  end

  test "unary ops to string" do
    assert Macro.to_string(quote do: not 1) == "not 1"
    assert Macro.to_string(quote do: not foo) == "not foo"
    assert Macro.to_string(quote do: -1) == "-1"
    assert Macro.to_string(quote do: !(foo > bar)) == "!(foo > bar)"
    assert Macro.to_string(quote do: @foo(bar)) == "@foo(bar)"
    assert Macro.to_string(quote do: identity(&1)) == "identity(&1)"
    assert Macro.to_string(quote do: identity(&foo)) == "identity(&foo)"
  end

  test "access to string" do
    assert Macro.to_string(quote do: a[b]) == "a[b]"
    assert Macro.to_string(quote do: a[1 + 2]) == "a[1 + 2]"
  end

  test "kw list to string" do
    assert Macro.to_string(quote do: [a: a, b: b]) == "[a: a, b: b]"
    assert Macro.to_string(quote do: [a: 1, b: 1 + 2]) == "[a: 1, b: 1 + 2]"
    assert Macro.to_string(quote do: ["a.b": 1, c: 1 + 2]) == "[\"a.b\": 1, c: 1 + 2]"
  end

  test "interpolation to string" do
    assert Macro.to_string(quote do: "foo#{bar}baz") == ~S["foo#{bar}baz"]
  end

  test "charlist to string" do
    assert Macro.to_string(quote do: []) == "[]"
    assert Macro.to_string(quote do: 'abc') == "'abc'"
  end

  test "last arg kw list to string" do
    assert Macro.to_string(quote do: foo([])) == "foo([])"
    assert Macro.to_string(quote do: foo(x: y)) == "foo(x: y)"
    assert Macro.to_string(quote do: foo(x: 1 + 2)) == "foo(x: 1 + 2)"
    assert Macro.to_string(quote do: foo(x: y, p: q)) == "foo(x: y, p: q)"
    assert Macro.to_string(quote do: foo(a, x: y, p: q)) == "foo(a, x: y, p: q)"
    assert Macro.to_string(quote do: {[]}) == "{[]}"
    assert Macro.to_string(quote do: {[a: b]}) == "{[a: b]}"
    assert Macro.to_string(quote do: {x, a: b}) == "{x, [a: b]}"
  end

  test "to string with fun" do
    assert Macro.to_string(quote(do: foo(1, 2, 3)), fn _, string -> ":#{string}:" end) ==
           ":foo(:1:, :2:, :3:):"

    assert Macro.to_string(quote(do: Bar.foo(1, 2, 3)), fn _, string -> ":#{string}:" end) ==
           "::Bar:.foo(:1:, :2:, :3:):"
  end

  ## validate

  test "validate" do
    ref = make_ref()

    assert Macro.validate(1) == :ok
    assert Macro.validate(1.0) == :ok
    assert Macro.validate(:foo) == :ok
    assert Macro.validate("bar") == :ok
    assert Macro.validate(self()) == :ok
    assert Macro.validate({1, 2}) == :ok
    assert Macro.validate({:foo, [], :baz}) == :ok
    assert Macro.validate({:foo, [], []}) == :ok
    assert Macro.validate([1, 2, 3]) == :ok

    assert Macro.validate(<<0::4>>) == {:error, <<0::4>>}
    assert Macro.validate(ref) == {:error, ref}
    assert Macro.validate({1, ref}) == {:error, ref}
    assert Macro.validate({ref, 2}) == {:error, ref}
    assert Macro.validate([1, ref, 3]) == {:error, ref}
    assert Macro.validate({:foo, [], 0}) == {:error, {:foo, [], 0}}
    assert Macro.validate({:foo, 0, []}) == {:error, {:foo, 0, []}}
  end

  ## decompose_call

  test "decompose call" do
    assert Macro.decompose_call(quote do: foo) == {:foo, []}
    assert Macro.decompose_call(quote do: foo()) == {:foo, []}
    assert Macro.decompose_call(quote do: foo(1, 2, 3)) == {:foo, [1, 2, 3]}
    assert Macro.decompose_call(quote do: M.N.foo(1, 2, 3)) ==
           {{:__aliases__, [alias: false], [:M, :N]}, :foo, [1, 2, 3]}
    assert Macro.decompose_call(quote do: :foo.foo(1, 2, 3)) ==
           {:foo, :foo, [1, 2, 3]}
    assert Macro.decompose_call(quote do: 1.(1, 2, 3)) == :error
    assert Macro.decompose_call(quote do: "some string") == :error
  end

  ## env

  test "env stacktrace" do
    env = %{__ENV__ | file: "foo", line: 12}
    assert Macro.Env.stacktrace(env) ==
           [{__MODULE__, :"test env stacktrace", 1, [file: "foo", line: 12]}]

    env = %{env | function: nil}
    assert Macro.Env.stacktrace(env) ==
           [{__MODULE__, :__MODULE__, 0, [file: "foo", line: 12]}]

    env = %{env | module: nil}
    assert Macro.Env.stacktrace(env) ==
           [{:elixir_compiler, :__FILE__, 1, [file: "foo", line: 12]}]
  end

  test "context modules" do
    defmodule Foo.Bar do
      assert __MODULE__ in __ENV__.context_modules
    end
  end

  ## pipe/unpipe

  test "pipe" do
    assert Macro.pipe(1, quote(do: foo), 0) == quote(do: foo(1))
    assert Macro.pipe(1, quote(do: foo(2)), 0) == quote(do: foo(1, 2))
    assert Macro.pipe(1, quote(do: foo), -1) == quote(do: foo(1))
    assert Macro.pipe(2, quote(do: foo(1)), -1) == quote(do: foo(1, 2))

    assert_raise ArgumentError, ~r"cannot pipe 1 into 2", fn ->
      Macro.pipe(1, 2, 0)
    end

    assert_raise ArgumentError, ~r"cannot pipe 1 into {:ok}", fn ->
      Macro.pipe(1, {:ok}, 0)
    end

    assert_raise ArgumentError, ~r"cannot pipe 1 into 1 \+ 1", fn ->
      Macro.pipe(1, quote(do: 1 + 1), 0) == quote(do: foo(1))
    end
  end

  test "unpipe" do
    assert Macro.unpipe(quote(do: foo)) == quote(do: [{foo, 0}])
    assert Macro.unpipe(quote(do: foo |> bar)) == quote(do: [{foo, 0}, {bar, 0}])
    assert Macro.unpipe(quote(do: foo |> bar |> baz)) == quote(do: [{foo, 0}, {bar, 0}, {baz, 0}])
  end

  ## traverse/pre/postwalk

  test "traverse" do
    assert traverse({:foo, [], nil}) ==
           [{:foo, [], nil}, {:foo, [], nil}]

    assert traverse({:foo, [], [1, 2, 3]}) ==
           [{:foo, [], [1, 2, 3]}, 1, 1, 2, 2, 3, 3, {:foo, [], [1, 2, 3]}]

    assert traverse({{:., [], [:foo, :bar]}, [], [1, 2, 3]}) ==
           [{{:., [], [:foo, :bar]}, [], [1, 2, 3]}, {:., [], [:foo, :bar]}, :foo, :foo, :bar, :bar, {:., [], [:foo, :bar]},
            1, 1, 2, 2, 3, 3, {{:., [], [:foo, :bar]}, [], [1, 2, 3]}]

    assert traverse({[1, 2, 3], [4, 5, 6]}) ==
           [{[1, 2, 3], [4, 5, 6]}, [1, 2, 3], 1, 1, 2, 2, 3, 3, [1, 2, 3],
            [4, 5, 6], 4, 4, 5, 5, 6, 6, [4, 5, 6], {[1, 2, 3], [4, 5, 6]}]
  end

  # Accumulates every node on both the way down and the way up.
  defp traverse(ast) do
    Macro.traverse(ast, [], &{&1, [&1|&2]}, &{&1, [&1|&2]}) |> elem(1) |> Enum.reverse
  end

  test "prewalk" do
    assert prewalk({:foo, [], nil}) ==
           [{:foo, [], nil}]

    assert prewalk({:foo, [], [1, 2, 3]}) ==
           [{:foo, [], [1, 2, 3]}, 1, 2, 3]

    assert prewalk({{:., [], [:foo, :bar]}, [], [1, 2, 3]}) ==
           [{{:., [], [:foo, :bar]}, [], [1, 2, 3]}, {:., [], [:foo, :bar]}, :foo, :bar, 1, 2, 3]

    assert prewalk({[1, 2, 3], [4, 5, 6]}) ==
           [{[1, 2, 3], [4, 5, 6]}, [1, 2, 3], 1, 2, 3, [4, 5, 6], 4, 5, 6]
  end

  defp prewalk(ast) do
    Macro.prewalk(ast, [], &{&1, [&1|&2]}) |> elem(1) |> Enum.reverse
  end

  test "postwalk" do
    assert postwalk({:foo, [], nil}) ==
           [{:foo, [], nil}]

    assert postwalk({:foo, [], [1, 2, 3]}) ==
           [1, 2, 3, {:foo, [], [1, 2, 3]}]

    assert postwalk({{:., [], [:foo, :bar]}, [], [1, 2, 3]}) ==
           [:foo, :bar, {:., [], [:foo, :bar]}, 1, 2, 3, {{:., [], [:foo, :bar]}, [], [1, 2, 3]}]

    assert postwalk({[1, 2, 3], [4, 5, 6]}) ==
           [1, 2, 3, [1, 2, 3], 4, 5, 6, [4, 5, 6], {[1, 2, 3], [4, 5, 6]}]
  end

  defp postwalk(ast) do
    Macro.postwalk(ast, [], &{&1, [&1|&2]}) |> elem(1) |> Enum.reverse
  end

  test "underscore" do
    assert Macro.underscore("foo") == "foo"
    assert Macro.underscore("foo_bar") == "foo_bar"
    assert Macro.underscore("Foo") == "foo"
    assert Macro.underscore("FooBar") == "foo_bar"
    assert Macro.underscore("FOOBar") == "foo_bar"
    assert Macro.underscore("FooBAR") == "foo_bar"
    assert Macro.underscore("FoBaZa") == "fo_ba_za"
    assert Macro.underscore("Foo.Bar") == "foo/bar"
    assert Macro.underscore(Foo.Bar) == "foo/bar"
    assert Macro.underscore("API.V1.User") == "api/v1/user"
    assert Macro.underscore("") == ""
  end

  test "camelize" do
    assert Macro.camelize("Foo") == "Foo"
    assert Macro.camelize("FooBar") == "FooBar"
    assert Macro.camelize("foo") == "Foo"
    assert Macro.camelize("foo_bar") == "FooBar"
    assert Macro.camelize("foo_") == "Foo"
    assert Macro.camelize("_foo") == "Foo"
    assert Macro.camelize("foo__bar") == "FooBar"
    assert Macro.camelize("foo/bar") == "Foo.Bar"
    assert Macro.camelize("") == ""
  end
end
| 33.023364 | 124 | 0.562521 |
9e612d3c5bb6ffa810ee53f60adbe1f700a1bf7e | 1,927 | exs | Elixir | clients/double_click_bid_manager/mix.exs | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/double_click_bid_manager/mix.exs | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/double_click_bid_manager/mix.exs | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DoubleClickBidManager.Mixfile do
  use Mix.Project

  @version "0.20.1"
  # Repository URL, shared by :source_url and the package's GitHub link.
  @github_url "https://github.com/googleapis/elixir-google-api/tree/master/clients/double_click_bid_manager"

  # Mix project definition; values are identical to the generated original.
  def project() do
    [
      app: :google_api_double_click_bid_manager,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: @github_url
    ]
  end

  def application(), do: [extra_applications: [:logger]]

  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  defp description() do
    """
    DoubleClick Bid Manager API client library. DoubleClick Bid Manager API allows users to manage and create campaigns and reports.
    """
  end

  # Hex package metadata.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => @github_url,
        "Homepage" => "https://developers.google.com/bid-manager/"
      }
    ]
  end
end
| 28.761194 | 132 | 0.668915 |
9e614db3b1b01f2387134f8e3d16cd26ff41903f | 755 | ex | Elixir | lib/mechanics/units.ex | cruessler/lafamiglia | 084915a2d44a5e69fb6ad9321eac08ced0e3016a | [
"MIT"
] | 5 | 2016-10-20T10:00:59.000Z | 2017-11-19T08:14:18.000Z | lib/mechanics/units.ex | cruessler/lafamiglia | 084915a2d44a5e69fb6ad9321eac08ced0e3016a | [
"MIT"
] | 39 | 2020-04-22T05:27:32.000Z | 2022-03-13T17:22:26.000Z | lib/mechanics/units.ex | cruessler/lafamiglia | 084915a2d44a5e69fb6ad9321eac08ced0e3016a | [
"MIT"
] | null | null | null | defmodule LaFamiglia.Mechanics.Units do
@moduledoc """
All build times are specified in microseconds.
"""
alias LaFamiglia.Unit
def units do
[
%Unit{
id: 1,
key: :unit_1,
build_time: 10_000_000,
costs: %{
resource_1: 1,
resource_2: 0,
resource_3: 1
},
supply: 5,
speed: 2,
attack: 2,
defense: 2,
load: 10
},
%Unit{
id: 2,
key: :unit_2,
build_time: 43_400_000_000,
costs: %{
resource_1: 50,
resource_2: 50,
resource_3: 50
},
supply: 20,
speed: 1,
attack: 5,
defense: 4,
load: 0
}
]
end
end
| 17.55814 | 48 | 0.451656 |
9e616ad3bafb1ba4bfe06d6e43ea778bf374e997 | 155 | exs | Elixir | test/friends_app_test.exs | Gabriel-Ayala/FriendsApp | 0795b87b7b9562f89ecb15794a05ecc8801c9614 | [
"MIT"
] | 1 | 2020-10-28T01:16:50.000Z | 2020-10-28T01:16:50.000Z | test/friends_app_test.exs | Gabriel-Ayala/FriendsApp | 0795b87b7b9562f89ecb15794a05ecc8801c9614 | [
"MIT"
] | null | null | null | test/friends_app_test.exs | Gabriel-Ayala/FriendsApp | 0795b87b7b9562f89ecb15794a05ecc8801c9614 | [
"MIT"
] | null | null | null | defmodule FriendsAppTest do
use ExUnit.Case
doctest FriendsApp
test "greets the world" do
assert FriendsApp.hello() == :world
end
end
| 17.222222 | 40 | 0.690323 |
9e617744a3eabddd8468d780d04412df47841836 | 5,784 | exs | Elixir | .credo.exs | mirego/sigaws | d16d6bc72859ef9664f3892dad001e628e6263e3 | [
"MIT"
] | 8 | 2017-04-18T05:28:21.000Z | 2022-01-20T16:32:35.000Z | .credo.exs | mirego/sigaws | d16d6bc72859ef9664f3892dad001e628e6263e3 | [
"MIT"
] | 11 | 2017-04-09T18:51:33.000Z | 2021-11-11T00:10:17.000Z | .credo.exs | mirego/sigaws | d16d6bc72859ef9664f3892dad001e628e6263e3 | [
"MIT"
] | 14 | 2017-08-06T22:11:46.000Z | 2022-03-17T18:24:49.000Z | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
included: ["lib/", "src/", "web/", "apps/"],
excluded: [~r"/_build/", ~r"/deps/"]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
requires: [],
#
# Credo automatically checks for updates, like e.g. Hex does.
# You can disable this behaviour below:
check_for_updates: true,
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
strict: false,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.LineEndings},
{Credo.Check.Consistency.MultiAliasImportRequireUse},
{Credo.Check.Consistency.ParameterPatternMatching},
{Credo.Check.Consistency.SpaceAroundOperators},
{Credo.Check.Consistency.SpaceInParentheses},
{Credo.Check.Consistency.TabsOrSpaces},
# For some checks, like AliasUsage, you can only customize the priority
# Priority values are: `low, normal, high, higher`
{Credo.Check.Design.AliasUsage, priority: :low},
# For others you can set parameters
# If you don't want the `setup` and `test` macro calls in ExUnit tests
# or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
# set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
{Credo.Check.Design.DuplicatedCode, excluded_macros: []},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
{Credo.Check.Design.TagTODO, exit_status: 2},
{Credo.Check.Design.TagFIXME},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 85},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc},
{Credo.Check.Readability.ModuleNames},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.ParenthesesInCondition},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine},
{Credo.Check.Readability.TrailingWhiteSpace},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Readability.Semicolons},
{Credo.Check.Readability.SpaceAfterCommas},
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity, max_arity: 8},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart},
{Credo.Check.Refactor.UnlessWithElse},
{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
# Controversial and experimental checks (opt-in, just remove `, false`)
#
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
# Deprecated checks (these will be deleted after a grace period)
{Credo.Check.Readability.Specs, false},
{Credo.Check.Warning.NameRedeclarationByAssignment, false},
{Credo.Check.Warning.NameRedeclarationByCase, false},
{Credo.Check.Warning.NameRedeclarationByDef, false},
{Credo.Check.Warning.NameRedeclarationByFn, false},
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
| 41.913043 | 80 | 0.662172 |
9e617799c5a44c6556c81a49bd9fea3f5f0b1acf | 5,441 | ex | Elixir | lib/oli/delivery/page/page_context.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 1 | 2022-03-17T20:35:47.000Z | 2022-03-17T20:35:47.000Z | lib/oli/delivery/page/page_context.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | lib/oli/delivery/page/page_context.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | null | null | null | defmodule Oli.Delivery.Page.PageContext do
@moduledoc """
Defines the context required to render a page in delivery mode.
"""
@enforce_keys [
:review_mode,
:page,
:progress_state,
:resource_attempts,
:activities,
:objectives,
:latest_attempts
]
defstruct [
:review_mode,
:page,
:progress_state,
:resource_attempts,
:activities,
:objectives,
:latest_attempts
]
alias Oli.Delivery.Attempts.PageLifecycle
alias Oli.Delivery.Attempts.PageLifecycle.{AttemptState, HistorySummary}
alias Oli.Delivery.Page.ActivityContext
alias Oli.Delivery.Page.PageContext
alias Oli.Publishing.DeliveryResolver
alias Oli.Delivery.Attempts.Core, as: Attempts
alias Oli.Delivery.Page.ObjectivesRollup
alias Oli.Delivery.Sections.Section
@doc """
Creates the page context required to render a page for reviewing a historical
attempt.
The key task performed here is the resolution of all referenced objectives
and activities that may be present in the content of the page. This
information is collected and then assembled in a fashion that can be given
to a renderer.
"""
@spec create_for_review(String.t(), String.t(), Oli.Accounts.User) ::
%PageContext{}
def create_for_review(section_slug, attempt_guid, _) do
{progress_state, resource_attempts, latest_attempts, activities} =
case PageLifecycle.review(attempt_guid) do
{:ok,
{state,
%AttemptState{resource_attempt: resource_attempt, attempt_hierarchy: latest_attempts}}} ->
assemble_final_context(
state,
resource_attempt,
latest_attempts,
resource_attempt.revision
)
{:error, _} ->
{:error, [], %{}}
end
page_revision = hd(resource_attempts).revision
%PageContext{
review_mode: true,
page: page_revision,
progress_state: progress_state,
resource_attempts: resource_attempts,
activities: activities,
objectives:
rollup_objectives(page_revision, latest_attempts, DeliveryResolver, section_slug),
latest_attempts: latest_attempts
}
end
@doc """
Creates the page context required to render a page for visiting a current or new
attempt.
The key task performed here is the resolution of all referenced objectives
and activities that may be present in the content of the page. This
information is collected and then assembled in a fashion that can be given
to a renderer.
"""
def create_for_visit(
%Section{slug: section_slug, id: section_id},
page_slug,
user
) do
# resolve the page revision per section
page_revision = DeliveryResolver.from_revision_slug(section_slug, page_slug)
Attempts.track_access(page_revision.resource_id, section_id, user.id)
activity_provider = &Oli.Delivery.ActivityProvider.provide/3
{progress_state, resource_attempts, latest_attempts, activities} =
case PageLifecycle.visit(
page_revision,
section_slug,
user.id,
activity_provider
) do
{:ok, {:not_started, %HistorySummary{resource_attempts: resource_attempts}}} ->
{:not_started, resource_attempts, %{}, nil}
{:ok,
{state,
%AttemptState{resource_attempt: resource_attempt, attempt_hierarchy: latest_attempts}}} ->
assemble_final_context(state, resource_attempt, latest_attempts, page_revision)
{:error, _} ->
{:error, [], %{}}
end
# Fetch the revision pinned to the resource attempt if it was revised since this attempt began. This
# is what enables existing attempts that are being revisited after a change was published to the page
# to display the old content
page_revision =
if progress_state == :revised or progress_state == :in_review do
Oli.Resources.get_revision!(hd(resource_attempts).revision_id)
else
page_revision
end
%PageContext{
review_mode: false,
page: page_revision,
progress_state: progress_state,
resource_attempts: resource_attempts,
activities: activities,
objectives:
rollup_objectives(page_revision, latest_attempts, DeliveryResolver, section_slug),
latest_attempts: latest_attempts
}
end
defp assemble_final_context(state, resource_attempt, latest_attempts, %{
content: %{"advancedDelivery" => true}
}) do
{state, [resource_attempt], latest_attempts, latest_attempts}
end
defp assemble_final_context(state, resource_attempt, latest_attempts, page_revision) do
{state, [resource_attempt], latest_attempts,
ActivityContext.create_context_map(page_revision.graded, latest_attempts)}
end
# for a map of activity ids to latest attempt tuples (where the first tuple item is the activity attempt)
# return the parent objective revisions of all attached objectives
# if an attached objective is a parent, include that in the return list
defp rollup_objectives(%{content: %{"advancedDelivery" => true}}, _, _, _) do
[]
end
defp rollup_objectives(page_rev, latest_attempts, resolver, section_slug) do
activity_revisions =
Enum.map(latest_attempts, fn {_, {%{revision: revision}, _}} -> revision end)
ObjectivesRollup.rollup_objectives(page_rev, activity_revisions, resolver, section_slug)
end
end
| 33.380368 | 107 | 0.70079 |
9e61a09062c102468af2429c3428980ea6fd7fbb | 495 | ex | Elixir | spec/espec_phoenix_extend.ex | enixdark/audit_api | 3aa6e9169cd9e80f06d91e4104438398a012a86f | [
"MIT"
] | 1 | 2018-01-20T00:58:33.000Z | 2018-01-20T00:58:33.000Z | spec/espec_phoenix_extend.ex | enixdark/audit_api | 3aa6e9169cd9e80f06d91e4104438398a012a86f | [
"MIT"
] | null | null | null | spec/espec_phoenix_extend.ex | enixdark/audit_api | 3aa6e9169cd9e80f06d91e4104438398a012a86f | [
"MIT"
] | null | null | null | defmodule ESpec.Phoenix.Extend do
def model do
quote do
alias AuditApi.Repo
end
end
def controller do
quote do
alias AuditApi.Repo
import AuditApi.Router.Helpers
end
end
def request do
quote do
alias AuditApi.Repo
import AuditApi.Router.Helpers
end
end
def view do
quote do
import AuditApi.Router.Helpers
end
end
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 15.46875 | 50 | 0.656566 |
9e61b4b855721a71a5ef79c3b4439ade227e9741 | 6,694 | ex | Elixir | clients/url_shortener/lib/google_api/url_shortener/v1/api/url.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/url_shortener/lib/google_api/url_shortener/v1/api/url.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/url_shortener/lib/google_api/url_shortener/v1/api/url.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# NOTE: auto-generated module (swagger-codegen); edits here will be lost on
# regeneration.
defmodule GoogleApi.UrlShortener.V1.Api.Url do
  @moduledoc """
  API calls for all endpoints tagged `Url`.
  """

  alias GoogleApi.UrlShortener.V1.Connection
  alias GoogleApi.Gax.{Request, Response}

  @doc """
  Expands a short URL or gets creation time and analytics.

  ## Parameters

  - connection (GoogleApi.UrlShortener.V1.Connection): Connection to server
  - short_url (String.t): The short URL, including the protocol.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :projection (String.t): Additional information to return.

  ## Returns

  {:ok, %GoogleApi.UrlShortener.V1.Model.Url{}} on success
  {:error, info} on failure
  """
  @spec urlshortener_url_get(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.UrlShortener.V1.Model.Url.t()} | {:error, Tesla.Env.t()}
  def urlshortener_url_get(connection, short_url, optional_params \\ [], opts \\ []) do
    # Maps each supported option to where it belongs in the request (query here).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :projection => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/url")
      # shortUrl is the one required query parameter for GET /url.
      |> Request.add_param(:query, :shortUrl, short_url)
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.UrlShortener.V1.Model.Url{}])
  end

  @doc """
  Creates a new short URL.

  ## Parameters

  - connection (GoogleApi.UrlShortener.V1.Connection): Connection to server
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :body (Url):

  ## Returns

  {:ok, %GoogleApi.UrlShortener.V1.Model.Url{}} on success
  {:error, info} on failure
  """
  @spec urlshortener_url_insert(Tesla.Env.client(), keyword()) ::
          {:ok, GoogleApi.UrlShortener.V1.Model.Url.t()} | {:error, Tesla.Env.t()}
  def urlshortener_url_insert(connection, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      # The Url payload travels in the POST body rather than the query string.
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/url")
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.UrlShortener.V1.Model.Url{}])
  end

  @doc """
  Retrieves a list of URLs shortened by a user.

  ## Parameters

  - connection (GoogleApi.UrlShortener.V1.Connection): Connection to server
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :projection (String.t): Additional information to return.
    - :start-token (String.t): Token for requesting successive pages of results.

  ## Returns

  {:ok, %GoogleApi.UrlShortener.V1.Model.UrlHistory{}} on success
  {:error, info} on failure
  """
  @spec urlshortener_url_list(Tesla.Env.client(), keyword()) ::
          {:ok, GoogleApi.UrlShortener.V1.Model.UrlHistory.t()} | {:error, Tesla.Env.t()}
  def urlshortener_url_list(connection, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :projection => :query,
      # Quoted atom because "start-token" is not a valid bare atom literal.
      :"start-token" => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/url/history")
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.UrlShortener.V1.Model.UrlHistory{}])
  end
end
9e61cfe45b253ad28c1ff1749e7bd72697b446d1 | 4,540 | ex | Elixir | lib/mnemonix/application.ex | christhekeele/mnemonix | 41c81b00b89562250ea451035fd34bf775173e50 | [
"MIT"
] | 36 | 2016-10-30T21:40:19.000Z | 2021-05-03T10:51:50.000Z | lib/mnemonix/application.ex | christhekeele/mnemonix | 41c81b00b89562250ea451035fd34bf775173e50 | [
"MIT"
] | 52 | 2016-10-30T20:57:35.000Z | 2017-12-28T03:39:57.000Z | lib/mnemonix/application.ex | christhekeele/mnemonix | 41c81b00b89562250ea451035fd34bf775173e50 | [
"MIT"
] | 5 | 2017-01-25T22:02:58.000Z | 2018-01-30T14:27:55.000Z | defmodule Mnemonix.Application do
@moduledoc """
Automatically starts stores when your application starts.
Mnemonix can manage your stores for you. To do so, it looks in your config files for named stores:
config :mnemonix, stores: [:foo, :bar]
For all stores so listed, it will check for store-specific configuration:
config :mnemonix, :foo, {Memonix.ETS.Store, table: :my_ets_table, name: :my_ets}
If no configuration is found for a named store, it will use the default configuration specified
in `default/0`.
The name of the store in your config will be the reference you pass to `Mnemonix` to interact with it.
This can be overriden by providing a `:name` in the options.
Given the config above, `:foo` would refer to a default Map-backed store,
and `:bar` to an ETS-backed store named `:my_ets` that uses a table named `:my_ets_table`,
both available to you at boot time without writing a line of code:
Application.ensure_started(:mnemonix)
Mnemonix.put(:foo, :a, :b)
Mnemonix.get(:foo, :a)
#=> :b
Mnemonix.put(:my_ets, :a, :b)
Mnemonix.get(:my_ets, :a)
#=> :b
"""
use Application
@doc """
Starts the `:mnemonix` application.
Finds stores in your application configuration and brings them up when your app starts.
Reads from the `:mnemonix` application's `:stores` configuration
to detect store specifications to automatically supervise.
If a store named in the configuration has its own entry under the `:mnemonix` application configuration,
that specification will be used to configure the store. If no specification is provided, Mnemonix will use
the `default` specification documented in `default/0`.
### Examples
config :mnemonix, stores: [Foo, Bar]
config :mnemonix, Bar: {Mnemonix.Stores.ETS, table: Baz}
"""
@impl Application
@spec start(Application.start_type(), [Mnemonix.spec()]) ::
{:ok, pid} | {:error, reason :: term}
def start(_type, [default]) do
default
|> tree
|> Supervisor.start_link(name: Mnemonix.Supervisor, strategy: :rest_for_one)
end
@spec tree() :: [:supervisor.child_spec()]
def tree, do: specification() |> tree
@spec tree(Mnemonix.spec()) :: [:supervisor.child_spec()]
def tree(default), do: [
# prepare_supervisor_spec(
# Mnemonix.Application.Supervisor,
# [
# prepare_supervisor_spec(
# Mnemonix.Store.Expiry.Supervisor,
# [Mnemonix.Store.Expiry.Engine],
# strategy: :simple_one_for_one,
# )
# ],
# strategy: :one_for_one,
# ),
prepare_supervisor_spec(
Mnemonix.Store.Supervisor,
managed_stores(default),
strategy: :one_for_one
),
]
defp prepare_supervisor_spec(module, children, opts) do
%{
id: module,
start: {Supervisor, :start_link, [children, Keyword.put(opts, :name, module)]},
restart: :permanent,
type: :supervisor,
}
end
@doc """
Convenience function to preview the stores that `Mnemonix.Application` will manage for you.
"""
@spec managed_stores :: [Supervisor.child_spec()]
def managed_stores, do: specification() |> managed_stores
@doc """
Convenience function to see the configuration of the stores that `Mnemonix.Application` manages for you.
Provide a store specification to compare the generated configuration against
the `default` `specification/0` that Mnemonix uses by default.
"""
@spec managed_stores(Mnemonix.spec()) :: [Supervisor.child_spec()]
def managed_stores(default) do
:mnemonix
|> Application.get_env(:stores, [])
|> Enum.map(fn name ->
:mnemonix
|> Application.get_env(name, default)
|> prepare_child_spec(name)
end)
end
defp prepare_child_spec({impl, opts}, name) do
{impl, Keyword.put_new(opts, :name, name)}
end
@doc """
Convenience function to access the default `Mnemonix` store specification defined in its `mix.exs`.
This is the specification used for stores named in `config :mnemonix, :stores`
without corresponding configuration under `config :mnemonix, <store_name>`.
"""
@spec specification :: Mnemonix.spec()
def specification do
:mnemonix
|> Application.spec()
|> Keyword.get(:mod)
|> elem(1)
|> List.first()
end
@doc """
Convenience function to access the current hex version of the `Mnemonix` application.
"""
def version do
with {:ok, version} = :application.get_key(:mnemonix, :vsn), do: version
end
end
| 31.748252 | 108 | 0.678634 |
9e61d37e7177d404f2658746a04ee932202fc061 | 1,579 | ex | Elixir | lib/edgedb/types/config_memory.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | lib/edgedb/types/config_memory.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | lib/edgedb/types/config_memory.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | defmodule EdgeDB.ConfigMemory do
@moduledoc """
An immutable value represeting an EdgeDB `cfg::memory` value as a quantity of memory storage.
```elixir
iex(1)> {:ok, pid} = EdgeDB.start_link()
iex(2)> mem = EdgeDB.query_required_single!(pid, "SELECT <cfg::memory>'5KiB'")
#EdgeDB.ConfigMemory<"5KiB">
iex(3)> EdgeDB.ConfigMemory.bytes(mem)
5120
```
"""
defstruct [
:bytes
]
@typedoc """
An immutable value represeting an EdgeDB `cfg::memory` value as a quantity of memory storage.
"""
@opaque t() :: %__MODULE__{
bytes: pos_integer()
}
@doc """
Get a quantity of memory storage in bytes.
"""
@spec bytes(t()) :: pos_integer()
def bytes(%__MODULE__{bytes: bytes}) do
bytes
end
end
defimpl Inspect, for: EdgeDB.ConfigMemory do
import Inspect.Algebra
@kib 1024
@mib 1024 * @kib
@gib 1024 * @mib
@tib 1024 * @gib
@pib 1024 * @tib
@impl Inspect
def inspect(%EdgeDB.ConfigMemory{bytes: bytes}, _opts) do
bytes_repr =
cond do
bytes >= @pib and rem(bytes, @pib) == 0 ->
"#{div(bytes, @pib)}PiB"
bytes >= @tib and rem(bytes, @tib) == 0 ->
"#{div(bytes, @tib)}TiB"
bytes >= @gib and rem(bytes, @gib) == 0 ->
"#{div(bytes, @gib)}GiB"
bytes >= @mib and rem(bytes, @mib) == 0 ->
"#{div(bytes, @mib)}MiB"
bytes >= @kib and rem(bytes, @kib) == 0 ->
"#{div(bytes, @kib)}KiB"
true ->
"#{bytes}B"
end
concat(["#EdgeDB.ConfigMemory<\"", bytes_repr, "\">"])
end
end
| 22.884058 | 95 | 0.568714 |
9e61d874804ee6cf1eb2c0cb24a15b91e5ce5324 | 1,237 | ex | Elixir | lib/grizzly/zwave/commands/thermostat_setpoint_get.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/zwave/commands/thermostat_setpoint_get.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/zwave/commands/thermostat_setpoint_get.ex | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | [
"Apache-2.0"
] | null | null | null | defmodule Grizzly.ZWave.Commands.ThermostatSetpointGet do
@moduledoc """
This module implements command THERMOSTAT_SETPOINT_GET of the
COMMAND_CLASS_THERMOSTAT_SETPOINT command class.
This command is used to request the target value for a given setpoint type.
Params:
* `:type` - the setback type (required)
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, DecodeError}
alias Grizzly.ZWave.CommandClasses.ThermostatSetpoint
@type param :: {:type | ThermostatSetpoint.type()}
@impl true
@spec new([param()]) :: {:ok, Command.t()}
def new(params) do
command = %Command{
name: :thermostat_setpoint_get,
command_byte: 0x02,
command_class: ThermostatSetpoint,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
def encode_params(command) do
type_byte = ThermostatSetpoint.encode_type(Command.param!(command, :type))
<<0x00::size(4), type_byte::size(4)>>
end
@impl true
def decode_params(<<0x00::size(4), type_byte::size(4)>>) do
with {:ok, type} <- ThermostatSetpoint.decode_type(type_byte) do
{:ok, [type: type]}
else
{:error, %DecodeError{}} = error ->
error
end
end
end
| 24.254902 | 78 | 0.678254 |
9e61df20ee691653446749e54faac723880b91b6 | 1,208 | ex | Elixir | lib/sir_alex_web/router.ex | dnsbty/sir_alex | ab569dc7692826411877728444eaa00ec05767c2 | [
"MIT"
] | 3 | 2019-05-19T05:27:37.000Z | 2020-04-21T06:23:08.000Z | lib/sir_alex_web/router.ex | dnsbty/sir_alex | ab569dc7692826411877728444eaa00ec05767c2 | [
"MIT"
] | 3 | 2017-10-28T20:52:07.000Z | 2017-11-24T08:15:27.000Z | lib/sir_alex_web/router.ex | dnsbty/sir_alex | ab569dc7692826411877728444eaa00ec05767c2 | [
"MIT"
] | 1 | 2020-04-15T16:31:28.000Z | 2020-04-15T16:31:28.000Z | defmodule SirAlexWeb.Router do
use SirAlexWeb, :router
import SirAlexWeb.Plugs.CurrentUser
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :get_current_user
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", SirAlexWeb do
pipe_through :browser # Use the default browser stack
resources "/groups/:group_id/members", MemberController
delete "/groups/:group_id/members", MemberController, :leave
get "/groups/:group_id/requests", MemberRequestController, :index
put "/groups/:group_id/requests/:member_id/accept", MemberRequestController, :accept
put "/groups/:group_id/requests/:member_id/reject", MemberRequestController, :reject
resources "/groups", GroupController
resources "/users", UserController, only: [:new, :show]
get "/", PageController, :index
end
scope "/auth", SirAlexWeb do
pipe_through :browser
get "/login", AuthController, :login
get "/logout", AuthController, :logout
get "/:provider", AuthController, :request
get "/:provider/callback", AuthController, :callback
end
end
| 30.2 | 88 | 0.71606 |
9e61f2713ef5589cb87f2afc0482f2606682af05 | 554 | exs | Elixir | mix.exs | kianmeng/ssl_verify_fun.erl | b503f3e1a0397c495dca9930f131613190cea25b | [
"MIT"
] | 260 | 2015-01-02T12:59:27.000Z | 2022-03-13T00:43:38.000Z | mix.exs | kianmeng/ssl_verify_fun.erl | b503f3e1a0397c495dca9930f131613190cea25b | [
"MIT"
] | 107 | 2015-01-03T21:51:25.000Z | 2021-12-22T05:09:46.000Z | deps/ssl_verify_fun/mix.exs | carlosviana/blog | 1dcf58c3ca40bc3a7105d75de6f51954eb44bca8 | [
"MIT"
] | 34 | 2015-02-11T05:49:41.000Z | 2020-07-13T21:07:43.000Z | defmodule SSLVerifyFun.Mixfile do
use Mix.Project
def project do
[app: :ssl_verify_fun,
language: :erlang,
version: "1.1.6",
description: description(),
package: package()]
end
defp description() do
"""
SSL verification functions for Erlang
"""
end
defp package() do
[maintainers: ["Ilya Khaprov"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/deadtrickster/ssl_verify_fun.erl"},
files: ["src", "README.md", "LICENSE", "Makefile", "rebar.config", "mix.exs"]]
end
end
| 22.16 | 83 | 0.619134 |
9e6201e3a546700dbac9d8d105082e00971bfd6b | 2,592 | exs | Elixir | config/prod.exs | mitchellhenke/bus_kiosk | 9814b0b10190bb06c823b00315616391100c5bfa | [
"BSD-3-Clause"
] | 2 | 2020-02-21T15:40:27.000Z | 2020-12-06T21:50:39.000Z | config/prod.exs | mitchellhenke/bus_kiosk | 9814b0b10190bb06c823b00315616391100c5bfa | [
"BSD-3-Clause"
] | 3 | 2020-02-19T17:06:24.000Z | 2020-04-20T14:33:07.000Z | config/prod.exs | mitchellhenke/bus_kiosk | 9814b0b10190bb06c823b00315616391100c5bfa | [
"BSD-3-Clause"
] | null | null | null | use Mix.Config
# Production configuration for the BusKiosk application.
# NOTE(review): this file uses `use Mix.Config` (deprecated since Elixir 1.9);
# consider migrating to `import Config` — confirm against the project's Elixir version.
config :bus_kiosk,
ecto_repos: [BusKiosk.Repo],
# Fails at boot if MCTS_API_KEY is missing from the environment.
mcts_api_key: System.fetch_env!("MCTS_API_KEY"),
real_time_module: BusKiosk.RealTime
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :bus_kiosk, BusKioskWeb.Endpoint,
# Possibly not needed, but doesn't hurt
http: [port: {:system, "PORT"}, compress: true],
url: [host: "busmke.com", port: 80],
check_origin: ["https://busmke.com"],
force_ssl: [rewrite_on: [:x_forwarded_proto]],
secret_key_base: Map.fetch!(System.get_env(), "SECRET_KEY_BASE"),
server: true,
cache_static_manifest: "priv/static/cache_manifest.json",
live_view: [
signing_salt: System.fetch_env!("SIGNING_SALT")
]
config :bus_kiosk, BusKiosk.Repo,
url: System.get_env("DO_DATABASE_URL"),
# POOL_SIZE defaults to 10 connections when the variable is unset.
pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10"),
ssl: true,
types: BusKiosk.PostgresTypes
# Do not print debug messages in production
config :logger,
level: :info,
backends: [:console, Sentry.LoggerBackend]
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :bus_kiosk, BusKioskWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :bus_kiosk, BusKioskWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
| 35.027027 | 68 | 0.710262 |
9e621c73a4c7fc1b9b950da993c5299abe77416e | 1,248 | ex | Elixir | lib/blog_post_api_web/endpoint.ex | dannielb/blog-post-api | 214520beb57164375bc6596e85cbc42be67c0fb9 | [
"MIT"
] | null | null | null | lib/blog_post_api_web/endpoint.ex | dannielb/blog-post-api | 214520beb57164375bc6596e85cbc42be67c0fb9 | [
"MIT"
] | null | null | null | lib/blog_post_api_web/endpoint.ex | dannielb/blog-post-api | 214520beb57164375bc6596e85cbc42be67c0fb9 | [
"MIT"
] | null | null | null | defmodule BlogPostApiWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :blog_post_api
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
# NOTE(review): the signing salt is committed in source; rotate it if this
# repository is public.
@session_options [
store: :cookie,
key: "_blog_post_api_key",
signing_salt: "3qJ+3fZZ"
]
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :blog_post_api,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
plug Phoenix.CodeReloader
plug Phoenix.Ecto.CheckRepoStatus, otp_app: :blog_post_api
end
# Standard request pipeline: request-id tagging, endpoint telemetry,
# body parsing, method override, HEAD handling, session, then the router.
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug BlogPostApiWeb.Router
end
| 28.363636 | 63 | 0.716346 |
9e62245a7369be1af6fd17e98e77ae3505938414 | 207 | ex | Elixir | try-elixir/level1/calculating-investment.ex | kidchenko/playground | 750f1d12a793f6851df68bbd1b9d3ec32b5f70a3 | [
"MIT"
] | 4 | 2016-11-10T02:29:32.000Z | 2017-08-24T15:19:12.000Z | try-elixir/level1/calculating-investment.ex | kidchenko/playground | 750f1d12a793f6851df68bbd1b9d3ec32b5f70a3 | [
"MIT"
] | 13 | 2019-09-16T20:01:18.000Z | 2022-02-13T11:00:49.000Z | try-elixir/level1/calculating-investment.ex | kidchenko/playground | 750f1d12a793f6851df68bbd1b9d3ec32b5f70a3 | [
"MIT"
] | 1 | 2022-02-24T06:35:25.000Z | 2022-02-24T06:35:25.000Z | defmodule Account do
# Returns the initial amount plus one period of simple interest:
# initial + (initial * interest), e.g. investment_return(1000, 0.0001) == 1000.1.
def investment_return(initial, interest) do
initial + (initial * interest)
end
end
# Example script usage: compute and print the return for $1000 at rate 0.0001.
amount = Account.investment_return(1000, 0.0001)
IO.puts "Investment return: $#{amount}" | 25.875 | 48 | 0.724638 |
9e62269f77b6e1fd33a33779abe1ba8a09b03417 | 1,686 | ex | Elixir | clients/composer/lib/google_api/composer/v1beta1/model/store_environment_state_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/composer/lib/google_api/composer/v1beta1/model/store_environment_state_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/composer/lib/google_api/composer/v1beta1/model/store_environment_state_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Composer.V1beta1.Model.StoreEnvironmentStateResponse do
@moduledoc """
Store environment state response.
## Attributes
* `snapshotLocation` (*type:* `String.t`, *default:* `nil`) - The fully-resolved Cloud Storage location of the created snapshot, e.g.: "gs://my-bucket/snapshots/project_id/location/environment_uuid/timestamp". This field is populated only if the snapshot creation was successful.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:snapshotLocation => String.t() | nil
}
# Registers the model field for JSON (de)serialization; `field/1` is a macro
# provided by GoogleApi.Gax.ModelBase.
field(:snapshotLocation)
end
# Poison decoding delegates to `decode/2` generated on the model by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Composer.V1beta1.Model.StoreEnvironmentStateResponse do
def decode(value, options) do
GoogleApi.Composer.V1beta1.Model.StoreEnvironmentStateResponse.decode(value, options)
end
end
# Poison encoding delegates directly to the shared GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Composer.V1beta1.Model.StoreEnvironmentStateResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.87234 | 283 | 0.759786 |
9e622a13c527ff5a3cc12bb4903e60579743f13c | 604 | ex | Elixir | lib/ex_pwned/pastes.ex | Grantimus9/ex_pwned | e26035372edc35c0b5167446ee852f133800aa03 | [
"Apache-2.0"
] | 18 | 2017-03-04T11:35:34.000Z | 2020-04-29T22:24:26.000Z | lib/ex_pwned/pastes.ex | Grantimus9/ex_pwned | e26035372edc35c0b5167446ee852f133800aa03 | [
"Apache-2.0"
] | 11 | 2018-05-16T04:36:57.000Z | 2020-04-07T19:47:40.000Z | lib/ex_pwned/pastes.ex | Grantimus9/ex_pwned | e26035372edc35c0b5167446ee852f133800aa03 | [
"Apache-2.0"
] | 8 | 2018-05-16T18:15:51.000Z | 2020-06-23T16:58:52.000Z | defmodule ExPwned.Pastes do
@moduledoc """
Module to interact with the HIBP (Have I Been Pwned) API to retrieve pastes data.
"""
use ExPwned.Api
@doc """
The API takes a single parameter which is the email address to be searched for.
Unlike searching for breaches, usernames that are not email addresses cannot be searched for.
The email is not case sensitive and will be trimmed of leading or trailing white spaces.
The email should always be URL encoded.
## Examples
iex> ExPwned.Pastes.pasteaccount("[email protected]")
"""
# Issues GET pasteaccount/<account> via the do_get/1 helper injected by `use ExPwned.Api`.
def pasteaccount(account), do: do_get("pasteaccount/#{account}")
end
| 30.2 | 95 | 0.735099 |
9e6236e0b4146185e76c1f5933b7e65fed12f843 | 1,407 | exs | Elixir | test/unit/xsd/datatypes/any_uri_test.exs | marcelotto/rdf-ex | 12adce69eb2dbff027cbc83aaaf912067aea1b02 | [
"MIT"
] | 53 | 2017-06-25T22:20:44.000Z | 2020-04-27T17:27:51.000Z | test/unit/xsd/datatypes/any_uri_test.exs | marcelotto/rdf-ex | 12adce69eb2dbff027cbc83aaaf912067aea1b02 | [
"MIT"
] | 7 | 2017-06-25T00:29:11.000Z | 2020-03-11T00:23:47.000Z | test/unit/xsd/datatypes/any_uri_test.exs | rdf-elixir/rdf-ex | 7d4280ec9a912ef6ee9fc96ecdfdf26647016d6a | [
"MIT"
] | 3 | 2020-07-03T13:25:36.000Z | 2021-04-04T12:33:51.000Z | defmodule RDF.XSD.AnyURITest do
# Shared datatype test-case template: generates the standard test suite for
# xsd:anyURI from the declared facets and the valid/invalid fixtures below.
use RDF.XSD.Datatype.Test.Case,
datatype: RDF.XSD.AnyURI,
name: "anyURI",
primitive: true,
applicable_facets: [
RDF.XSD.Facets.MinLength,
RDF.XSD.Facets.MaxLength,
RDF.XSD.Facets.Length,
RDF.XSD.Facets.Pattern
],
facets: %{
max_length: nil,
min_length: nil,
length: nil,
pattern: nil
},
# Valid inputs may be strings, URI structs, RDF IRIs, or module atoms resolving
# to vocabulary terms; each maps to its expected {value, lexical, canonical} triple.
valid: %{
# input => { value, lexical, canonicalized }
"http://example.com/foo" =>
{URI.parse("http://example.com/foo"), nil, "http://example.com/foo"},
URI.parse("http://example.com/foo") =>
{URI.parse("http://example.com/foo"), nil, "http://example.com/foo"},
RDF.iri("http://example.com/foo") =>
{URI.parse("http://example.com/foo"), nil, "http://example.com/foo"},
RDF.List =>
{URI.parse("http://www.w3.org/1999/02/22-rdf-syntax-ns#List"), nil,
"http://www.w3.org/1999/02/22-rdf-syntax-ns#List"}
},
invalid: [42, 3.14, Foo, :foo, true, false]
describe "cast/1" do
test "casting an anyURI returns the input as it is" do
assert XSD.anyURI("http://example.com/") |> XSD.AnyURI.cast() ==
XSD.anyURI("http://example.com/")
end
test "casting an RDF.IRI" do
assert RDF.iri("http://example.com/") |> XSD.AnyURI.cast() ==
XSD.anyURI("http://example.com/")
end
end
end
| 31.977273 | 77 | 0.574982 |
9e6238c8f8a096ead330edc17eedd16ec7f8120f | 16,101 | ex | Elixir | tools/astarte_e2e/lib/astarte_e2e/client.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | tools/astarte_e2e/lib/astarte_e2e/client.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | tools/astarte_e2e/lib/astarte_e2e/client.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2020 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule AstarteE2E.Client do
@moduledoc """
Phoenix-channels WebSocket client used by the AstarteE2E end-to-end checks.

It joins a per-device room, installs device connection/disconnection and
incoming-data triggers, and measures the round-trip time between payloads
injected via `verify_device_payload/6` and the matching `incoming_data`
events, reporting health through `:telemetry` and `ServiceNotifier`.
"""
alias Phoenix.Channels.GenSocketClient
alias Phoenix.Channels.GenSocketClient.Transport.WebSocketClient
alias AstarteE2E.{Utils, Config, ServiceNotifier}
require Logger
# Delay between reconnection attempts; also used as the grace period for
# wait_for_connection callers.
@connection_backoff_ms 10_000
# Reconnection attempt budget; refilled on every successful connection.
@connection_attempts 10
# API
@doc "Starts the client process."
@spec start_link(Config.client_options()) :: GenServer.on_start()
def start_link(opts) do
url = Keyword.fetch!(opts, :url)
jwt = Keyword.fetch!(opts, :jwt)
realm = Keyword.fetch!(opts, :realm)
device_id = Keyword.fetch!(opts, :device_id)
check_repetitions = Keyword.fetch!(opts, :check_repetitions)
# Certificate verification can be disabled for test setups via :ignore_ssl_errors.
verify_option =
if Keyword.get(opts, :ignore_ssl_errors, false) do
:verify_none
else
:verify_peer
end
remote_device = [
url: url,
realm: realm,
jwt: jwt,
device_id: device_id,
check_repetitions: check_repetitions
]
with {:ok, pid} <-
GenSocketClient.start_link(
__MODULE__,
WebSocketClient,
remote_device,
[transport_opts: [ssl_verify: verify_option]],
name: via_tuple(realm, device_id)
) do
# Report status 0 (down) until the first verified round trip sets it to 1.
:telemetry.execute(
[:astarte_end_to_end, :astarte_platform],
%{status: 0}
)
Logger.info("Started process with pid #{inspect(pid)}.", tag: "client_started")
{:ok, pid}
end
end
# :transient restart: the supervisor restarts the client only on abnormal exit.
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :worker,
restart: :transient,
shutdown: 500
}
end
# Blocks (timeout :infinity) until the matching incoming_data event is
# observed, a request timeout fires, or the client is not connected.
@spec verify_device_payload(String.t(), String.t(), String.t(), String.t(), any(), integer()) ::
:ok
| {:error, :not_connected | :timeout}
def verify_device_payload(realm, device_id, interface_name, path, value, timestamp) do
via_tuple(realm, device_id)
|> GenSocketClient.call(
{:verify_payload, interface_name, path, value, timestamp},
:infinity
)
end
# Blocks until the socket is connected or the grace timer replies
# {:error, :not_connected} (see the matching handle_call/handle_info below).
def wait_for_connection(realm, device_id) do
via_tuple(realm, device_id)
|> GenSocketClient.call(:wait_for_connection, :infinity)
end
# Joins the room topic stored in the callback state; normalizes failures
# to {:error, :join_failed}.
defp join_topic(transport, state) do
topic =
state
|> Map.fetch!(:callback_state)
|> Map.fetch!(:topic)
Logger.info("Asking to join topic #{inspect(topic)}.", tag: "join_request")
case GenSocketClient.join(transport, topic) do
{:error, reason} ->
Logger.error("Cannot join topic #{inspect(topic)}. Reason: #{inspect(reason)}",
tag: "join_failed"
)
{:error, :join_failed}
{:ok, _ref} ->
Logger.info("Joined topic #{inspect(topic)}.", tag: "join_success")
{:ok, state}
end
end
# Installs the device connect/disconnect triggers plus a wildcard
# incoming-data trigger for the monitored device.
defp setup_watches(transport, state) do
callback_state =
state
|> Map.fetch!(:callback_state)
device_id = Map.fetch!(callback_state, :device_id)
device_triggers = [
%{
name: "connectiontrigger-#{device_id}",
device_id: device_id,
simple_trigger: %{
type: "device_trigger",
on: "device_connected",
device_id: device_id
}
},
%{
name: "disconnectiontrigger-#{device_id}",
device_id: device_id,
simple_trigger: %{
type: "device_trigger",
on: "device_disconnected",
device_id: device_id
}
}
]
data_triggers = [
%{
name: "valuetrigger-#{device_id}",
device_id: device_id,
simple_trigger: %{
type: "data_trigger",
on: "incoming_data",
interface_name: "*",
interface_major: 1,
match_path: "/*",
value_match_operator: "*"
}
}
]
with :ok <- install_triggers(device_triggers, transport, state),
:ok <- install_triggers(data_triggers, transport, state) do
Logger.info("Triggers installed.", tag: "triggers_installed")
:ok
else
{:error, reason} ->
Logger.warn("Failed to install triggers with reason: #{inspect(reason)}.",
tag: "install_triggers_failed"
)
{:error, reason}
end
end
# Pushes one "watch" request per trigger, stopping at the first failure.
defp install_triggers(triggers, transport, %{callback_state: %{topic: topic}} = _state) do
Enum.reduce_while(triggers, :ok, fn trigger, _acc ->
case GenSocketClient.push(transport, topic, "watch", trigger) do
{:error, reason} ->
Logger.warn("Watch failed with reason: #{inspect(reason)}.",
tag: "watch_failed"
)
{:halt, {:error, reason}}
{:ok, _ref} ->
Logger.info("Successful watch request.",
tag: "watch_success"
)
{:cont, :ok}
end
end)
end
# Builds a unique room topic per client instance (random suffix avoids clashes).
defp make_topic(realm, device_id) do
room_name = Utils.random_string()
"rooms:#{realm}:#{device_id}_#{room_name}"
end
# Registry-based process name, keyed by {realm, device_id}.
defp via_tuple(realm, device_id) do
{:via, Registry, {Registry.AstarteE2E, {:client, realm, device_id}}}
end
# Callbacks
# GenSocketClient callback: build the initial state and ask the transport to connect.
def init(opts) do
url = Keyword.fetch!(opts, :url)
realm = Keyword.fetch!(opts, :realm)
jwt = Keyword.fetch!(opts, :jwt)
device_id = Keyword.fetch!(opts, :device_id)
check_repetitions = Keyword.fetch!(opts, :check_repetitions)
topic = make_topic(realm, device_id)
callback_state = %{
device_id: device_id,
topic: topic
}
query_params = [realm: realm, token: jwt]
state = %{
callback_state: callback_state,
pending_requests: %{},
pending_messages: %{},
connection_attempts: @connection_attempts,
check_repetitions: check_repetitions,
waiting_for_connection: %{},
timeouts_to_crash: Config.client_max_timeouts!(),
connected: false
}
{:connect, url, query_params, state}
end
# On connect: refill the retry budget, mark connected and join the room topic.
# NOTE(review): waiting_for_connection is keyed by the callers' `from` tuples
# (see handle_call below), so this `self()` lookup likely never matches and
# waiters are only released by the expiration timer — confirm intent.
def handle_connected(transport, state) do
Logger.info("Connected.", tag: "client_connected")
waiting_for_connection = state.waiting_for_connection
new_waiting =
if Map.has_key?(waiting_for_connection, self()) do
{tref, new_waiting} = Map.pop!(waiting_for_connection, self())
:ok = Process.cancel_timer(tref, async: false, info: false)
new_waiting
else
waiting_for_connection
end
new_state = %{
state
| waiting_for_connection: new_waiting,
connection_attempts: @connection_attempts,
connected: true
}
{:ok, updated_state} = join_topic(transport, new_state)
{:ok, updated_state}
end
# On disconnect: report platform down and schedule a reconnect after the backoff.
def handle_disconnected(reason, state) do
:telemetry.execute(
[:astarte_end_to_end, :astarte_platform],
%{status: 0}
)
ServiceNotifier.notify_service_down("Client disconnected")
Logger.info("Disconnected with reason: #{inspect(reason)}.",
tag: "client_disconnected"
)
Process.send_after(self(), :try_connect, @connection_backoff_ms)
{:ok, %{state | connected: false}}
end
# After joining the room, install the watches; stop on failure.
def handle_joined(topic, _payload, transport, state) do
Logger.info("Joined topic #{inspect(topic)}.", tag: "topic_joined")
case setup_watches(transport, state) do
:ok -> {:ok, state}
{:error, reason} -> {:stop, reason, state}
end
end
def handle_channel_closed(topic, _payload, _transport, state) do
Logger.warn("Channel closed for #{inspect(topic)}.",
tag: "channel_closed"
)
{:ok, state}
end
# Incoming-data event: if a verify_payload call is already waiting on this
# {interface, path, value}, complete it and record the round-trip time;
# otherwise park the message in pending_messages until the matching call
# arrives or the message timeout fires.
def handle_message(
_topic,
_event,
%{
"event" => %{
"interface" => interface_name,
"path" => path,
"type" => "incoming_data",
"value" => value
}
} = _payload,
_transport,
state
) do
Logger.debug("Handling incoming data message.", tag: "handle_incoming_message")
:telemetry.execute([:astarte_end_to_end, :messages, :received], %{})
reception_timestamp = :erlang.monotonic_time(:millisecond)
%{
pending_messages: pending_messages,
pending_requests: pending_requests
} = state
if Map.has_key?(pending_requests, {interface_name, path, value}) do
{{timestamp, from, tref}, new_pending_requests} =
Map.pop(pending_requests, {interface_name, path, value})
:ok = Process.cancel_timer(tref, async: false, info: false)
Logger.debug("Timeout timer canceled successfully in handle_message.",
tag: "cancel_timer_success"
)
dt_ms = reception_timestamp - timestamp
new_state = Map.put(state, :pending_requests, new_pending_requests)
Logger.debug("Message verified. Round trip time = #{inspect(dt_ms)} ms.")
:telemetry.execute(
[:astarte_end_to_end, :messages, :round_trip_time],
%{duration_seconds: dt_ms / 1_000}
)
:telemetry.execute(
[:astarte_end_to_end, :astarte_platform],
%{status: 1}
)
ServiceNotifier.notify_service_up()
GenSocketClient.reply(from, :ok)
{:ok, new_state}
else
timeout_ms =
Config.client_timeout_s!()
|> Utils.to_ms()
key = {interface_name, path, value}
tref = Process.send_after(self(), {:message_timeout, key}, timeout_ms)
new_pending_messages = Map.put(pending_messages, key, {reception_timestamp, tref})
new_state = Map.put(state, :pending_messages, new_pending_messages)
{:ok, new_state}
end
end
# Catch-all: ignore every other channel message.
def handle_message(_topic, event, payload, _transport, state) do
Logger.debug("Ignoring msg. Event: #{inspect(event)}, payload: #{inspect(payload)}.")
{:ok, state}
end
def handle_reply(_topic, event, payload, _transport, state) do
Logger.debug("Handling reply. Event: #{inspect(event)}, payload: #{inspect(payload)}.")
{:ok, state}
end
# Join errors are treated as unrecoverable (bad realm/token): stop the VM.
def handle_join_error(topic, _payload, _transport, state) do
Logger.error(
"Join topic #{inspect(topic)} failed. Please, check the realm and the claims you used to generate the token.",
tag: "join_error"
)
System.stop(1)
{:stop, :join_error, state}
end
# A parked incoming message was never claimed by a verify_payload call: drop it.
def handle_info(
{:message_timeout, key},
_transport,
%{pending_messages: pending_messages} = state
) do
:telemetry.execute(
[:astarte_end_to_end, :astarte_platform],
%{status: 0}
)
ServiceNotifier.notify_service_down("Message timeout")
Logger.warn("Incoming message timeout. Key = #{inspect(key)}",
tag: "message_timeout"
)
{{_ts, _tref}, new_pending_messages} = Map.pop(pending_messages, key)
{:ok, %{state | pending_messages: new_pending_messages}}
end
# Request timeout with an exhausted budget (timeouts_to_crash == 0): reply
# the caller, then crash the client so the supervisor policy applies.
def handle_info(
{:request_timeout, key},
_transport,
%{pending_requests: pending_requests, timeouts_to_crash: 0} = state
) do
:telemetry.execute(
[:astarte_end_to_end, :astarte_platform],
%{status: 0}
)
ServiceNotifier.notify_service_down("Maximum number of request timeout reached")
Logger.warn(
"Maximum number of requests timeout reached. The websocket client is going to crash.",
tag: "maximum_timeout_number_reached"
)
{{_ts, from, _tref}, _new_pending_requests} = Map.pop(pending_requests, key)
:ok = GenSocketClient.reply(from, {:error, :timeout})
{:stop, :maximum_timeout_number_reached, state}
end
# Request timeout with budget remaining: reply {:error, :timeout} and decrement.
def handle_info(
{:request_timeout, key},
_transport,
%{pending_requests: pending_requests} = state
) do
:telemetry.execute(
[:astarte_end_to_end, :astarte_platform],
%{status: 0}
)
ServiceNotifier.notify_service_down("Request timeout")
Logger.warn("Request timed out. Key = #{inspect(key)}", tag: "request_timeout")
{{_ts, from, _tref}, new_pending_requests} = Map.pop(pending_requests, key)
remaining_timeouts_to_crash = state.timeouts_to_crash - 1
:ok = GenSocketClient.reply(from, {:error, :timeout})
{:ok,
%{
state
| pending_requests: new_pending_requests,
timeouts_to_crash: remaining_timeouts_to_crash
}}
end
# With check_repetitions: :infinity, reconnect forever without counting attempts.
def handle_info(:try_connect, _transport, %{check_repetitions: :infinity} = state) do
{:connect, state}
end
# Otherwise consume one attempt per reconnect; stop the VM when exhausted.
def handle_info(:try_connect, _transport, state) do
if state.connection_attempts > 0 do
updated_attempts = state.connection_attempts - 1
updated_state = %{state | connection_attempts: updated_attempts}
{:connect, updated_state}
else
Logger.warn(
"Cannot establish a connection after #{inspect(@connection_attempts)} attempts. Closing application.",
tag: "connection_failed"
)
System.stop(1)
{:stop, :connection_failed, state}
end
end
# A wait_for_connection caller's grace timer expired: release it with an error.
def handle_info({:wait_for_connection_expired, from}, _transport, state) do
{_tref, new_waiting} = Map.pop!(state.waiting_for_connection, from)
:ok = GenSocketClient.reply(from, {:error, :not_connected})
{:ok, %{state | waiting_for_connection: new_waiting}}
end
# Already connected: wait_for_connection returns immediately.
def handle_call(:wait_for_connection, _from, _transport, %{connected: true} = state) do
{:reply, :ok, state}
end
# Not connected: register the caller (keyed by its `from` tuple) with an
# expiration timer and defer the reply.
def handle_call(:wait_for_connection, from, _transport, %{connected: false} = state) do
tref =
Process.send_after(self(), {:wait_for_connection_expired, from}, @connection_backoff_ms)
waiting_for_connection =
state.waiting_for_connection
|> Map.put(from, tref)
updated_state = %{state | waiting_for_connection: waiting_for_connection}
{:noreply, updated_state, @connection_backoff_ms}
end
# verify_payload while disconnected fails fast.
def handle_call(
{:verify_payload, _interface_name, _path, _value, _timestamp},
_from,
_transport,
%{connected: false} = state
) do
:telemetry.execute([:astarte_end_to_end, :messages, :failed], %{})
Logger.warn("Cannot verify the payload: the client is not connected.",
tag: "verify_not_possible"
)
{:reply, {:error, :not_connected}, state}
end
# verify_payload while connected: if the matching message already arrived,
# reply :ok with the measured round trip (and reset the timeout budget);
# otherwise park the request in pending_requests with a request timeout.
def handle_call(
{:verify_payload, interface_name, path, value, timestamp},
from,
_transport,
state
) do
%{
pending_messages: pending_messages,
pending_requests: pending_requests
} = state
if Map.has_key?(pending_messages, {interface_name, path, value}) do
{{reception_timestamp, tref}, new_pending_messages} =
Map.pop(pending_messages, {interface_name, path, value})
:ok = Process.cancel_timer(tref, async: false, info: false)
Logger.debug("Timeout timer canceled successfully in handle_call.",
tag: "cancel_timer_success"
)
dt_ms = reception_timestamp - timestamp
new_state = %{
state
| pending_messages: new_pending_messages,
timeouts_to_crash: Config.client_max_timeouts!()
}
:telemetry.execute(
[:astarte_end_to_end, :messages, :round_trip_time],
%{duration_seconds: dt_ms / 1_000}
)
:telemetry.execute(
[:astarte_end_to_end, :astarte_platform],
%{status: 1}
)
ServiceNotifier.notify_service_up()
Logger.debug("Round trip time = #{inspect(dt_ms)} ms.")
{:reply, :ok, new_state}
else
timeout_ms =
Config.client_timeout_s!()
|> Utils.to_ms()
key = {interface_name, path, value}
tref = Process.send_after(self(), {:request_timeout, key}, timeout_ms)
new_pending_requests = Map.put(pending_requests, key, {timestamp, from, tref})
new_state = Map.put(state, :pending_requests, new_pending_requests)
{:noreply, new_state}
end
end
end
| 27.760345 | 116 | 0.638842 |
9e625c4cf99a7c4756747d8ffba1b50550cd58a2 | 7,685 | exs | Elixir | test/phoenix_live_view/integrations/update_test.exs | gaslight/live_element | 78d4ab0a2daab470f2ffd25d446fbabb0d746afe | [
"MIT"
] | null | null | null | test/phoenix_live_view/integrations/update_test.exs | gaslight/live_element | 78d4ab0a2daab470f2ffd25d446fbabb0d746afe | [
"MIT"
] | null | null | null | test/phoenix_live_view/integrations/update_test.exs | gaslight/live_element | 78d4ab0a2daab470f2ffd25d446fbabb0d746afe | [
"MIT"
] | null | null | null | defmodule LiveElement.UpdateTest do
use ExUnit.Case, async: true
import Phoenix.ConnTest
import LiveElementTest
alias LiveElementTest.{Endpoint, DOM}
@endpoint Endpoint
@moduletag :capture_log
setup config do
{:ok,
conn: Plug.Test.init_test_session(Phoenix.ConnTest.build_conn(), config[:session] || %{})}
end
describe "phx-update=append" do
@tag session: %{time_zones: {:append, [%{id: "ny", name: "NY"}]}}
test "static mount followed by connected mount", %{conn: conn} do
conn = get(conn, "/time-zones")
html = html_response(conn, 200)
assert [{"div", _, ["time: 12:00 NY" | _]}] = find_time_zones(html, ["ny", "tokyo"])
{:ok, view, _html} = live(conn)
html = render(view)
assert [{"div", _, ["time: 12:00 NY" | _]}] = find_time_zones(html, ["ny", "tokyo"])
html = render_click(view, "add-tz", %{id: "tokyo", name: "Tokyo"})
assert [
{"div", _, ["time: 12:00 NY" | _]},
{"div", _, ["time: 12:00 Tokyo" | _]}
] = find_time_zones(html, ["ny", "tokyo"])
_html = render_click(view, "add-tz", %{id: "la", name: "LA"})
html = render_click(view, "add-tz", %{id: "sf", name: "SF"})
assert [
{"div", _, ["time: 12:00 NY" | _]},
{"div", _, ["time: 12:00 Tokyo" | _]},
{"div", _, ["time: 12:00 LA" | _]},
{"div", _, ["time: 12:00 SF" | _]}
] = find_time_zones(html, ["ny", "tokyo", "la", "sf"])
end
@tag session: %{time_zones: {:append, [%{id: "ny", name: "NY"}, %{id: "sf", name: "SF"}]}}
test "updates to existing ids patch in place", %{conn: conn} do
{:ok, view, _html} = live(conn, "/time-zones")
assert [
{"h1", [{"id", "title-ny"}], ["NY"]},
{"h1", [{"id", "title-sf"}], ["SF"]}
] = find_time_titles(render(view), ["ny", "sf"])
html = render_click(view, "add-tz", %{id: "sf", name: "SanFran"})
assert [
{"h1", [{"id", "title-ny"}], ["NY"]},
{"h1", [{"id", "title-sf"}], ["SanFran"]}
] = find_time_titles(html, ["ny", "sf"])
end
@tag session: %{time_zones: {:append, [%{id: "nested-append", name: "NestedAppend"}]}}
test "with nested append child", %{conn: conn} do
conn = get(conn, "/time-zones")
html = html_response(conn, 200)
assert [
{"div", _,
[
"time: 12:00 NestedAppend\n",
{"div", [{"id", "append-NestedAppend"}, {"phx-update", "append"}], []}
]}
] = find_time_zones(html, ["nested-append", "tokyo"])
{:ok, view, _html} = live(conn)
assert nested_view = find_live_child(view, "tz-nested-append")
GenServer.call(nested_view.pid, {:append, ["item1"]})
GenServer.call(nested_view.pid, {:append, ["item2"]})
html = render(view)
assert [
{"div", _,
[
"time: 12:00 NestedAppend\n",
{"div", [{"id", "append-NestedAppend"}, {"phx-update", "append"}],
[
{:comment, " example "},
{"div", [{"id", "item-item1"}], ["item1"]},
{:comment, " example "},
{"div", [{"id", "item-item2"}], ["item2"]}
]}
]}
] = find_time_zones(html, ["nested-append", "tokyo"])
html = render_click(view, "add-tz", %{id: "tokyo", name: "Tokyo"})
assert [
{"div", _, ["time: 12:00 NestedAppend\n", _]},
{"div", _, ["time: 12:00 Tokyo" | _]}
] = find_time_zones(html, ["nested-append", "tokyo"])
end
@tag session: %{time_zones: {:append, [%{id: "ny", name: "NY"}]}}
test "raises without id on the parent", %{conn: conn} do
Process.flag(:trap_exit, true)
{:ok, view, _html} = live(conn, "/time-zones")
assert Exception.format(:exit, catch_exit(render_click(view, "remove-id", %{}))) =~
"setting phx-update to \"append\" requires setting an ID on the container"
end
@tag session: %{time_zones: {:append, [%{id: "ny", name: "NY"}]}}
test "raises without id on the child", %{conn: conn} do
Process.flag(:trap_exit, true)
{:ok, view, _html} = live(conn, "/time-zones")
assert Exception.format(
:exit,
catch_exit(render_click(view, "add-tz", %{id: nil, name: "Tokyo"}))
) =~
"setting phx-update to \"append\" requires setting an ID on each child"
end
end
describe "phx-update=prepend" do
@tag session: %{time_zones: {:prepend, [%{id: "ny", name: "NY"}]}}
test "static mount followed by connected mount", %{conn: conn} do
conn = get(conn, "/time-zones")
html = html_response(conn, 200)
assert [{"div", _, ["time: 12:00 NY" | _]}] = find_time_zones(html, ["ny", "tokyo"])
{:ok, view, _html} = live(conn)
html = render(view)
assert [{"div", _, ["time: 12:00 NY" | _]}] = find_time_zones(html, ["ny", "tokyo"])
html = render_click(view, "add-tz", %{id: "tokyo", name: "Tokyo"})
assert [
{"div", _, ["time: 12:00 Tokyo" | _]},
{"div", _, ["time: 12:00 NY" | _]}
] = find_time_zones(html, ["ny", "tokyo"])
_html = render_click(view, "add-tz", %{id: "la", name: "LA"})
html = render_click(view, "add-tz", %{id: "sf", name: "SF"})
assert [
{"div", _, ["time: 12:00 SF" | _]},
{"div", _, ["time: 12:00 LA" | _]},
{"div", _, ["time: 12:00 Tokyo" | _]},
{"div", _, ["time: 12:00 NY" | _]}
] = find_time_zones(html, ["ny", "tokyo", "la", "sf"])
end
@tag session: %{time_zones: {:prepend, [%{id: "ny", name: "NY"}, %{id: "sf", name: "SF"}]}}
test "updates to existing ids patch in place", %{conn: conn} do
{:ok, view, _html} = live(conn, "/time-zones")
assert [
{"h1", [{"id", "title-ny"}], ["NY"]},
{"h1", [{"id", "title-sf"}], ["SF"]}
] = find_time_titles(render(view), ["ny", "sf"])
html = render_click(view, "add-tz", %{id: "sf", name: "SanFran"})
assert [
{"h1", [{"id", "title-ny"}], ["NY"]},
{"h1", [{"id", "title-sf"}], ["SanFran"]}
] = find_time_titles(html, ["ny", "sf"])
end
end
describe "regular updates" do
  @tag session: %{
         time_zones: [%{"id" => "ny", "name" => "NY"}, %{"id" => "sf", "name" => "SF"}]
       }
  test "existing ids are replaced when patched without respawning children", %{conn: conn} do
    {:ok, view, html} = live(conn, "/shuffle")

    assert [
             {"div", _, ["time: 12:00 NY" | _]},
             {"div", _, ["time: 12:00 SF" | _]}
           ] = find_time_zones(html, ["ny", "sf"])

    # Capture child pids before and after reordering: a regular update must
    # patch the DOM while reusing the same child LiveView processes.
    children_pids_before = for child <- live_children(view), do: child.pid
    html = render_click(view, :reverse)
    children_pids_after = for child <- live_children(view), do: child.pid

    assert [
             {"div", _, ["time: 12:00 SF" | _]},
             {"div", _, ["time: 12:00 NY" | _]}
           ] = find_time_zones(html, ["ny", "sf"])

    assert children_pids_after == children_pids_before
  end
end
# Parses `html` and returns the `#tz-<id>` container elements for the given
# time-zone ids, in document order; ids with no matching element are absent.
defp find_time_zones(html, zones) do
  # map_join builds the comma-separated CSS selector in a single pass
  # (replaces the previous `for` comprehension piped into Enum.join/2).
  html |> DOM.parse() |> DOM.all(Enum.map_join(zones, ",", &"#tz-#{&1}"))
end
# Parses `html` and returns the `#title-<id>` heading elements for the given
# time-zone ids, in document order; ids with no matching element are absent.
defp find_time_titles(html, zones) do
  # Same single-pass selector construction as find_time_zones/2.
  html |> DOM.parse() |> DOM.all(Enum.map_join(zones, ",", &"#title-#{&1}"))
end
end
| 37.125604 | 95 | 0.490956 |
9e6263d2b92be0e66b93580789e625718638a471 | 5,343 | ex | Elixir | lib/niacademy_web/live/activity_live/show.ex | Ruin0x11/niacademy | f0b07aefa7b2bf5a8f643d851523ee43c6fd1c0f | [
"MIT"
] | null | null | null | lib/niacademy_web/live/activity_live/show.ex | Ruin0x11/niacademy | f0b07aefa7b2bf5a8f643d851523ee43c6fd1c0f | [
"MIT"
] | null | null | null | lib/niacademy_web/live/activity_live/show.ex | Ruin0x11/niacademy | f0b07aefa7b2bf5a8f643d851523ee43c6fd1c0f | [
"MIT"
] | null | null | null | defmodule NiacademyWeb.ActivityLive.Show do
use Phoenix.LiveView, layout: {NiacademyWeb.LayoutView, "live.html"}
alias Niacademy.Session
alias NiacademyWeb.ActivityView
alias NiacademyWeb.Router.Helpers, as: Routes
require Logger
@impl true
def mount(_params, _session, socket) do
  # Register with the LiveMonitor so unmount/2 runs when this view dies.
  Niacademy.LiveMonitor.monitor(self(), __MODULE__, %{id: socket.id})

  # Everything starts paused and zeroed until handle_params/3 loads a session.
  initial_assigns = [
    mode: :paused,
    session: %Session{},
    remaining: 0,
    total: 0,
    activity_count: 0,
    percent_elapsed: 0,
    display_minutes: 0,
    display_seconds: 0,
    unbounded: false,
    loaded: false
  ]

  {:ok, assign(socket, initial_assigns)}
end
# Called by LiveMonitor when the LiveView process terminates.
def unmount(reason, %{id: id}) do
  Logger.debug("View #{id} unmounted: #{inspect(reason)}")

  # Only a {:shutdown, _} exit triggers tracker cleanup (same condition the
  # original expressed with a `with` clause).
  if match?({:shutdown, _}, reason) do
    Logger.warning("Halting tracking for session.")
    Niacademy.Tracking.stop_tracking_active()
  end

  :ok
end
@impl true
def handle_params(%{"session_id" => session_id}, _val, socket) do
  session = Session.get!(session_id)
  # Activities are stored as JSON; decode once and keep the decoded list
  # on the session struct for the rest of this view's lifetime.
  activities = Jason.decode!(session.activities)
  session = %{session | activities: activities}
  activity = Enum.at(activities, session.position)

  {:noreply,
   assign(socket,
     mode: :paused,
     session: session,
     activity: activity,
     activity_count: Enum.count(activities),
     remaining: 0,
     total: 0,
     percent_elapsed: 0,
     display_minutes: 0,
     display_seconds: 0,
     unbounded: activity["unboundedDuration"],
     loaded: false
   )}
end
# Advances (or rewinds) the session by `delta` activities and redirects to
# the same live route so handle_params/3 reloads the new position.
# Raises when stepping before the first activity. Crossing past the last
# activity marks the session finished and stops active time tracking.
def set_position(%{assigns: %{session: session, activity_count: activity_count}} = socket, delta) do
  # Re-fetch so we act on the latest persisted position, not the stale assign.
  with session <- Session.get!(session.id) do
    if session.position + delta < 0 do
      raise "Can't go backward here."
    end

    finished = session.position + delta >= activity_count

    # First time a preset-backed session crosses the end: advance the preset
    # position, cancel the timeout job, and stop the running tracker.
    if finished && session.project_type != :none && !session.finished do
      {:ok, _} = Niacademy.Db.increment_preset_position(session.project_type)
      Niacademy.Jobs.TrackerTimeout.cancel()
      Niacademy.Tracking.stop_tracking_active()
    end

    case Session.update(session, %{position: session.position + delta, finished: finished}) do
      {:ok, session} ->
        {:noreply, socket |> push_redirect(to: Routes.activity_live_path(socket, :show, session.id))}

      {:error, %Ecto.Changeset{} = changeset} ->
        # A failed update is a bug, not a user error — crash loudly.
        raise changeset
    end
  end
end
@impl true
def handle_event("start", _, socket) do
  {:noreply, activate(socket)}
end

@impl true
def handle_event("content_loaded", _, socket) do
  # The client reports its media is ready: mark it loaded and start.
  Logger.debug("Loaded content!")
  handle_event("start", %{}, assign(socket, loaded: true))
end

@impl true
def handle_event("next", _, socket), do: set_position(socket, 1)

@impl true
def handle_event("prev", _, socket), do: set_position(socket, -1)
@impl true
def handle_info(:tick, socket) do
  # When the countdown flips the mode to :finished, auto-advance to the
  # next activity; otherwise just re-render with the updated timer.
  case update_timer(socket) do
    %{assigns: %{mode: :finished}} = ticked -> handle_event("next", %{}, ticked)
    ticked -> {:noreply, ticked}
  end
end
# Countdown reached zero while active: unbounded activities keep ticking
# (remaining goes negative via decrement/1), bounded ones cancel the
# interval timer and flip the mode to :finished so handle_info/2 advances.
defp update_timer(%{assigns: %{remaining: 0, mode: :active, timer: timer, unbounded: unbounded}} = socket) do
  if unbounded do
    decrement(socket)
  else
    {:ok, _} = :timer.cancel(timer)
    assign(socket, mode: :finished, timer: nil)
  end
end

# Any other state: count down one second.
defp update_timer(socket) do
  decrement(socket)
end
# Ticks the countdown down by one second — but only once the client has
# reported its content loaded; before that the socket passes through as-is.
defp decrement(socket) do
  if socket.assigns.loaded do
    set_timer(socket, socket.assigns.remaining - 1)
  else
    socket
  end
end
# Writes the countdown assigns for `seconds` remaining: raw remainder,
# percentage elapsed of the total, and minute/second display values.
defp set_timer(socket, seconds) do
  %{assigns: %{total: total}} = socket
  percent_elapsed = (total - seconds) / total * 100.0

  assign(socket,
    remaining: seconds,
    percent_elapsed: percent_elapsed,
    display_minutes: div(seconds, 60),
    display_seconds: rem(seconds, 60)
  )
end
@impl true
def render(assigns) do
  # Show the activity template while one is current, otherwise the summary.
  template = if assigns.activity, do: "show.html", else: "show_finished.html"
  ActivityView.render(template, assigns)
end
# Transitions a paused socket into the active state: starts time tracking
# for the current activity, schedules a 1-second :tick interval, arms the
# tracker-timeout job, and initializes the countdown assigns.
defp activate(%{assigns: %{session: session, activity: activity, activity_count: activity_count, mode: :paused}} = socket) do
  {:ok, timer} = :timer.send_interval(1000, :tick)
  total_seconds = activity["durationMinutes"] * 60
  activity = activity["activity"]
  description = "Activity: #{activity["humanName"]} (#{session.position + 1}/#{activity_count})"

  tags = [
    "niacademy",
    "activity:#{activity["id"]}",
    "preset:#{session.preset_id || "<none>"}",
    "regimen:#{activity["regimenId"]}"
  ]

  # Sessions without a pinned project type fall back to the activity's own.
  # NOTE(review): String.to_atom/1 creates atoms from data — fine only if
  # "projectType" values form a small fixed set; confirm.
  project_type =
    case session.project_type do
      :none -> activity["projectType"] |> String.downcase() |> String.to_atom()
      type -> type
    end

  # Fix: removed a leftover `|> IO.inspect` debugging call from this pipeline.
  project_type
  |> Niacademy.Tracking.project_type_to_project()
  |> Niacademy.Tracking.start_tracking(description, tags)

  # Allow 50% slack beyond the activity duration before force-stopping.
  Niacademy.Jobs.TrackerTimeout.persist(total_seconds * 1.5)

  socket |> assign(mode: :active, total: total_seconds, timer: timer) |> set_timer(total_seconds)
end

# Already active (or finished): ignore duplicate start requests.
defp activate(socket), do: socket
end
| 27.4 | 127 | 0.645705 |
9e628ab81334b3af9df75f1b8bf84455be3243d9 | 3,140 | ex | Elixir | lib/koans/03_numbers.ex | samstarling/elixir-koans | 9f9546e2b795cfedbafa8cec5661972ca1fc4b8d | [
"MIT"
] | null | null | null | lib/koans/03_numbers.ex | samstarling/elixir-koans | 9f9546e2b795cfedbafa8cec5661972ca1fc4b8d | [
"MIT"
] | null | null | null | lib/koans/03_numbers.ex | samstarling/elixir-koans | 9f9546e2b795cfedbafa8cec5661972ca1fc4b8d | [
"MIT"
] | null | null | null | defmodule Numbers do
require Integer
use Koans
@intro "Why is the number six so scared? Because seven eight nine!\nWe should get to know numbers a bit more!"
# Answered koans: each assert documents how Elixir treats numbers.

koan "Is an integer equal to its float equivalent?" do
  # == compares numeric values across integer/float.
  assert 1 == 1.0 == true
end

koan "Is an integer threequal to its float equivalent?" do
  # === is strict: an integer is never threequal to a float.
  assert 1 === 1.0 == false
end

koan "Revisit division with threequal" do
  # / always produces a float.
  assert 2 / 2 === 1.0
end

koan "Another way to divide" do
  assert div(5, 2) == 2
end

koan "What remains or: The Case of the Missing Modulo Operator (%)" do
  assert rem(5, 2) == 1
end

koan "Other math operators may produce this" do
  assert 2 * 2 === 4
end

koan "Or other math operators may produce this" do
  # One float operand makes the whole result a float.
  assert 2 * 2.0 === 4.0
end

koan "Two ways to round, are they exactly the same?" do
  # Float.round/1 returns a float, Kernel.round/1 an integer.
  assert Float.round(1.2) === round(1.2) == false
end

koan "Release the decimals into the void" do
  assert trunc(5.6) === 5
end

koan "Are you odd?" do
  assert Integer.is_odd(3) == true
end

koan "Actually you might be even" do
  assert Integer.is_even(4) == true
end

koan "Let's grab the individual digits in a list" do
  individual_digits = Integer.digits(58127)
  assert individual_digits == [5, 8, 1, 2, 7]
end

koan "Oh no! I need it back together" do
  number = Integer.undigits([1, 2, 3, 4])
  assert number == 1234
end

koan "Actually I want my number as a string" do
  string_digit = Integer.to_string(1234)
  assert string_digit == "1234"
end

koan "The meaning of life in hexadecimal is 2A!" do
  assert Integer.parse("2A", 16) == {42, ""}
end

koan "The remaining unparsable part is also returned" do
  assert Integer.parse("5 years") == {5, " years"}
end

koan "What if you parse a floating point value as an integer?" do
  assert Integer.parse("1.2") == {1, ".2"}
end

koan "Just want to parse to a float" do
  assert Float.parse("34.5") == {34.5, ""}
end

koan "Hmm, I want to parse this but it has some strings" do
  assert Float.parse("1.5 million dollars") == {1.5, " million dollars"}
end

koan "I don't want this decimal point, let's round up" do
  assert Float.ceil(34.25) === 35.0
end

koan "OK, I only want it to 1 decimal place" do
  assert Float.ceil(34.25, 1) === 34.3
end

koan "Rounding down is what I need" do
  assert Float.floor(99.99) === 99.0
end

koan "Rounding down to 2 decimal places" do
  assert Float.floor(12.345, 2) === 12.34
end

koan "Round the number up or down for me" do
  assert Float.round(5.5) == 6
  assert Float.round(5.4) == 5
  assert Float.round(8.94, 1) == 8.9
  assert Float.round(-5.5674, 3) == -5.567
end

koan "I want the first and last in the range" do
  first..last = Range.new(1, 10)
  assert first == 1
  assert last == 10
end

koan "Does my number exist in the range?" do
  range = Range.new(1, 10)
  assert 4 in range == true
  assert 10 in range == true
  assert 0 in range == false
end

koan "Is this a range?" do
  assert Range.range?(1..10) == true
  assert Range.range?(0) == false
end
end
| 23.969466 | 112 | 0.635032 |
9e629e57cdcdb79413636677ad9fde436f6987d3 | 343 | ex | Elixir | lib/supabase_surface/components/icons/icon_volume_1.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | 5 | 2021-06-08T08:02:43.000Z | 2022-02-09T23:13:46.000Z | lib/supabase_surface/components/icons/icon_volume_1.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | null | null | null | lib/supabase_surface/components/icons/icon_volume_1.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | 1 | 2021-07-14T05:20:31.000Z | 2021-07-14T05:20:31.000Z | defmodule SupabaseSurface.Components.Icons.IconVolume1 do
use SupabaseSurface.Components.Icon
# Renders the Feather "volume-1" icon inside the shared IconContainer,
# sized from the :size assign via IconContainer.get_size/1.
@impl true
def render(assigns) do
  icon_size = IconContainer.get_size(assigns.size)

  ~F"""
  <IconContainer assigns={assigns}>
    {Feathericons.volume_1(width: icon_size, height: icon_size)}
  </IconContainer>
  """
end
end
| 22.866667 | 66 | 0.720117 |
9e62a620b3590b8b28900b379f59bc23490e296f | 284 | ex | Elixir | lib/ucl.ex | skunkwerks/ucl-elixir | 059183f107db4997958e8cd57ae5a2e1557835a7 | [
"MIT"
] | null | null | null | lib/ucl.ex | skunkwerks/ucl-elixir | 059183f107db4997958e8cd57ae5a2e1557835a7 | [
"MIT"
] | null | null | null | lib/ucl.ex | skunkwerks/ucl-elixir | 059183f107db4997958e8cd57ae5a2e1557835a7 | [
"MIT"
] | null | null | null | defmodule UCL do
alias UCL.AST
alias UCL.Merge
alias UCL.Parser
@doc """
Parses a UCL document string into a single collapsed map.
"""
def parse(string) do
  # Non-{:ok, _} parser results intentionally fall through and crash
  # (CaseClauseError), exactly as before.
  case Parser.parse(string) do
    {:ok, ast} -> ast |> AST.walk() |> Merge.collapse()
  end
end
end
| 15.777778 | 39 | 0.566901 |
9e62a825198f36788c8b604e179642c61fea8fa6 | 634 | exs | Elixir | test/board_test.exs | ckampfe/chess | a4aec40ecbc5c6d109363e6b982cd9eaab338fd4 | [
"BSD-3-Clause"
] | 1 | 2021-10-06T12:59:52.000Z | 2021-10-06T12:59:52.000Z | test/board_test.exs | ckampfe/chess | a4aec40ecbc5c6d109363e6b982cd9eaab338fd4 | [
"BSD-3-Clause"
] | null | null | null | test/board_test.exs | ckampfe/chess | a4aec40ecbc5c6d109363e6b982cd9eaab338fd4 | [
"BSD-3-Clause"
] | null | null | null | defmodule BoardTest do
use ExUnit.Case
alias Chess.Board
test "to_string/1" do
  # Expected rendering of Chess.Board.default/0: unicode pieces on the back
  # ranks, ▀ marking alternating squares of the empty middle ranks.
  default_board =
    """
    ┌───────────────────────────────┐
    │ ♜ │ ♞ │ ♝ │ ♛ │ ♚ │ ♝ │ ♞ │ ♜ │
    │ ♟ │ ♟ │ ♟ │ ♟ │ ♟ │ ♟ │ ♟ │ ♟ │
    │ │ ▀ │ │ ▀ │ │ ▀ │ │ ▀ │
    │ ▀ │ │ ▀ │ │ ▀ │ │ ▀ │ │
    │ │ ▀ │ │ ▀ │ │ ▀ │ │ ▀ │
    │ ▀ │ │ ▀ │ │ ▀ │ │ ▀ │ │
    │ ♙ │ ♙ │ ♙ │ ♙ │ ♙ │ ♙ │ ♙ │ ♙ │
    │ ♖ │ ♘ │ ♗ │ ♕ │ ♔ │ ♗ │ ♘ │ ♖ │
    └───────────────────────────────┘
    """
    |> String.trim_trailing()

  assert Board.default() |> Board.to_string() == default_board
end
end
| 26.416667 | 64 | 0.231861 |
9e62d36bb4093341574f2ebc7b61f66f477679f0 | 1,585 | ex | Elixir | clients/page_speed_online/lib/google_api/page_speed_online/v5/model/category_group_v5.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/page_speed_online/lib/google_api/page_speed_online/v5/model/category_group_v5.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/page_speed_online/lib/google_api/page_speed_online/v5/model/category_group_v5.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PageSpeedOnline.V5.Model.CategoryGroupV5 do
  @moduledoc """
  Message containing a category

  ## Attributes

  * `description` (*type:* `String.t`, *default:* `nil`) - The description of what the category is grouping
  * `title` (*type:* `String.t`, *default:* `nil`) - The human readable title of the group
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
    :description => String.t(),
    :title => String.t()
  }

  # field/1 comes from GoogleApi.Gax.ModelBase and wires up (de)serialization.
  field(:description)
  field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.PageSpeedOnline.V5.Model.CategoryGroupV5 do
  # Delegates JSON decoding to the generated model's decode/2.
  def decode(value, options) do
    GoogleApi.PageSpeedOnline.V5.Model.CategoryGroupV5.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.PageSpeedOnline.V5.Model.CategoryGroupV5 do
  # Encodes via the shared Gax model base.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.7 | 109 | 0.728076 |
9e62d546a5869b251c9e83355759c77a48be0c83 | 1,593 | ex | Elixir | api/lib/remote_day_web/views/error_helpers.ex | arkanoryn/remote_day | 10e0a4b8995c44fae774c21189725b54f69186b4 | [
"Apache-2.0"
] | null | null | null | api/lib/remote_day_web/views/error_helpers.ex | arkanoryn/remote_day | 10e0a4b8995c44fae774c21189725b54f69186b4 | [
"Apache-2.0"
] | 24 | 2018-10-19T07:54:11.000Z | 2022-02-26T13:28:55.000Z | api/lib/remote_day_web/views/error_helpers.ex | arkanoryn/remote_day | 10e0a4b8995c44fae774c21189725b54f69186b4 | [
"Apache-2.0"
] | null | null | null | defmodule RemoteDayWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
  # Ecto defines its error messages dynamically, so we translate them at
  # runtime through our gettext backend. Translations live in the "errors"
  # domain (errors.po); when Ecto supplies a :count option we go through
  # dngettext so plural rules apply.
  count = opts[:count]

  if count do
    Gettext.dngettext(RemoteDayWeb.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(RemoteDayWeb.Gettext, "errors", msg, opts)
  end
end
# Flattens changeset errors (enumerable of {field, detail}) into a single
# human-readable string, e.g. "email can't be blank".
# NOTE(review): entries are concatenated with no separator between errors,
# matching the original `Enum.join()` behavior — confirm whether ", " was
# intended.
def handle_changeset_errors(errors) do
  # map_join does the map + join in a single pass.
  Enum.map_join(errors, "", fn {field, detail} ->
    "#{field} " <> render_detail(detail)
  end)
end
# Interpolates `%{key}` placeholders in an Ecto error message, e.g.
# {"should be at least %{count} character(s)", %{count: 3}}.
def render_detail({message, values}) do
  Enum.reduce(values, message, fn {key, value}, rendered ->
    String.replace(rendered, "%{#{key}}", to_string(value))
  end)
end

# Plain messages pass through untouched.
def render_detail(message) do
  message
end
end
| 30.634615 | 78 | 0.655995 |
9e62dadfe659bac1fefaed1abd98433854815049 | 2,198 | exs | Elixir | test/companion_web/controllers/auth_controller_test.exs | praekeltfoundation/nurseconnect-companion | 9afaabaf3ae3e0123abcbd12e0a2073b681e9052 | [
"BSD-3-Clause"
] | 1 | 2018-10-10T18:20:22.000Z | 2018-10-10T18:20:22.000Z | test/companion_web/controllers/auth_controller_test.exs | praekeltfoundation/nurseconnect-companion | 9afaabaf3ae3e0123abcbd12e0a2073b681e9052 | [
"BSD-3-Clause"
] | 23 | 2018-06-07T15:13:15.000Z | 2019-07-30T09:06:03.000Z | test/companion_web/controllers/auth_controller_test.exs | praekeltfoundation/nurseconnect-companion | 9afaabaf3ae3e0123abcbd12e0a2073b681e9052 | [
"BSD-3-Clause"
] | null | null | null | defmodule CompanionWeb.AuthControllerTest do
use CompanionWeb.ConnCase
@ueberauth_auth %{
credentials: %{token: "pretendthisisavalidtoken"},
info: %{email: "[email protected]", urls: %{website: "example.org"}},
provider: "google"
}
@user %{email: "[email protected]", provider: "google"}
test "redirects user to Google for authentication", %{conn: conn} do
  conn = get(conn, "/auth/google")
  # Ueberauth should answer with a 302 redirect toward the provider.
  assert redirected_to(conn, 302)
end
test "creates user from Google information", %{conn: conn} do
  # Pre-assigning :ueberauth_auth short-circuits the real OAuth round-trip.
  conn =
    conn
    |> assign(:ueberauth_auth, @ueberauth_auth)
    |> get("/auth/google/callback")

  assert get_flash(conn, :info) == "Successfully signed in!"
  assert get_session(conn, :user) == @user
  assert redirected_to(conn) == "/"
end
# Fix: corrected typo in the test name ("log use in" -> "log user in");
# the suffix keeps it distinct from the invalid-domain test below.
test "doesn't log user in if Google auth fails", %{conn: conn} do
  # Assigning :ueberauth_failure simulates a failed provider callback.
  conn =
    conn
    |> assign(:ueberauth_failure, %{})
    |> get("/auth/google/callback")

  assert get_flash(conn, :error) == "Sign in failed"
  assert get_session(conn, :user) == nil
  assert redirected_to(conn) == "/"
end
test "doesn't log user in if incorrect domain", %{conn: conn} do
  # Domain is set to example.org in test config
  auth = %{@ueberauth_auth | info: %{email: "[email protected]", urls: %{website: "bad.com"}}}

  conn =
    conn
    |> assign(:ueberauth_auth, auth)
    |> get("/auth/google/callback")

  assert get_flash(conn, :error) == "Sign in failed. Invalid domain"
  assert get_session(conn, :user) == nil
  assert redirected_to(conn) == "/"
end
test "Login page contains link to login with Google auth", %{conn: conn} do
  # The login page must expose the Google sign-in entry point.
  conn = get(conn, "/auth/login")
  assert html_response(conn, 200) =~ "Sign in with Google"
end
test "Logout page removes user from session", %{conn: conn} do
  # Sign in first so there is a session to clear.
  conn =
    conn
    |> assign(:ueberauth_auth, @ueberauth_auth)
    |> get("/auth/google/callback")

  assert get_session(conn, :user) == @user

  conn =
    conn
    |> get("/auth/logout")

  assert get_flash(conn, :info) == "Successfully logged out"
  assert redirected_to(conn) == "/"
  assert get_session(conn, :user) == nil
end
end
| 26.804878 | 91 | 0.628298 |
9e62e6899260afa3accff25655ddc6798684f9d6 | 1,428 | exs | Elixir | test/jwt/claim_test.exs | tzumby/elixir-jwt | 1e44bef4d1f706062050fedad79bdd9b3b4d81a5 | [
"MIT"
] | 13 | 2017-05-15T13:37:11.000Z | 2021-07-29T23:06:23.000Z | test/jwt/claim_test.exs | tzumby/elixir-jwt | 1e44bef4d1f706062050fedad79bdd9b3b4d81a5 | [
"MIT"
] | 7 | 2019-01-26T12:42:24.000Z | 2021-03-16T23:01:00.000Z | test/jwt/claim_test.exs | tzumby/elixir-jwt | 1e44bef4d1f706062050fedad79bdd9b3b4d81a5 | [
"MIT"
] | 3 | 2019-07-26T06:03:48.000Z | 2020-01-14T20:42:55.000Z | defmodule JWT.ClaimTest do
use ExUnit.Case
doctest JWT.Claim
@now DateTime.to_unix(DateTime.utc_now)
@after_now (@now + 5)
@before_now (@now - 5)
# NOTE(review): @before_now/@after_now are captured at COMPILE time, so
# these expectations assume verify/2 runs on a clock close to compilation;
# a stale build could flip the results — confirm.
test "verify/2 w rejected_claims" do
  claims = %{"exp" => @before_now}
  assert {:error, _rejected_claims} = JWT.Claim.verify(claims, %{})
end

test "verify/2 w/o rejected_claims" do
  claims = %{"exp" => @after_now}
  assert :ok == JWT.Claim.verify(claims, %{})
end
@uri "http://www.example.com"
@recipient "recipient"
@issuer "issuer"
@jwt_id "jwt_id"
@default_options %{
aud: @uri,
iss: @issuer,
jti: @jwt_id
}
@default_claims %{
"aud" => [@uri, @recipient],
"exp" => @after_now,
"iat" => @before_now,
"iss" => @issuer,
"jti" => @jwt_id,
"nbf" => @before_now
}
@invalid_claims %{
"aud" => ["http://www.other.com", "other recipient"],
"exp" => @before_now,
"iat" => @after_now,
"iss" => "other issuer",
"jti" => "other jwt_id",
"nbf" => @after_now
}
test "verify/2 w valid claims, returns :ok" do
  assert :ok == JWT.Claim.verify(@default_claims, @default_options)
end

test "verify/2 w invalid claims, returns {:error, [rejected_claims]}" do
  {:error, result} = JWT.Claim.verify(@invalid_claims, @default_options)
  result_len = length(result)
  # Every one of the invalid claim entries should be rejected.
  expected_len = length(Enum.into(@invalid_claims, []))
  assert expected_len == result_len
end
end
| 23.409836 | 74 | 0.614146 |
9e62e9852640e9453270fd977a3825daf64a03ee | 294 | ex | Elixir | plugins/ucc_chat/lib/ucc_chat_web/flex_bar/tab/rooms_mode.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/flex_bar/tab/rooms_mode.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/flex_bar/tab/rooms_mode.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UccChatWeb.FlexBar.Tab.RoomsMode do
use UccChatWeb.FlexBar.Helpers
alias UcxUcc.TabBar.Tab
# Registers this tab with the flex bar for `im` (direct-message) panels
# under the id "rooms-mode".
# NOTE(review): positional meanings of the trailing Tab.new args (View, "",
# 2) are inferred from UcxUcc.TabBar.Tab.new — confirm against its docs.
def add_buttons do
  TabBar.add_button Tab.new(
    __MODULE__,
    ~w[im],
    "rooms-mode",
    ~g"Rooms Mode",
    "icon-hash",
    View,
    "",
    2)
end
end
| 15.473684 | 45 | 0.598639 |
9e62f9cfed479d9564a2861ae80a4ef7626efde6 | 749 | ex | Elixir | clients/hydra/elixir/lib/ory/model/plugin_mount.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/hydra/elixir/lib/ory/model/plugin_mount.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/hydra/elixir/lib/ory/model/plugin_mount.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule Ory.Model.PluginMount do
  @moduledoc """
  PluginMount plugin mount
  """

  # Field names keep the upstream API's PascalCase keys, hence the quoted atoms.
  @derive [Poison.Encoder]
  defstruct [
    :"Description",
    :"Destination",
    :"Name",
    :"Options",
    :"Settable",
    :"Source",
    :"Type"
  ]

  @type t :: %__MODULE__{
    :"Description" => String.t,
    :"Destination" => String.t,
    :"Name" => String.t,
    :"Options" => [String.t],
    :"Settable" => [String.t],
    :"Source" => String.t,
    :"Type" => String.t
  }
end
defimpl Poison.Decoder, for: Ory.Model.PluginMount do
  # No nested models to decode; the struct is returned unchanged.
  def decode(value, _options) do
    value
  end
end
| 19.710526 | 91 | 0.611482 |
9e6303f799dd8f37bc520959ebac458888685f4e | 698 | ex | Elixir | lib/srp/identity_verifier.ex | thiamsantos/spr | c1db6c338543ecb9ec4d855d05a125a490c1606b | [
"Apache-2.0"
] | 15 | 2018-11-03T18:39:21.000Z | 2022-02-21T22:17:50.000Z | lib/srp/identity_verifier.ex | thiamsantos/spr | c1db6c338543ecb9ec4d855d05a125a490c1606b | [
"Apache-2.0"
] | 7 | 2018-10-21T16:36:58.000Z | 2018-10-25T00:56:05.000Z | lib/srp/identity_verifier.ex | thiamsantos/spr | c1db6c338543ecb9ec4d855d05a125a490c1606b | [
"Apache-2.0"
] | 3 | 2019-05-14T16:24:04.000Z | 2019-07-06T21:47:40.000Z | defmodule SRP.IdentityVerifier do
@moduledoc """
A user identity verifier.
This verifier is formed by the username, a random salt generated at registration time,
and a password verifier derived from the user password.
"""
# All three fields are required to build a verifier.
@enforce_keys [:username, :salt, :password_verifier]
defstruct [:username, :salt, :password_verifier]

@type t :: %__MODULE__{username: String.t(), salt: binary(), password_verifier: binary()}

# Internal constructor (@doc false); guards ensure all inputs are binaries.
@doc false
def new(username, salt, password_verifier)
    when is_binary(username) and is_binary(salt) and is_binary(password_verifier) do
  %__MODULE__{
    username: username,
    salt: salt,
    password_verifier: password_verifier
  }
end
end
| 30.347826 | 91 | 0.722063 |
9e631848f43688cedbac75cf659182e9a78d0cbd | 525 | ex | Elixir | base/fc_state_storage/lib/fc_state_storage.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 46 | 2018-10-13T23:18:13.000Z | 2021-08-07T07:46:51.000Z | base/fc_state_storage/lib/fc_state_storage.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 25 | 2018-10-14T00:56:07.000Z | 2019-12-23T19:41:02.000Z | base/fc_state_storage/lib/fc_state_storage.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 5 | 2018-12-16T04:39:51.000Z | 2020-10-01T12:17:03.000Z | defmodule FCStateStorage do
# Adapter module resolved at COMPILE time — :fc_state_storage, :adapter must
# be configured before compilation (defdelegate needs a literal target), so
# runtime config changes have no effect.
@store Application.get_env(:fc_state_storage, :adapter)

# Contract every storage adapter must implement.
@callback get(key :: String.t(), opts :: keyword) :: any
@callback put(key :: String.t(), record :: map, opts :: keyword) :: {:ok, any} | {:error, any}
@callback delete(key :: String.t(), opts :: keyword) :: :ok
@callback reset!() :: :ok

# Public API simply forwards to the configured adapter.
defdelegate get(key, opts \\ []), to: @store
defdelegate put(key, record, opts \\ []), to: @store
defdelegate delete(key, opts \\ []), to: @store
defdelegate reset!(), to: @store
end
| 37.5 | 96 | 0.630476 |
9e6331214a75c41ec819acb16f6882ea819fe30f | 1,174 | exs | Elixir | test/oauth2/strategy/auth_code_test.exs | trustatom-oss/oauth2c | 342372624529c9e92897915c85b12418fb1f41b8 | [
"MIT"
] | null | null | null | test/oauth2/strategy/auth_code_test.exs | trustatom-oss/oauth2c | 342372624529c9e92897915c85b12418fb1f41b8 | [
"MIT"
] | null | null | null | test/oauth2/strategy/auth_code_test.exs | trustatom-oss/oauth2c | 342372624529c9e92897915c85b12418fb1f41b8 | [
"MIT"
] | null | null | null | defmodule OAuth2.Strategy.AuthCodeTest do
use ExUnit.Case, async: true
use Plug.Test
test "new" do
  conn = call(Client, conn(:get, "/"))
  # The client plug stashes its configured strategy in conn.private.
  strategy = conn.private.oauth2_strategy

  assert strategy.client_id == "client_id"
  assert strategy.client_secret == "secret"
  assert strategy.site == "http://localhost:4000"
  assert strategy.authorize_url == "/oauth/authorize"
  assert strategy.token_url == "/oauth/token"
  assert strategy.token_method == :post
  assert strategy.params == %{}
  assert strategy.headers == %{}
end
test "authorize_url" do
  # Boot real HTTP endpoints: provider on the default port, client on 4001.
  Plug.Adapters.Cowboy.http Provider, []
  Plug.Adapters.Cowboy.http Client, [], port: 4001

  # Follow the authorization-code dance: client -> provider -> callback.
  conn = call(Client, conn(:get, "/auth"))
  [location] = get_resp_header conn, "Location"
  conn = call(Provider, conn(:get, location))
  assert conn.status == 302
  [location] = get_resp_header conn, "Location"
  conn = call(Client, conn(:get, location))
  assert conn.params["code"] == "1234"
  # The client exchanges the code and messages the token back to the test.
  assert_receive %OAuth2.AccessToken{access_token: "abc123", token_type: "Bearer"}
end
# Runs `conn` through the given plug module with empty options.
defp call(mod, conn), do: mod.call(conn, [])
end
| 30.102564 | 84 | 0.653322 |
9e63593f7c5d09a7d56ad17bf29939f90be07b08 | 1,471 | ex | Elixir | lib/artsy_web/views/error_helpers.ex | EhsanZ/artsy | de1cd2eb7f53ff33569223cb7d2561e39ea7a372 | [
"MIT"
] | 1 | 2019-10-26T07:17:24.000Z | 2019-10-26T07:17:24.000Z | lib/artsy_web/views/error_helpers.ex | EhsanZ/artsy | de1cd2eb7f53ff33569223cb7d2561e39ea7a372 | [
"MIT"
] | 2 | 2021-03-09T21:15:44.000Z | 2021-05-10T17:59:48.000Z | lib/artsy_web/views/error_helpers.ex | EhsanZ/artsy | de1cd2eb7f53ff33569223cb7d2561e39ea7a372 | [
"MIT"
] | null | null | null | defmodule ArtsyWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
  form.errors
  |> Keyword.get_values(field)
  |> Enum.map(&content_tag(:span, translate_error(&1), class: "help-block"))
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
  # Ecto defines its error messages dynamically, so they are translated at
  # runtime through our gettext backend. Translations belong to the
  # "errors" domain (errors.po); when Ecto supplies a :count option the
  # plural-aware dngettext is used instead.
  count = opts[:count]

  if count do
    Gettext.dngettext(ArtsyWeb.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(ArtsyWeb.Gettext, "errors", msg, opts)
  end
end
end
| 32.688889 | 74 | 0.668933 |
9e637207421f1e94e0cdd06a479d61f340615871 | 794 | ex | Elixir | lib/ash/error/side_load/no_such_relationship.ex | savish/ash | 7faf73097a6c6f801851e9d89569b6f5e6e87f81 | [
"MIT"
] | null | null | null | lib/ash/error/side_load/no_such_relationship.ex | savish/ash | 7faf73097a6c6f801851e9d89569b6f5e6e87f81 | [
"MIT"
] | null | null | null | lib/ash/error/side_load/no_such_relationship.ex | savish/ash | 7faf73097a6c6f801851e9d89569b6f5e6e87f81 | [
"MIT"
] | null | null | null | defmodule Ash.Error.SideLoad.NoSuchRelationship do
@moduledoc "Used when attempting to side load a relationship that does not exist"
use Ash.Error.Exception
def_ash_error([:resource, :relationship, :side_load_path], class: :invalid)
defimpl Ash.ErrorKind do
  # Each error instance gets a fresh id.
  def id(_), do: Ash.UUID.generate()

  def code(_), do: "no_such_side_load_relationship"

  def class(_), do: :invalid

  # Appends the dotted side-load path only when the failure is nested.
  def message(%{resource: resource, relationship: relationship, side_load_path: side_load_path}) do
    if side_load_path == [] do
      "No such relationship #{inspect(resource)}.#{relationship}"
    else
      "No such relationship #{inspect(resource)}.#{relationship} at #{
        Enum.join(side_load_path, ".")
      }"
    end
  end

  def stacktrace(_), do: nil
end
end
| 29.407407 | 101 | 0.678841 |
9e6388319c38ef791bdc18e252833824f569d7e8 | 1,825 | ex | Elixir | lib/sgp40/transport.ex | mnishiguchi/sgp40 | cd59ceea8bc36ee6a8dd7d463b4c3d05067d94bd | [
"MIT"
] | 2 | 2021-06-13T21:09:19.000Z | 2021-06-16T13:20:14.000Z | lib/sgp40/transport.ex | elixir-sensors/sgp40 | cd59ceea8bc36ee6a8dd7d463b4c3d05067d94bd | [
"MIT"
] | 3 | 2021-06-14T13:09:08.000Z | 2021-06-26T19:43:55.000Z | lib/sgp40/transport.ex | mnishiguchi/sgp40 | cd59ceea8bc36ee6a8dd7d463b4c3d05067d94bd | [
"MIT"
] | null | null | null | defmodule SGP40.Transport do
@moduledoc false

# Handle to an open I2C bus plus the target device address.
defstruct [:ref, :bus_address]

@type t :: %__MODULE__{ref: reference(), bus_address: 0..127}
@type option :: {:bus_name, String.t()} | {:bus_address, 0..127}

# Behaviour implemented by the real I2C transport and the test stub below.
@callback open([option()]) :: {:ok, t()} | {:error, any()}
@callback read(t(), pos_integer()) :: {:ok, binary()} | {:error, any()}
@callback write(t(), iodata()) :: :ok | {:error, any()}
@callback write_read(t(), iodata(), pos_integer()) :: {:ok, binary()} | {:error, any()}
end
defmodule SGP40.Transport.I2C do
  @moduledoc false

  @behaviour SGP40.Transport

  @impl SGP40.Transport
  def open(opts) do
    bus_name = Access.fetch!(opts, :bus_name)
    bus_address = Access.fetch!(opts, :bus_address)

    case Circuits.I2C.open(bus_name) do
      {:ok, ref} ->
        {:ok, %SGP40.Transport{ref: ref, bus_address: bus_address}}

      _ ->
        # NOTE(review): returns bare :error although the @callback declares
        # {:error, any()} — the failure reason is dropped; confirm callers
        # before changing.
        :error
    end
  end

  @impl SGP40.Transport
  def read(transport, bytes_to_read) do
    Circuits.I2C.read(transport.ref, transport.bus_address, bytes_to_read)
  end

  @impl SGP40.Transport
  def write(transport, register_and_data) do
    Circuits.I2C.write(transport.ref, transport.bus_address, register_and_data)
  end

  @impl SGP40.Transport
  def write_read(transport, register, bytes_to_read) do
    Circuits.I2C.write_read(transport.ref, transport.bus_address, register, bytes_to_read)
  end
end
defmodule SGP40.Transport.Stub do
  @moduledoc false

  # No-op transport for tests: open always succeeds, reads return "stub".
  @behaviour SGP40.Transport

  @impl SGP40.Transport
  def open(_opts), do: {:ok, %SGP40.Transport{ref: make_ref(), bus_address: 0x00}}

  @impl SGP40.Transport
  def read(_transport, _bytes_to_read), do: {:ok, "stub"}

  @impl SGP40.Transport
  def write(_transport, _data), do: :ok

  @impl SGP40.Transport
  def write_read(_transport, _data, _bytes_to_read), do: {:ok, "stub"}
end
| 26.071429 | 90 | 0.680548 |
9e63e2da6503ae0dbe2bff5d8d048a3cf6aba137 | 83 | exs | Elixir | 9-elixir-phoenix/test/test_helper.exs | smddzcy/learning-new-technologies | cf3645ff649f6dfe89c77e90ceaf089b66da2482 | [
"MIT"
] | 3 | 2017-02-22T08:45:53.000Z | 2017-04-29T13:40:23.000Z | 9-elixir-phoenix/test/test_helper.exs | smddzcy/learning-new-technologies | cf3645ff649f6dfe89c77e90ceaf089b66da2482 | [
"MIT"
] | null | null | null | 9-elixir-phoenix/test/test_helper.exs | smddzcy/learning-new-technologies | cf3645ff649f6dfe89c77e90ceaf089b66da2482 | [
"MIT"
] | null | null | null | ExUnit.start
Ecto.Adapters.SQL.Sandbox.mode(AwesomeprojectBackend.Repo, :manual)
| 16.6 | 67 | 0.819277 |
9e6405ed058dfba020a10cda30ad1c1ed83b9754 | 97 | ex | Elixir | lib/app/aviasales/countries.ex | ilgarsh/travelbot | f6a10bf39e1942e46d571b59b41fb3001436f520 | [
"MIT"
] | null | null | null | lib/app/aviasales/countries.ex | ilgarsh/travelbot | f6a10bf39e1942e46d571b59b41fb3001436f520 | [
"MIT"
] | 1 | 2017-12-11T21:11:44.000Z | 2017-12-11T21:11:44.000Z | lib/app/aviasales/countries.ex | ilgarsh/travelbot | f6a10bf39e1942e46d571b59b41fb3001436f520 | [
"MIT"
] | null | null | null | defmodule Country do
@derive[Poison.Encoder]
defstruct [:code,
:name,
:currency]
end | 12.125 | 24 | 0.670103 |
9e64241cc55f8a8d56b5dd02be185d83fdcdb49f | 1,945 | ex | Elixir | lib/ex_onixo/parser/product/descriptive_detail.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
] | 1 | 2021-12-11T06:44:18.000Z | 2021-12-11T06:44:18.000Z | lib/ex_onixo/parser/product/descriptive_detail.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
] | null | null | null | lib/ex_onixo/parser/product/descriptive_detail.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
] | null | null | null | defmodule ExOnixo.Parser.Product.DescriptiveDetail do
import SweetXml
alias ExOnixo.Helper.ElementYml
alias ExOnixo.Parser.Product.DescriptiveDetail.{
ProductFormDetail, Extent, EpubUsageConstraint,
Collection, TitleDetail, Contributor, Subject,
LanguageCode, LanguageRole
}
def parse_recursive(xml) do
SweetXml.xpath(xml, ~x"./DescriptiveDetail"l)
|> Enum.map(fn descriptive_detail ->
%{
product_composition: ElementYml.get_tag(descriptive_detail, "/ProductComposition", "ProductComposition"),
product_form: ElementYml.get_tag(descriptive_detail, "/ProductForm", "ProductForm"),
product_form_details: ProductFormDetail.parse_recursive(descriptive_detail),
primary_content_type: xpath(descriptive_detail, ~x"./PrimaryContentType/text()"s),
product_content_type: xpath(descriptive_detail, ~x"./ProductContentType/text()"s),
epub_technical_protection: ElementYml.get_tag(descriptive_detail, "/EpubTechnicalProtection", "EpubTechnicalProtection"),
edition_number: xpath(descriptive_detail, ~x"./EditionNumber/text()"s),
language_roles: LanguageRole.parse_recursive(descriptive_detail),
language_codes: LanguageCode.parse_recursive(descriptive_detail),
extents: Extent.parse_recursive(descriptive_detail),
epub_usage_constraints: EpubUsageConstraint.parse_recursive(descriptive_detail),
collections: Collection.parse_recursive(descriptive_detail),
title_details: TitleDetail.parse_recursive(descriptive_detail),
contributors: Contributor.parse_recursive(descriptive_detail),
subjects: Subject.parse_recursive(descriptive_detail)
}
end)
|> Enum.to_list
|> handle_maps
end
defp handle_maps(nil), do: %{}
defp handle_maps([]), do: %{}
defp handle_maps(list) do
List.first(list)
end
end
| 47.439024 | 133 | 0.722365 |
9e64350726015ab9769b3eff9436ff1c6698ef2c | 2,334 | ex | Elixir | lib/dispatch/repositories/contributors.ex | mirego/dispatch | 65f81e264e45676ece8a6dc5f203cf9f283d6ec7 | [
"BSD-3-Clause"
] | 21 | 2019-02-13T15:26:00.000Z | 2021-09-18T13:05:42.000Z | lib/dispatch/repositories/contributors.ex | mirego/dispatch | 65f81e264e45676ece8a6dc5f203cf9f283d6ec7 | [
"BSD-3-Clause"
] | 26 | 2019-02-13T18:42:44.000Z | 2021-09-16T15:40:05.000Z | lib/dispatch/repositories/contributors.ex | mirego/dispatch | 65f81e264e45676ece8a6dc5f203cf9f283d6ec7 | [
"BSD-3-Clause"
] | 2 | 2020-05-26T09:09:19.000Z | 2021-04-21T20:43:07.000Z | defmodule Dispatch.Repositories.Contributors do
alias Dispatch.SelectedUser
alias Dispatch.Utils.{Random, TimeHelper}
@doc """
Loop through each contributors and randomly select one based on his relevancy
Example:
Given a list of 3 contributors
%{username: "John", relevancy: 1}
%{username: "Jane", relevancy: 7}
%{username: "joe", relevancy: 2}
For a relevancy total of 10
So contributors will be selected based on the random pick index like this:
John: 1
Jane: 2 to 8
Joe: 9 and 10
"""
def select(nil), do: []
def select([]), do: []
def select(contributors) do
random_pick_index =
contributors
|> Enum.reduce(0, fn %{relevancy: relevancy}, acc -> relevancy + acc end)
|> Random.uniform()
Enum.reduce_while(contributors, random_pick_index, fn contributor, acc ->
acc = acc - contributor.relevancy
if acc <= 0 do
{:halt,
[
%SelectedUser{
username: contributor.username,
type: "contributor",
metadata: %{
recent_commit_count: contributor.recent_commit_count,
total_commit_count: contributor.total_commit_count
}
}
]}
else
{:cont, acc}
end
end)
end
@doc """
Calculate the relevancy of the contributor
Take the all time total commits and add the last three months commits
Weekly Hash (weeks array):
w - Start of the week, given as a Unix timestamp.
a - Number of additions
d - Number of deletions
c - Number of commits
More at https://developer.github.com/v3/repos/statistics/#get-contributors-list-with-additions-deletions-and-commit-counts
"""
def calculate_relevancy(%{"total" => total, "weeks" => weeks}) do
recent_commit_count = retrieve_relevant_week_commits(weeks)
relevancy = total + recent_commit_count
{total, recent_commit_count, relevancy}
end
defp retrieve_relevant_week_commits(weeks) do
starting_week = TimeHelper.unix_beginning_of_week(relevant_activity_days())
Enum.reduce(weeks, 0, fn
%{"w" => week, "c" => count}, acc when week >= starting_week ->
acc + count
_, acc ->
acc
end)
end
def relevant_activity_days, do: Application.get_env(:dispatch, __MODULE__)[:relevant_activity_days]
end
| 26.522727 | 124 | 0.656812 |
9e643536dac968c83ff3321305bbd6ad0ea89ddf | 5,241 | ex | Elixir | lib/ebnf_parser/sparql.ex | langens-jonathan/mu-authorization | 3b411460b81b87581af7c7f302b1d3bec4610608 | [
"MIT"
] | 1 | 2019-09-05T23:00:48.000Z | 2019-09-05T23:00:48.000Z | lib/ebnf_parser/sparql.ex | langens-jonathan/mu-authorization | 3b411460b81b87581af7c7f302b1d3bec4610608 | [
"MIT"
] | 7 | 2020-10-27T20:42:06.000Z | 2021-11-15T07:41:15.000Z | lib/ebnf_parser/sparql.ex | langens-jonathan/mu-authorization | 3b411460b81b87581af7c7f302b1d3bec4610608 | [
"MIT"
] | 6 | 2016-04-06T09:28:43.000Z | 2021-08-09T12:29:16.000Z | defmodule EbnfParser.Sparql do
require Logger
require ALog
use GenServer
@moduledoc """
Parser which allows you to efficiently fetch the parsed spraql
syntax.
"""
@spec split_single_form(String.t(), boolean) :: {atom, {boolean, any}}
@type syntax :: %{optional(atom) => any}
### GenServer API
@doc """
GenServer.init/1 callback
"""
def init(_) do
{:ok, EbnfParser.Sparql.parse_sparql()}
end
@doc """
GenServer.handle_call/3 callback
"""
def handle_call(:get, _from, syntax) do
{:reply, syntax, syntax}
end
### Client API / Helper functions
def start_link(state \\ %{}) do
GenServer.start_link(__MODULE__, state, name: __MODULE__)
end
def syntax do
GenServer.call(__MODULE__, :get)
end
def split_single_form(string, terminal \\ false) do
split_string = String.split(string, "::=", parts: 2)
[name, clause] = Enum.map(split_string, &String.trim/1)
{String.to_atom(name), {terminal, full_parse(clause)}}
end
def full_parse(string) do
EbnfParser.Parser.tokenize_and_parse(string)
end
def split_forms(forms) do
Enum.map(forms, &split_single_form/1)
end
@spec parse_sparql() :: syntax
def parse_sparql() do
%{non_terminal: non_terminal_forms, terminal: terminal_forms} = EbnfParser.Forms.sparql()
non_terminal_map =
non_terminal_forms
|> Enum.map(fn x -> split_single_form(x, false) end)
|> Enum.into(%{})
full_syntax_map =
terminal_forms
|> Enum.map(fn x -> split_single_form(x, true) end)
|> Enum.into(non_terminal_map)
_regexp_empowered_map =
full_syntax_map
|> augment_with_regexp_terminators
end
def augment_with_regexp_terminators(map) do
map
# TODO add other string literals
|> Map.put(
:STRING_LITERAL_LONG1,
{true, [regex: ~r/^'''(''|')?([^\\']|(\\[tbnrf'"\\]))*'''/mf]}
)
|> Map.put(
:STRING_LITERAL_LONG2,
{true, [regex: ~r/^"""(""|")?([^\\"]|(\\[tbnrf"'\\]))*"""/mf]}
)
|> Map.put(
:STRING_LITERAL1,
{true, [regex: ~r/^'([^\x{27}\x{5C}\x{A}\x{D}]|(\\[tbnrf\\"']))*'/]}
)
|> Map.put(
:STRING_LITERAL2,
{true, [regex: ~r/^"([^\x{22}\x{5C}\x{A}\x{D}]|(\\[tbnrf\\"']))*"/]}
)
|> Map.put(
:VARNAME,
{true,
[
regex:
~r/^[A-Za-z\x{00C0}-\x{00D6}\x{00D8}-\x{00F6}\x{00F8}-\x{02FF}\x{0370}-\x{037D}\x{037F}-\x{1FFF}\x{200C}-\x{200D}\x{2070}-\x{218F}\x{2C00}-\x{2FEF}\x{3001}-\x{D7FF}\x{F900}-\x{FDCF}\x{FDF0}-\x{FFFD}\x{10000}-\x{EFFFF}_0-9][A-Za-z\x{00C0}-\x{00D6}\x{00D8}-\x{00F6}\x{00F8}-\x{02FF}\x{0370}-\x{037D}\x{037F}-\x{1FFF}\x{200C}-\x{200D}\x{2070}-\x{218F}\x{2C00}-\x{2FEF}\x{3001}-\x{D7FF}\x{F900}-\x{FDCF}\x{FDF0}-\x{FFFD}\x{10000}-\x{EFFFF}_0-9\x{00B7}\x{0300}-\x{036F}\x{203F}-\x{2040}]*/u
]}
)
|> Map.put(
:PN_PREFIX,
{true,
[
regex:
~r/^[A-Za-z\x{00C0}-\x{00D6}\x{00D8}-\x{00F6}\x{00F8}-\x{02FF}\x{0370}-\x{037D}\x{037F}-\x{1FFF}\x{200C}-\x{200D}\x{2070}-\x{218F}\x{2C00}-\x{2FEF}\x{3001}-\x{D7FF}\x{F900}-\x{FDCF}\x{FDF0}-\x{FFFD}\x{10000}-\x{EFFFF}]([A-Za-z\x{00C0}-\x{00D6}\x{00D8}-\x{00F6}\x{00F8}-\x{02FF}\x{0370}-\x{037D}\x{037F}-\x{1FFF}\x{200C}-\x{200D}\x{2070}-\x{218F}\x{2C00}-\x{2FEF}\x{3001}-\x{D7FF}\x{F900}-\x{FDCF}\x{FDF0}-\x{FFFD}\x{10000}-\x{EFFFF}_\-0-9\x{00B7}\x{0300}-\x{036F}\x{203F}-\x{2040}\.]*[A-Za-z\x{00C0}-\x{00D6}\x{00D8}-\x{00F6}\x{00F8}-\x{02FF}\x{0370}-\x{037D}\x{037F}-\x{1FFF}\x{200C}-\x{200D}\x{2070}-\x{218F}\x{2C00}-\x{2FEF}\x{3001}-\x{D7FF}\x{F900}-\x{FDCF}\x{FDF0}-\x{FFFD}\x{10000}-\x{EFFFF}_\-0-9\x{00B7}\x{0300}-\x{036F}\x{203F}-\x{2040}])?/u
]}
)
|> Map.put(
:PN_LOCAL,
{true,
[
regex:
~r/^([A-Za-z\x{00C0}-\x{00D6}\x{00D8}-\x{00F6}\x{00F8}-\x{02FF}\x{0370}-\x{037D}\x{037F}-\x{1FFF}\x{200C}-\x{200D}\x{2070}-\x{218F}\x{2C00}-\x{2FEF}\x{3001}-\x{D7FF}\x{F900}-\x{FDCF}\x{FDF0}-\x{FFFD}\x{10000}-\x{EFFFF}_:0-9]|(%[0-9A-Fa-f][0-9A-Fa-f])|(\\[_~\.\-!$&'()*+,;=\/?#@%]))(([A-Za-z\x{00C0}-\x{00D6}\x{00D8}-\x{00F6}\x{00F8}-\x{02FF}\x{0370}-\x{037D}\x{037F}-\x{1FFF}\x{200C}-\x{200D}\x{2070}-\x{218F}\x{2C00}-\x{2FEF}\x{3001}-\x{D7FF}\x{F900}-\x{FDCF}\x{FDF0}-\x{FFFD}\x{10000}-\x{EFFFF}_\-0-9\x{00B7}\x{0300}-\x{036F}\x{203F}-\x{2040}\.:]|(%[0-9A-Fa-f][0-9A-Fa-f])|(\\[_~\.\-!$&'()*+,;=\/?#@%]))*(([A-Za-z\x{00C0}-\x{00D6}\x{00D8}-\x{00F6}\x{00F8}-\x{02FF}\x{0370}-\x{037D}\x{037F}-\x{1FFF}\x{200C}-\x{200D}\x{2070}-\x{218F}\x{2C00}-\x{2FEF}\x{3001}-\x{D7FF}\x{F900}-\x{FDCF}\x{FDF0}-\x{FFFD}\x{10000}-\x{EFFFF}_\-0-9\x{00B7}\x{0300}-\x{036F}\x{203F}-\x{2040}:])|(%[0-9a-zA-Z][0-9a-zA-Z])|(\\[_~\.\-!$&'()*+,;=\/?\#@%])))?/u
]}
)
|> Map.put(:IRIREF, {true, [regex: ~r/^<([^<>\\"{}|^`\x{00}-\x{20}])*>/u]})
end
def parse_sparql_as_ordered_array do
%{non_terminal: non_terminal_forms, terminal: terminal_forms} = EbnfParser.Forms.sparql()
parsed_non_terminal_forms =
non_terminal_forms
|> Enum.map(fn x -> {x, split_single_form(x, false)} end)
parsed_terminal_forms =
terminal_forms
|> Enum.map(fn x -> {x, split_single_form(x, true)} end)
parsed_non_terminal_forms ++ parsed_terminal_forms
end
end
| 40.007634 | 945 | 0.569357 |
9e643fc24456324c000a94b7b90e815bda83c670 | 3,756 | ex | Elixir | lib/ultimatum_game.ex | xeejp/ultimatum-game | 2d2b0c00e47de31814de33d9a66bc78b4838688d | [
"MIT"
] | null | null | null | lib/ultimatum_game.ex | xeejp/ultimatum-game | 2d2b0c00e47de31814de33d9a66bc78b4838688d | [
"MIT"
] | null | null | null | lib/ultimatum_game.ex | xeejp/ultimatum-game | 2d2b0c00e47de31814de33d9a66bc78b4838688d | [
"MIT"
] | null | null | null | defmodule UltimatumGame do
use XeeThemeScript
require Logger
alias UltimatumGame.Host
alias UltimatumGame.Participant
alias UltimatumGame.Main
alias UltimatumGame.Actions
# Callbacks
def script_type do
:message
end
def install, do: nil
def init do
{:ok, %{data: %{
dynamic_text: %{},
page: "waiting",
game_progress: 0,
game_round: 1,
game_redo: 0,
inf_redo: false,
game_progress: 0,
participants: %{},
pairs: %{},
ultimatum_results: %{},
dictator_results: %{},
is_first_visit: true
}
}}
end
def join(data, id) do
result = unless Map.has_key?(data.participants, id) do
new = Main.new_participant()
put_in(data, [:participants, id], new)
else
data
end
wrap_result(data, result)
end
# Host router
def handle_received(data, %{"action" => action, "params" => params}) do
Logger.debug("[Ultimatum Game] #{action} #{inspect params}")
result = case {action, params} do
{"FETCH_CONTENTS", _} -> Actions.update_host_contents(data)
{"MATCH", _} -> Host.match(data)
{"RESET", _} -> Host.reset(data)
{"VISIT", _} -> Host.visit(data)
{"CHANGE_DESCRIPTION", dynamic_text} -> Host.change_description(data, dynamic_text)
{"CHANGE_PAGE", page} -> Host.change_page(data, page)
{"CHANGE_GAME_ROUND", game_round} -> Host.change_game_round(data, game_round)
{"CHANGE_INF_REDO", inf_redo} -> Host.change_inf_redo(data, inf_redo)
{"CHANGE_GAME_REDO", game_redo} -> Host.change_game_redo(data, game_redo)
_ -> {:ok, %{data: data}}
end
wrap_result(data, result)
end
# Participant router
def handle_received(data, %{"action" => action, "params" => params}, id) do
Logger.debug("[Ultimatum Game] #{action} #{inspect params}")
result = case {action, params} do
{"FETCH_CONTENTS", _} -> Actions.update_participant_contents(data, id)
{"FINISH_ALLOCATING", allo_temp} -> Participant.finish_allocating(data, id, allo_temp)
{"CHANGE_ALLO_TEMP", allo_temp} -> Participant.change_allo_temp(data, id, allo_temp)
{"RESPONSE_OK", result} -> Participant.response_ok(data, id, result)
{"REDO_ALLOCATING", _} -> Participant.redo_allocating(data, id)
{"RESPONSE_NG", result} -> Participant.response_ng(data, id, result)
_ -> {:ok, %{data: data}}
end
wrap_result(data, result)
end
def compute_diff(old, %{data: new} = result) do
import Participant, only: [filter_data: 2]
import Host, only: [filter_data: 1]
host = Map.get(result, :host, %{})
participant = Map.get(result, :participant, %{})
participant_tasks = Enum.map(old.participants, fn {id, _} ->
{id, Task.async(fn -> JsonDiffEx.diff(filter_data(old, id), filter_data(new, id)) end)}
end)
host_task = Task.async(fn -> JsonDiffEx.diff(filter_data(old), filter_data(new)) end)
host_diff = Task.await(host_task)
participant_diff = Enum.map(participant_tasks, fn {id, task} -> {id, %{diff: Task.await(task)}} end)
|> Enum.filter(fn {_, map} -> map_size(map.diff) != 0 end)
|> Enum.into(%{})
host = Map.merge(host, %{diff: host_diff})
host = if map_size(host.diff) == 0 do
Map.delete(host, :diff)
else
host
end
host = if map_size(host) == 0 do
nil
else
host
end
participant = Map.merge(participant, participant_diff, fn _k, v1, v2 ->
Map.merge(v1, v2)
end)
%{data: new, host: host, participant: participant}
end
def wrap_result(old, {:ok, result}) do
{:ok, compute_diff(old, result)}
end
def wrap_result(old, new) do
{:ok, compute_diff(old, %{data: new})}
end
end
| 32.37931 | 104 | 0.629393 |
9e6446c5036dbbf229cccf151ed8a71590d4e8e4 | 4,324 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/90.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/90.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/90.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | **EXAMPLE FILE**
noun * verb_aux cm * verb_aux * verb;
adjective * verb_aux particle * verb_aux * verb;
adjective * pn cm * noun * verb;
adjective * verb_aux verb a32 verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
cm * verb_aux noun * verb * verb;
verb * verb cm * verb_aux * verb_aux;
neg * verb_aux adjective * SYM * verb;
adjective * verb_aux particle * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
adjective * verb_aux adverb * verb_aux * verb;
adjective * noun cm * pn * verb;
noun * verb_aux cm * verb_aux * verb;
neg * verb_aux noun * verb_aux * verb;
adjective * verb_aux neg * verb_aux * verb;
neg * verb_aux particle * verb_aux * verb;
verb * verb_aux neg * verb_aux * verb_aux;
neg * verb_aux noun * verb_aux * verb;
verb * verb_aux neg * verb_aux * verb_aux;
verb * verb_aux neg * verb_aux * verb_aux;
noun * verb_aux cm * verb_aux * verb;
adverb * noun verb * verb * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * SYM * verb;
noun * verb_aux cm * SYM * verb;
adjective * verb_aux cm * verb_aux * verb;
noun * verb_aux nst * verb_aux * verb;
neg * verb_aux quantifier * verb_aux * verb;
noun * verb_aux noun * verb_aux * verb;
neg * verb_aux noun * SYM * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux verb a57 verb_aux * verb;
neg * verb_aux verb * verb_aux * verb_aux;
adjective * verb_aux verb * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux verb * verb_aux * verb;
noun * verb_aux cm * SYM * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux particle * verb_aux * verb;
adjective * verb_aux particle * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
adjective * verb_aux cm * SYM * verb;
neg * verb_aux noun * SYM * verb;
noun * verb_aux cm * verb_aux * verb;
cm * verb_aux noun * verb_aux * verb;
cm * verb noun a33 verb_aux * verb;
noun * verb_aux nst * verb_aux * verb;
adjective * verb_aux noun * verb_aux * verb;
noun * verb_aux quantifier * verb_aux * verb;
noun * verb_aux cardinal * verb_aux * verb;
adjective * verb_aux particle * verb_aux * verb;
adjective * verb_aux noun * verb_aux * verb;
adjective * verb_aux cm * verb_aux * verb;
adjective * verb_aux pn * conj * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux demonstrative * verb_aux * verb;
adjective * verb_aux conj * verb_aux * verb;
noun * verb_aux adjective * verb_aux * verb;
adjective * verb_aux noun * pn * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * SYM * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
pn * verb_aux pn * SYM * verb;
noun * verb_aux adjective * verb_aux * verb;
noun * cm conj * cardinal * verb;
verb * noun noun * cm * verb_aux;
adjective * verb_aux cm * verb_aux * verb;
noun * verb_aux noun * verb_aux * verb;
noun * verb_aux noun * SYM * verb;
noun * verb_aux cm * conj * verb;
adjective * verb_aux verb a33 SYM * verb;
adjective * verb_aux pn * verb_aux * verb;
adjective * verb_aux noun * verb_aux * verb;
neg * verb_aux verb * SYM * verb;
neg * verb_aux noun * conj * verb;
noun * verb_aux pn * noun * verb;
noun * verb_aux cm * SYM * verb;
adjective * verb_aux noun * verb_aux * verb;
noun * verb_aux cm * noun * verb;
adjective * verb_aux cm * SYM * verb;
adjective * verb_aux noun * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
cm * verb_aux noun a84 verb_aux * verb;
particle * verb_aux pnoun * verb_aux * verb;
noun * verb_aux pn * SYM * verb;
adjective * verb_aux cm * verb_aux * verb;
noun * verb_aux verb * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
noun * verb_aux cm * verb_aux * verb;
verb * noun noun * cm * verb_aux;
adjective * verb_aux noun * verb_aux * verb;
adjective * verb_aux particle * conj * verb;
neg * verb_aux noun * conj * verb;
adjective * verb_aux verb a34 SYM * verb;
adjective * verb_aux cm * verb_aux * verb;
| 39.669725 | 49 | 0.681082 |
9e6497afc0cc8e691332b17c783c350058773c30 | 3,209 | exs | Elixir | test/protein/server_test.exs | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 9 | 2018-01-16T12:25:58.000Z | 2021-11-24T13:19:35.000Z | test/protein/server_test.exs | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 3 | 2018-09-07T16:09:57.000Z | 2019-02-19T10:37:13.000Z | test/protein/server_test.exs | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 3 | 2018-05-27T08:56:52.000Z | 2021-11-24T13:19:37.000Z | defmodule Protein.ServerTest do
use ExUnit.Case, async: false
@moduletag :external
alias Mix.Config
alias Protein.{
EmptyClient,
EmptyServer
}
import TestUtil
describe "start_link/1" do
test "graceful shutdown success" do
Config.persist(
protein: [
mocking_enabled: false
]
)
{:ok, server_pid} = EmptyServer.start_link()
{:ok, _client_pid} = EmptyClient.start_link()
request = %EmptyClient.Empty.Request{}
parent = self()
spawn(fn ->
Process.flag(:trap_exit, true)
response = EmptyClient.call(request)
send(parent, response)
end)
# wait for server to start processing
:timer.sleep(50)
try do
Process.flag(:trap_exit, true)
Process.exit(server_pid, :shutdown)
receive do
{:EXIT, _pid, _error} -> :ok
end
rescue
e in RuntimeError -> e
end
Process.flag(:trap_exit, false)
receive do
response ->
assert {:ok, %Protein.EmptyClient.Empty.Response{}} == response
end
after
Config.persist(
protein: [
mocking_enabled: true
]
)
end
test "success" do
Config.persist(
protein: [
mocking_enabled: false
]
)
{:ok, server_pid} = EmptyServer.start_link()
{:ok, client_pid} = EmptyClient.start_link()
request = %EmptyClient.Empty.Request{}
_response = EmptyClient.call(request)
:timer.sleep(50)
stop_process(server_pid)
stop_process(client_pid)
after
Config.persist(
protein: [
mocking_enabled: true
]
)
end
test "failed" do
Config.persist(
protein: [
mocking_enabled: false
]
)
{:ok, server_pid} = EmptyServer.start_link()
_result =
try do
{:ok, client_pid} = EmptyClient.start_link()
request = %EmptyClient.Error.Request{}
_response = EmptyClient.call(request)
:timer.sleep(50)
stop_process(server_pid)
stop_process(client_pid)
rescue
e ->
assert %Protein.TransportError{adapter: Protein.AMQPAdapter, context: :service_error} ==
e
end
after
Config.persist(
protein: [
mocking_enabled: true
]
)
end
test "failed in linked process" do
Config.persist(
protein: [
mocking_enabled: false
]
)
{:ok, server_pid} = EmptyServer.start_link()
_result =
try do
{:ok, client_pid} = EmptyClient.start_link()
request = %EmptyClient.AsyncError.Request{}
_response = EmptyClient.call(request)
:timer.sleep(50)
stop_process(server_pid)
stop_process(client_pid)
rescue
e ->
assert %Protein.TransportError{adapter: Protein.AMQPAdapter, context: :service_error} ==
e
end
after
Config.persist(
protein: [
mocking_enabled: true
]
)
end
end
end
| 20.703226 | 100 | 0.549704 |
9e64ad19430d664810843a6c89d7ec6cb8b699e1 | 1,952 | ex | Elixir | clients/genomics/lib/google_api/genomics/v1/model/event.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/genomics/lib/google_api/genomics/v1/model/event.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/event.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Genomics.V1.Model.Event do
@moduledoc """
Carries information about events that occur during pipeline execution.
## Attributes
- description (String.t): A human-readable description of the event. Note that these strings can change at any time without notice. Any application logic must use the information in the `details` field. Defaults to: `null`.
- details (%{optional(String.t) => String.t}): Machine-readable details about the event. Defaults to: `null`.
- timestamp (DateTime.t): The time at which the event occurred. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => any(),
:details => map(),
:timestamp => DateTime.t()
}
field(:description)
field(:details, type: :map)
field(:timestamp, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.Genomics.V1.Model.Event do
def decode(value, options) do
GoogleApi.Genomics.V1.Model.Event.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Genomics.V1.Model.Event do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.148148 | 235 | 0.727971 |
9e64bf3ebfae9e8bd62e166b5391d9ee88a4c6c9 | 1,747 | ex | Elixir | deps_local/resampler/lib/resampler_worker.ex | evadne/supervised-scaler | 30fa3275a3f9c16c79457ec2205e7bc129658713 | [
"MIT"
] | 44 | 2016-09-22T21:37:48.000Z | 2021-10-09T03:43:32.000Z | deps_local/resampler/lib/resampler_worker.ex | evadne/supervised-scaler | 30fa3275a3f9c16c79457ec2205e7bc129658713 | [
"MIT"
] | 3 | 2016-09-23T08:41:22.000Z | 2017-05-24T10:58:54.000Z | deps_local/resampler/lib/resampler_worker.ex | evadne/supervised-scaler | 30fa3275a3f9c16c79457ec2205e7bc129658713 | [
"MIT"
] | 5 | 2016-09-25T18:52:00.000Z | 2021-07-28T21:12:33.000Z | defmodule Resampler.Worker do
@server_path Path.expand("../priv_dir/convert", "#{__DIR__}")
def start_link([]) do
:gen_server.start_link(__MODULE__, [], [])
end
def init(state) do
server_options = %{pty: true, stdin: true, stdout: true, stderr: true}
{:ok, server_pid, server_os_pid} = Exexec.run_link(@server_path, server_options)
{:ok, {server_pid, server_os_pid}}
end
def handle_call({:resample, fromPath, toWidth, toHeight}, from, {server_pid, server_os_pid} = state) do
Exexec.send server_pid, "#{toWidth} #{toHeight} #{fromPath}\n"
receive do
{:stderr, ^server_os_pid, message} = x ->
{:reply, {:error, String.strip(message)}, state}
{:stdout, ^server_os_pid, message} = x ->
#
# FIXME: in PTY mode there is no distinction between STDOUT and STDERR
# and STDOUT does not get picked up if I remove PTY flag from erlexec
# so this requires further investigation
#
# https://github.com/saleyn/erlexec/issues/41
#
if String.starts_with?(message, "ERROR") do
{:reply, {:error, String.strip(message)}, state}
else
{:reply, {:ok, String.strip(message)}, state}
end
end
end
#
# messages that were not picked up immediately after Exexec.send should be discarded
# as they are no longer relevant. if they were they would be fed back to the caller
# which has invoked GenServer.call
#
def handle_info({:stdout, _, _}, state), do: {:noreply, state}
def handle_info({:stderr, _, _}, state), do: {:noreply, state}
#
# accept any sort of termination since erlexec does two-way cleanup
#
def terminate(reason, {server_pid, server_os_pid} = state), do: :ok
end
| 36.395833 | 105 | 0.647968 |
9e64cafe53e02ba5db7c0a063708fd0c53297e7f | 70 | exs | Elixir | test/views/page_view_test.exs | soarpatriot/flour | 52a57c553da84bd3abad5834014e06370f40a20b | [
"MIT"
] | 1 | 2016-08-05T07:03:43.000Z | 2016-08-05T07:03:43.000Z | test/views/page_view_test.exs | soarpatriot/flour | 52a57c553da84bd3abad5834014e06370f40a20b | [
"MIT"
] | null | null | null | test/views/page_view_test.exs | soarpatriot/flour | 52a57c553da84bd3abad5834014e06370f40a20b | [
"MIT"
] | null | null | null | defmodule Flour.PageViewTest do
use Flour.ConnCase, async: true
end
| 17.5 | 33 | 0.8 |
9e64dd61b61bbd074ccc4154af9fddc025e8a1fa | 643 | ex | Elixir | lib/accent/scopes/language.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | lib/accent/scopes/language.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | lib/accent/scopes/language.ex | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule Accent.Scopes.Language do
@doc """
## Examples
iex> Accent.Scopes.Language.from_search(Accent.Language, "")
Accent.Language
iex> Accent.Scopes.Language.from_search(Accent.Language, nil)
Accent.Language
iex> Accent.Scopes.Language.from_search(Accent.Language, 1234)
Accent.Language
iex> Accent.Scopes.Language.from_search(Accent.Language, "test")
#Ecto.Query<from l0 in Accent.Language, where: ilike(l0.name, ^"%test%")>
"""
@spec from_search(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
def from_search(query, term) do
Accent.Scopes.Search.from_search(query, term, :name)
end
end
| 33.842105 | 77 | 0.710731 |
9e64ed7293709e4c297a5c7171e2f55271d8bb80 | 1,317 | ex | Elixir | lib/absinthe/plug/document_provider/compiled/check.ex | aleqsio/absinthe_plug | 94083e836944993f411f0c1510d153077b03e553 | [
"MIT"
] | 234 | 2016-03-02T07:38:25.000Z | 2022-03-14T19:44:17.000Z | lib/absinthe/plug/document_provider/compiled/check.ex | aleqsio/absinthe_plug | 94083e836944993f411f0c1510d153077b03e553 | [
"MIT"
] | 205 | 2016-03-02T13:52:53.000Z | 2022-03-31T23:31:36.000Z | lib/absinthe/plug/document_provider/compiled/check.ex | aleqsio/absinthe_plug | 94083e836944993f411f0c1510d153077b03e553 | [
"MIT"
] | 168 | 2016-03-02T09:23:56.000Z | 2022-03-31T23:27:58.000Z | defmodule Absinthe.Plug.DocumentProvider.Compiled.Check do
@moduledoc false
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
@doc """
Run the validation.
"""
@spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
def run(input, opts) do
do_run(input, Map.new(opts))
end
@spec do_run(Blueprint.t(), map) :: Phase.result_t() | no_return
def do_run(input, %{id: id, module: module}) do
{input, errors} = Blueprint.prewalk(input, [], &handle_node/2)
case errors do
[] ->
{:ok, input}
found ->
raise format_errors(found, id, module)
end
end
# Collect the validation errors from nodes
@spec handle_node(Blueprint.node_t(), [Phase.Error.t()]) ::
{Blueprint.node_t(), [Phase.Error.t() | String.t()]}
defp handle_node(%{errors: errs} = node, acc) do
{node, acc ++ errs}
end
defp handle_node(node, acc) do
{node, acc}
end
defp format_errors(errors, id, module) do
Absinthe.Plug.DocumentProvider.Compiled.Writer.error_message(
id,
module,
Enum.map(errors, &format_error/1)
)
end
def format_error(%{locations: [%{line: line} | _], message: message, phase: phase}) do
"On line #{line}: #{message} (#{phase})"
end
def format_error(error) do
"#{inspect(error)}"
end
end
| 23.517857 | 88 | 0.629461 |
9e650f8f2cdb841acc3ea697b32e5f0e17245c42 | 3,201 | ex | Elixir | lib/white_bread/context.ex | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 209 | 2015-03-03T14:14:28.000Z | 2020-10-26T03:23:48.000Z | lib/white_bread/context.ex | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 83 | 2015-03-23T11:46:51.000Z | 2020-11-04T09:47:06.000Z | lib/white_bread/context.ex | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 46 | 2015-06-12T17:37:21.000Z | 2020-10-30T09:52:45.000Z | defmodule WhiteBread.Context do
alias WhiteBread.Context.StepMacroHelpers
alias WhiteBread.Context.Setup
@step_keywords [:given_, :when_, :then_, :and_, :but_]
@default_test_library :ex_unit
@doc false
defmacro __using__(opts \\ []) do
opts = Keyword.merge [test_library: @default_test_library], opts
[test_library: test_library] = opts
quote do
import WhiteBread.Context
unquote(import_test_library test_library)
@behaviour WhiteBread.ContextBehaviour
@steps []
@sub_context_modules []
@scenario_state_definied false
@scenario_finalize_defined false
@feature_state_definied false
@feature_finalize_defined false
@timeouts_definied false
@before_compile WhiteBread.Context
end
end
@doc false
defmacro __before_compile__(_env) do
Setup.before
end
for word <- @step_keywords do
defmacro unquote(word)(step_text, do: block) do
StepMacroHelpers.add_block_to_steps(step_text, block)
end
defmacro unquote(word)(step_text, func_def) do
StepMacroHelpers.add_func_to_steps(step_text, func_def)
end
end
defmacro feature_starting_state(function) do
quote do
@feature_state_definied true
def feature_starting_state() do
unquote(function).()
end
end
end
defmacro scenario_starting_state(function) do
quote do
@scenario_state_definied true
def scenario_starting_state(state) do
unquote(function).(state)
end
end
end
defmacro scenario_finalize(function) do
quote do
@scenario_finalize_defined true
def scenario_finalize(status \\ nil, state) do
cond do
is_function(unquote(function), 2)
-> unquote(function).(status, state)
is_function(unquote(function), 1)
-> unquote(function).(state)
is_function(unquote(function), 0)
-> unquote(function).()
end
end
end
end
defmacro feature_finalize(function) do
quote do
@feature_finalize_defined true
def feature_finalize(status \\ nil, state) do
cond do
is_function(unquote(function), 2)
-> unquote(function).(status, state)
is_function(unquote(function), 1)
-> unquote(function).(state)
is_function(unquote(function), 0)
-> unquote(function).()
end
end
end
end
defmacro scenario_timeouts(function) do
quote do
@timeouts_definied true
def get_scenario_timeout(feature, scenario) do
unquote(function).(feature, scenario)
end
end
end
defmacro import_steps_from(context_module) do
quote do
@sub_context_modules [unquote(context_module) | @sub_context_modules]
end
end
defp import_test_library(test_library) do
case test_library do
:ex_unit -> quote do: import ExUnit.Assertions
:espec -> quote do
require ESpec
use ESpec
end
nil -> quote do: true
_ -> raise ArgumentError, "#{inspect test_library} is not a recognized value for :test_library. Recognized values are :ex_unit, :espec, and nil."
end
end
end
| 24.813953 | 151 | 0.666354 |
9e65719e0256352203185478fb1e374532de3cff | 1,507 | exs | Elixir | config/config.exs | deliveroo/routemaster-client-ex | 059ee64a54bc65b731bce1f8474dc8d907eb05d0 | [
"MIT"
] | 2 | 2017-09-20T08:33:20.000Z | 2018-08-17T11:57:09.000Z | config/config.exs | deliveroo/routemaster-client-ex | 059ee64a54bc65b731bce1f8474dc8d907eb05d0 | [
"MIT"
] | 5 | 2017-09-25T21:48:48.000Z | 2018-02-26T09:06:01.000Z | config/config.exs | deliveroo/routemaster-client-ex | 059ee64a54bc65b731bce1f8474dc8d907eb05d0 | [
"MIT"
] | null | null | null | use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
config :routemaster_client, :redis_cache,
{:system, "REDIS_CACHE_URL", "redis://localhost:6379/0"}
config :routemaster_client, :redis_data,
{:system, "REDIS_DATA_URL", "redis://localhost:6379/1"}
config :routemaster_client, :bus_api_token,
{:system, "ROUTEMASTER_BUS_API_TOKEN"}
config :routemaster_client, :drain_token,
{:system, "ROUTEMASTER_DRAIN_TOKEN"}
config :routemaster_client, :bus_url,
{:system, "ROUTEMASTER_URL"}
config :routemaster_client, :drain_url,
{:system, "ROUTEMASTER_DRAIN_URL"}
config :routemaster_client, :service_auth_credentials,
{:system, "ROUTEMASTER_SERVICE_AUTH_CREDENTIALS"}
config :routemaster_client, :cache_ttl, "86400"
# These match the hackney defaults and are here just as an example.
#
# config :routemaster_client, :director_http_options,
# [{:recv_timeout, 5_000}, {:connect_timeout, 8_000}]
#
# config :routemaster_client, :publisher_http_options,
# [{:recv_timeout, 5_000}, {:connect_timeout, 8_000}]
#
# config :routemaster_client, :fetcher_http_options,
# [{:recv_timeout, 5_000}, {:connect_timeout, 8_000}]
case Mix.env do
:test -> import_config "test.exs"
_ -> nil
end
| 30.14 | 70 | 0.753152 |
9e65889e89c6e0341993a9652faadec6a2cd92fd | 2,404 | ex | Elixir | lib/mibli_web.ex | FabriDamazio/mibli | 853fe5e7575040fdbade5ed5997465a68bcc529d | [
"MIT"
] | null | null | null | lib/mibli_web.ex | FabriDamazio/mibli | 853fe5e7575040fdbade5ed5997465a68bcc529d | [
"MIT"
] | null | null | null | lib/mibli_web.ex | FabriDamazio/mibli | 853fe5e7575040fdbade5ed5997465a68bcc529d | [
"MIT"
] | null | null | null | defmodule MibliWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use MibliWeb, :controller
use MibliWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: MibliWeb
import Plug.Conn
import MibliWeb.Gettext
alias MibliWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/mibli_web/templates",
namespace: MibliWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {MibliWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def component do
quote do
use Phoenix.Component
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import MibliWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import MibliWeb.ErrorHelpers
import MibliWeb.Gettext
alias MibliWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 21.657658 | 81 | 0.673461 |
9e65ab817a4d042227cf309d49668593c652e177 | 129 | ex | Elixir | apps/admin_api/lib/admin_api/mailer.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/admin_api/lib/admin_api/mailer.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/lib/admin_api/mailer.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule AdminAPI.Mailer do
@moduledoc """
The module that sends emails.
"""
use Bamboo.Mailer, otp_app: :admin_api
end
| 18.428571 | 40 | 0.713178 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.